hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4513ae6632fe871767cf7caa17ef83db4f5b1f71
| 196
|
py
|
Python
|
uberspace_templates/templates/__init__.py
|
imsoftware/uberspace-templates
|
57b4643809bf9618f9f489189528420ec6b84f2a
|
[
"MIT"
] | null | null | null |
uberspace_templates/templates/__init__.py
|
imsoftware/uberspace-templates
|
57b4643809bf9618f9f489189528420ec6b84f2a
|
[
"MIT"
] | null | null | null |
uberspace_templates/templates/__init__.py
|
imsoftware/uberspace-templates
|
57b4643809bf9618f9f489189528420ec6b84f2a
|
[
"MIT"
] | 1
|
2020-11-21T23:20:27.000Z
|
2020-11-21T23:20:27.000Z
|
from uberspace_templates.templates.flask import create as flask
from uberspace_templates.templates.website import create as website
from uberspace_templates.templates.vuejs import create as vuejs
| 49
| 67
| 0.877551
| 27
| 196
| 6.259259
| 0.333333
| 0.230769
| 0.390533
| 0.550296
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091837
| 196
| 3
| 68
| 65.333333
| 0.949438
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
188921a36f6e025f84f85833cbfb1baabeded125
| 23,511
|
py
|
Python
|
graphpype/pipelines/nii_to_conmat.py
|
davidmeunier79/graphpype
|
800d1f8cbfdf3a18de77558c3b88eeb31735857e
|
[
"BSD-3-Clause"
] | null | null | null |
graphpype/pipelines/nii_to_conmat.py
|
davidmeunier79/graphpype
|
800d1f8cbfdf3a18de77558c3b88eeb31735857e
|
[
"BSD-3-Clause"
] | null | null | null |
graphpype/pipelines/nii_to_conmat.py
|
davidmeunier79/graphpype
|
800d1f8cbfdf3a18de77558c3b88eeb31735857e
|
[
"BSD-3-Clause"
] | null | null | null |
"""
From nifti file to conmat
"""
import nipype.pipeline.engine as pe
import nipype.interfaces.utility as niu
import nipype.interfaces.spm.utils as spmu
from nipype.interfaces.niftyreg.regutils import RegResample
from graphpype.nodes.correl_mat import (
IntersectMask, ExtractTS, ExtractMeanTS, RegressCovar, FindSPMRegressor,
ComputeConfCorMat)
from graphpype.utils import show_files
def create_pipeline_nii_to_conmat_simple(
main_path, pipeline_name="nii_to_conmat", conf_interval_prob=0.05,
background_val=-1.0, plot=True):
"""
Description:
Pipeline from nifti 4D (after preprocessing) to connectivity matrices,
no segmentation in tissues given, but coords for wm and csf are available
and regressed. coords / labels o indexed mask are also available
Inputs (inputnode):
* nii_4D_file
* ROI_mask_file
Optional inputs (inputnode) :
* rp_file
* ROI_coords_file
* ROI_MNI_coords_file
* ROI_labels_file
Comments:
Typically used after nipype preprocessing pipeline and before
conmat_to_graph pipeline
"""
pipeline = pe.Workflow(name=pipeline_name)
pipeline.base_dir = main_path
inputnode = pe.Node(niu.IdentityInterface(fields=[
'nii_4D_file', 'ROI_mask_file', 'rp_file', 'ROI_coords_file',
'ROI_MNI_coords_file', 'ROI_labels_file']), name='inputnode')
# Nodes version: use min_BOLD_intensity and
# return coords where signal is strong enough
extract_mean_ROI_ts = pe.Node(interface=ExtractTS(
plot_fig=False), name='extract_mean_ROI_ts')
extract_mean_ROI_ts.inputs.background_val = background_val
pipeline.connect(inputnode, 'nii_4D_file', extract_mean_ROI_ts, 'file_4D')
pipeline.connect(inputnode, 'ROI_mask_file',
extract_mean_ROI_ts, 'indexed_rois_file')
pipeline.connect(inputnode, 'ROI_coords_file',
extract_mean_ROI_ts, 'coord_rois_file')
pipeline.connect(inputnode, 'ROI_MNI_coords_file',
extract_mean_ROI_ts, 'MNI_coord_rois_file')
pipeline.connect(inputnode, 'ROI_labels_file',
extract_mean_ROI_ts, 'label_rois_file')
# regress covariates
regress_covar = pe.Node(interface=RegressCovar(plot_fig=plot), iterfield=[
'masked_ts_file', 'rp_file'], name='regress_covar')
pipeline.connect(extract_mean_ROI_ts, 'mean_masked_ts_file',
regress_covar, 'masked_ts_file')
pipeline.connect(inputnode, 'rp_file', regress_covar, 'rp_file')
# compute correlations
compute_conf_cor_mat = pe.Node(
interface=ComputeConfCorMat(plot_mat=plot),
name='compute_conf_cor_mat')
compute_conf_cor_mat.inputs.conf_interval_prob = conf_interval_prob
pipeline.connect(regress_covar, 'resid_ts_file',
compute_conf_cor_mat, 'ts_file')
pipeline.connect(extract_mean_ROI_ts, 'subj_label_rois_file',
compute_conf_cor_mat, 'labels_file')
return pipeline
def create_pipeline_nii_to_conmat_seg_template(
main_path, pipeline_name="nii_to_conmat", conf_interval_prob=0.05):
"""
Description:
Pipeline from nifti 4D (after preprocessing) to connectivity matrices
Inputs (inputnode):
* nii_4D_file
* rp_file
* wm_anat_file
* csf_anat_file
* ROI_mask_file
* ROI_coords_file
* ROI_MNI_coords_file
* ROI_labels_file
Comments:
Typically used after nipype preprocessing pipeline and
before conmat_to_graph pipeline
"""
pipeline = pe.Workflow(name=pipeline_name)
pipeline.base_dir = main_path
inputnode = pe.Node(niu.IdentityInterface(fields=[
'nii_4D_file', 'rp_file', 'wm_anat_file', 'csf_anat_file',
'ROI_mask_file', 'ROI_coords_file', 'ROI_MNI_coords_file',
'ROI_labels_file']), name='inputnode')
# Nodes version: use min_BOLD_intensity and
# return coords where signal is strong enough
extract_mean_ROI_ts = pe.Node(interface=ExtractTS(
plot_fig=False), name='extract_mean_ROI_ts')
pipeline.connect(inputnode, 'nii_4D_file', extract_mean_ROI_ts, 'file_4D')
pipeline.connect(inputnode, 'ROI_mask_file',
extract_mean_ROI_ts, 'indexed_rois_file')
pipeline.connect(inputnode, 'ROI_coords_file',
extract_mean_ROI_ts, 'coord_rois_file')
pipeline.connect(inputnode, 'ROI_MNI_coords_file',
extract_mean_ROI_ts, 'MNI_coord_rois_file')
pipeline.connect(inputnode, 'ROI_labels_file',
extract_mean_ROI_ts, 'label_rois_file')
# extract white matter signal
compute_wm_ts = pe.Node(interface=ExtractMeanTS(
plot_fig=False), name='extract_wm_ts')
compute_wm_ts.inputs.suffix = 'wm'
pipeline.connect(inputnode, 'nii_4D_file', compute_wm_ts, 'file_4D')
pipeline.connect(inputnode, 'wm_anat_file',
compute_wm_ts, 'filter_mask_file')
# extract csf signal
compute_csf_ts = pe.Node(interface=ExtractMeanTS(
plot_fig=False), name='extract_csf_ts')
compute_csf_ts.inputs.suffix = 'csf'
pipeline.connect(inputnode, 'nii_4D_file', compute_csf_ts, 'file_4D')
pipeline.connect(inputnode, 'csf_anat_file',
compute_csf_ts, 'filter_mask_file')
regress_covar = pe.Node(interface=RegressCovar(), iterfield=[
'masked_ts_file', 'rp_file'], name='regress_covar')
pipeline.connect(extract_mean_ROI_ts, 'mean_masked_ts_file',
regress_covar, 'masked_ts_file')
pipeline.connect(compute_wm_ts, 'mean_masked_ts_file',
regress_covar, 'mean_wm_ts_file')
pipeline.connect(compute_csf_ts, 'mean_masked_ts_file',
regress_covar, 'mean_csf_ts_file')
pipeline.connect(inputnode, 'rp_file', regress_covar, 'rp_file')
# compute correlations
compute_conf_cor_mat = pe.Node(
interface=ComputeConfCorMat(), name='compute_conf_cor_mat')
compute_conf_cor_mat.inputs.conf_interval_prob = conf_interval_prob
pipeline.connect(regress_covar, 'resid_ts_file',
compute_conf_cor_mat, 'ts_file')
pipeline.connect(extract_mean_ROI_ts, 'subj_label_rois_file',
compute_conf_cor_mat, 'labels_file')
return pipeline
def create_pipeline_nii_to_subj_ROI(
main_path, filter_gm_threshold=0.9, pipeline_name="nii_to_subj_ROI",
background_val=-1.0, plot=True, reslice=False, resample=False,
min_BOLD_intensity=50, percent_signal=0.5):
"""
Description:
Pipeline from nifti 4D (after preprocessing) to connectivity matrices
Use Grey matter for having a mask for each subject
Inputs (inputnode):
* nii_4D_file
* gm_anat_file
* ROI_mask_file
* ROI_coords_file
* ROI_MNI_coords_file
* ROI_labels_file
Comments:
Typically used after nipype preprocessing pipeline and
before conmat_to_graph pipeline
"""
if reslice and resample:
print("Only reslice OR resample can be true, setting reslice to False")
reslice = False
pipeline = pe.Workflow(name=pipeline_name)
pipeline.base_dir = main_path
inputnode = pe.Node(niu.IdentityInterface(fields=[
'nii_4D_file', 'ROI_mask_file', 'gm_anat_file', 'ROI_coords_file',
'ROI_MNI_coords_file', 'ROI_labels_file']), name='inputnode')
# reslice gm
if reslice:
reslice_gm = pe.Node(interface=spmu.Reslice(), name='reslice_gm')
pipeline.connect(inputnode, 'ROI_mask_file',
reslice_gm, 'space_defining')
pipeline.connect(inputnode, 'gm_anat_file', reslice_gm, 'in_file')
if resample:
resample_gm = pe.Node(interface=RegResample(), name='resample_gm')
pipeline.connect(inputnode, 'ROI_mask_file', resample_gm, 'ref_file')
pipeline.connect(inputnode, 'gm_anat_file', resample_gm, 'flo_file')
# Preprocess pipeline,
filter_ROI_mask_with_GM = pe.Node(
interface=IntersectMask(), name='filter_ROI_mask_with_GM')
filter_ROI_mask_with_GM.inputs.filter_thr = filter_gm_threshold
filter_ROI_mask_with_GM.inputs.background_val = background_val
pipeline.connect(inputnode, 'ROI_mask_file',
filter_ROI_mask_with_GM, 'indexed_rois_file')
pipeline.connect(inputnode, 'ROI_coords_file',
filter_ROI_mask_with_GM, 'coords_rois_file')
pipeline.connect(inputnode, 'ROI_MNI_coords_file',
filter_ROI_mask_with_GM, 'MNI_coords_rois_file')
pipeline.connect(inputnode, 'ROI_labels_file',
filter_ROI_mask_with_GM, 'labels_rois_file')
if reslice:
pipeline.connect(reslice_gm, 'out_file',
filter_ROI_mask_with_GM, 'filter_mask_file')
elif resample:
pipeline.connect(resample_gm, 'out_file',
filter_ROI_mask_with_GM, 'filter_mask_file')
else:
pipeline.connect(inputnode, 'gm_anat_file',
filter_ROI_mask_with_GM, 'filter_mask_file')
# Nodes version: use min_BOLD_intensity and
# return coords where signal is strong enough
extract_mean_ROI_ts = pe.Node(interface=ExtractTS(
plot_fig=plot), name='extract_mean_ROI_ts')
extract_mean_ROI_ts.inputs.percent_signal = percent_signal
extract_mean_ROI_ts.inputs.min_BOLD_intensity = min_BOLD_intensity
pipeline.connect(inputnode, 'nii_4D_file', extract_mean_ROI_ts, 'file_4D')
pipeline.connect(filter_ROI_mask_with_GM, 'filtered_indexed_rois_file',
extract_mean_ROI_ts, 'indexed_rois_file')
pipeline.connect(filter_ROI_mask_with_GM, 'filtered_MNI_coords_rois_file',
extract_mean_ROI_ts, 'MNI_coord_rois_file')
pipeline.connect(filter_ROI_mask_with_GM, 'filtered_coords_rois_file',
extract_mean_ROI_ts, 'coord_rois_file')
pipeline.connect(filter_ROI_mask_with_GM, 'filtered_labels_rois_file',
extract_mean_ROI_ts, 'label_rois_file')
return pipeline
def create_pipeline_nii_to_conmat(
main_path, filter_gm_threshold=0.9, pipeline_name="nii_to_conmat",
conf_interval_prob=0.05, background_val=-1.0, plot=True,
reslice=False, resample=False, min_BOLD_intensity=50,
percent_signal=0.5):
"""
Description:
Pipeline from nifti 4D (after preprocessing) to connectivity matrices
Inputs (inputnode):
* nii_4D_file
* rp_file
* ROI_mask_file
* gm_anat_file
* wm_anat_file
* csf_anat_file
* ROI_coords_file
* ROI_MNI_coords_file
* ROI_labels_file
Comments:
Typically used after nipype preprocessing pipeline and
before conmat_to_graph pipeline
"""
if reslice and resample:
print("Only reslice OR resample can be true, setting reslice to False")
reslice = False
pipeline = pe.Workflow(name=pipeline_name)
pipeline.base_dir = main_path
inputnode = pe.Node(niu.IdentityInterface(fields=[
'nii_4D_file', 'ROI_mask_file', 'rp_file', 'gm_anat_file',
'wm_anat_file', 'csf_anat_file', 'ROI_coords_file',
'ROI_MNI_coords_file', 'ROI_labels_file']), name='inputnode')
# reslice gm
if reslice:
reslice_gm = pe.Node(interface=spmu.Reslice(), name='reslice_gm')
pipeline.connect(inputnode, 'ROI_mask_file', reslice_gm,
'space_defining')
pipeline.connect(inputnode, 'gm_anat_file', reslice_gm, 'in_file')
if resample:
resample_gm = pe.Node(interface=RegResample(), name='resample_gm')
pipeline.connect(inputnode, 'ROI_mask_file', resample_gm,
'ref_file')
pipeline.connect(inputnode, 'gm_anat_file', resample_gm, 'flo_file')
# reslice wm
if reslice:
reslice_wm = pe.Node(interface=spmu.Reslice(), name='reslice_wm')
pipeline.connect(inputnode, 'ROI_mask_file', reslice_wm,
'space_defining')
pipeline.connect(inputnode, 'wm_anat_file', reslice_wm, 'in_file')
if resample:
resample_wm = pe.Node(interface=RegResample(), name='resample_wm')
pipeline.connect(inputnode, 'ROI_mask_file', resample_wm,
'ref_file')
pipeline.connect(inputnode, 'wm_anat_file', resample_wm, 'flo_file')
# reslice csf
if reslice:
reslice_csf = pe.Node(interface=spmu.Reslice(), name='reslice_csf')
pipeline.connect(inputnode, 'ROI_mask_file', reslice_csf,
'space_defining')
pipeline.connect(inputnode, 'csf_anat_file', reslice_csf, 'in_file')
if resample:
resample_csf = pe.Node(interface=RegResample(), name='resample_csf')
pipeline.connect(inputnode, 'ROI_mask_file', resample_csf,
'ref_file')
pipeline.connect(inputnode, 'csf_anat_file', resample_csf, 'flo_file')
# Preprocess pipeline,
filter_ROI_mask_with_GM = pe.Node(
interface=IntersectMask(), name='filter_ROI_mask_with_GM')
filter_ROI_mask_with_GM.inputs.filter_thr = filter_gm_threshold
filter_ROI_mask_with_GM.inputs.background_val = background_val
pipeline.connect(inputnode, 'ROI_mask_file',
filter_ROI_mask_with_GM, 'indexed_rois_file')
pipeline.connect(inputnode, 'ROI_coords_file',
filter_ROI_mask_with_GM, 'coords_rois_file')
pipeline.connect(inputnode, 'ROI_MNI_coords_file',
filter_ROI_mask_with_GM, 'MNI_coords_rois_file')
pipeline.connect(inputnode, 'ROI_labels_file',
filter_ROI_mask_with_GM, 'labels_rois_file')
if reslice:
pipeline.connect(reslice_gm, 'out_file',
filter_ROI_mask_with_GM, 'filter_mask_file')
elif resample:
pipeline.connect(resample_gm, 'out_file',
filter_ROI_mask_with_GM, 'filter_mask_file')
else:
pipeline.connect(inputnode, 'gm_anat_file',
filter_ROI_mask_with_GM, 'filter_mask_file')
# Nodes version: use min_BOLD_intensity and
# return coords where signal is strong enough
extract_mean_ROI_ts = pe.Node(interface=ExtractTS(
plot_fig=plot), name='extract_mean_ROI_ts')
extract_mean_ROI_ts.inputs.percent_signal = percent_signal
extract_mean_ROI_ts.inputs.min_BOLD_intensity = min_BOLD_intensity
pipeline.connect(inputnode, 'nii_4D_file', extract_mean_ROI_ts, 'file_4D')
pipeline.connect(filter_ROI_mask_with_GM, 'filtered_indexed_rois_file',
extract_mean_ROI_ts, 'indexed_rois_file')
pipeline.connect(filter_ROI_mask_with_GM, 'filtered_MNI_coords_rois_file',
extract_mean_ROI_ts, 'MNI_coord_rois_file')
pipeline.connect(filter_ROI_mask_with_GM, 'filtered_coords_rois_file',
extract_mean_ROI_ts, 'coord_rois_file')
pipeline.connect(filter_ROI_mask_with_GM, 'filtered_labels_rois_file',
extract_mean_ROI_ts, 'label_rois_file')
# extract white matter signal
compute_wm_ts = pe.Node(interface=ExtractMeanTS(
plot_fig=plot), name='extract_wm_ts')
compute_wm_ts.inputs.suffix = 'wm'
pipeline.connect(inputnode, 'nii_4D_file', compute_wm_ts, 'file_4D')
if reslice:
pipeline.connect(reslice_wm, 'out_file',
compute_wm_ts, 'filter_mask_file')
elif resample:
pipeline.connect(resample_wm, 'out_file',
compute_wm_ts, 'filter_mask_file')
else:
pipeline.connect(inputnode, 'wm_anat_file',
compute_wm_ts, 'filter_mask_file')
# extract csf signal
compute_csf_ts = pe.Node(interface=ExtractMeanTS(
plot_fig=plot), name='extract_csf_ts')
compute_csf_ts.inputs.suffix = 'csf'
pipeline.connect(inputnode, 'nii_4D_file', compute_csf_ts, 'file_4D')
if reslice:
pipeline.connect(reslice_csf, 'out_file',
compute_csf_ts, 'filter_mask_file')
elif resample:
pipeline.connect(resample_csf, 'out_file',
compute_csf_ts, 'filter_mask_file')
else:
pipeline.connect(inputnode, 'csf_anat_file',
compute_csf_ts, 'filter_mask_file')
# regress covariates
regress_covar = pe.Node(interface=RegressCovar(plot_fig=plot), iterfield=[
'masked_ts_file', 'rp_file', 'mean_wm_ts_file', 'mean_csf_ts_file'],
name='regress_covar')
pipeline.connect(extract_mean_ROI_ts, 'mean_masked_ts_file',
regress_covar, 'masked_ts_file')
pipeline.connect(inputnode, 'rp_file', regress_covar, 'rp_file')
pipeline.connect(compute_wm_ts, 'mean_masked_ts_file',
regress_covar, 'mean_wm_ts_file')
pipeline.connect(compute_csf_ts, 'mean_masked_ts_file',
regress_covar, 'mean_csf_ts_file')
# compute correlations
compute_conf_cor_mat = pe.Node(interface=ComputeConfCorMat(
plot_mat=plot), name='compute_conf_cor_mat')
compute_conf_cor_mat.inputs.conf_interval_prob = conf_interval_prob
pipeline.connect(regress_covar, 'resid_ts_file',
compute_conf_cor_mat, 'ts_file')
pipeline.connect(extract_mean_ROI_ts, 'subj_label_rois_file',
compute_conf_cor_mat, 'labels_file')
return pipeline
def create_pipeline_nii_to_weighted_conmat(
main_path, pipeline_name="nii_to_weighted_conmat",
concatenated_runs=True, conf_interval_prob=0.05, mult_regnames=True,
spm_reg=True):
"""
Description:
Pipeline from resid_ts_file (after preprocessing) to weighted connectivity
matrices
Involves a regressor file as wiehgt for computing weighted correlations
Parameters:
* main_path: path where the analysis will be located
(base_dir of workflow)
* pipeline_name (default = "nii_to_weighted_conmat"):
name of the workflow that will be created for this analysis
* concatenated_runs (default = True):
If several sessions are contained in the same SPM.mat
* conf_interval_prob (default = 0.05):
default confidence interval value for thresholding connectivity matrix
* mult_regnames (default = True):
if reg_names is a list
(instead of the instance of an iterable at the higher level)
* spm_reg (default = True) : either use spm_mat_file or reg_txt
(the latter containing directly the weighting use for computing
weighted correlation)
Inputs (inputnode):
* resid_ts_file
* spm_mat_file (from a typical SPM level1 activation analysis)
* regress_names (names to look after in spm_mat_file)
* run_index
* ROI_labels_file
* reg_txt (if spm_reg = False)
Comments:
Typically used after previous pipeline (create_pipeline_nii_to_conmat)
and before conmat_to_graph pipeline
"""
pipeline = pe.Workflow(name=pipeline_name)
pipeline.base_dir = main_path
inputnode = pe.Node(niu.IdentityInterface(fields=[
'resid_ts_file', 'spm_mat_file', 'regress_names', 'run_index',
'ROI_labels_file', 'reg_txt']), name='inputnode')
if spm_reg:
if mult_regnames:
# extract regressor of interest from SPM.mat
extract_cond = pe.MapNode(interface=FindSPMRegressor(
only_positive_values=True), iterfield=['regressor_name'],
name='extract_cond')
pipeline.connect(inputnode, ('spm_mat_file', show_files),
extract_cond, 'spm_mat_file')
pipeline.connect(inputnode, 'regress_names',
extract_cond, 'regressor_name')
pipeline.connect(inputnode, 'run_index', extract_cond, 'run_index')
# extract_cond.inputs.run_index = 0
extract_cond.inputs.concatenated_runs = concatenated_runs
# compute weighted correlations
compute_conf_cor_mat = pe.MapNode(interface=ComputeConfCorMat(),
iterfield=['weight_file'],
name='compute_conf_cor_mat')
# with confidence interval
compute_conf_cor_mat.inputs.conf_interval_prob = conf_interval_prob
pipeline.connect(inputnode, 'resid_ts_file',
compute_conf_cor_mat, 'ts_file')
pipeline.connect(extract_cond, 'regressor_file',
compute_conf_cor_mat, 'weight_file')
pipeline.connect(inputnode, 'ROI_labels_file',
compute_conf_cor_mat, 'labels_file')
else:
# extract regressor of interest from SPM.mat
extract_cond = pe.Node(interface=FindSPMRegressor(
only_positive_values=True), name='extract_cond')
pipeline.connect(inputnode, 'spm_mat_file',
extract_cond, 'spm_mat_file')
pipeline.connect(inputnode, 'regress_names',
extract_cond, 'regressor_name')
pipeline.connect(inputnode, 'run_index', extract_cond, 'run_index')
extract_cond.inputs.concatenated_runs = concatenated_runs
# compute weighted correlations
# confidence interval
compute_conf_cor_mat = pe.Node(
interface=ComputeConfCorMat(), name='compute_conf_cor_mat')
compute_conf_cor_mat.inputs.conf_interval_prob = conf_interval_prob
pipeline.connect(inputnode, 'resid_ts_file',
compute_conf_cor_mat, 'ts_file')
pipeline.connect(extract_cond, 'regressor_file',
compute_conf_cor_mat, 'weight_file')
pipeline.connect(inputnode, 'ROI_labels_file',
compute_conf_cor_mat, 'labels_file')
else:
if mult_regnames:
compute_conf_cor_mat = pe.MapNode(interface=ComputeConfCorMat(),
iterfield=['weight_file'],
name='compute_conf_cor_mat')
compute_conf_cor_mat.inputs.conf_interval_prob = conf_interval_prob
pipeline.connect(inputnode, 'resid_ts_file',
compute_conf_cor_mat, 'ts_file')
pipeline.connect(inputnode, 'reg_txt',
compute_conf_cor_mat, 'weight_file')
pipeline.connect(inputnode, 'ROI_labels_file',
compute_conf_cor_mat, 'labels_file')
else:
compute_conf_cor_mat = pe.Node(
interface=ComputeConfCorMat(), name='compute_conf_cor_mat')
compute_conf_cor_mat.inputs.conf_interval_prob = conf_interval_prob
pipeline.connect(inputnode, 'resid_ts_file',
compute_conf_cor_mat, 'ts_file')
pipeline.connect(inputnode, 'reg_txt',
compute_conf_cor_mat, 'weight_file')
pipeline.connect(inputnode, 'ROI_labels_file',
compute_conf_cor_mat, 'labels_file')
return pipeline
| 37.859903
| 79
| 0.66437
| 2,868
| 23,511
| 5.029637
| 0.075662
| 0.099827
| 0.108146
| 0.043258
| 0.884298
| 0.872305
| 0.838267
| 0.816291
| 0.809636
| 0.783986
| 0
| 0.003576
| 0.250691
| 23,511
| 620
| 80
| 37.920968
| 0.815235
| 0.160265
| 0
| 0.781711
| 0
| 0
| 0.200093
| 0.014403
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014749
| false
| 0
| 0.017699
| 0
| 0.047198
| 0.0059
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18b5fd8d67e610bcd7cd9cfba8e4807cbb5191d7
| 47
|
py
|
Python
|
nn/__init__.py
|
jinanloubani/aTEAM
|
0999799fafbdc36ae09cdd91d99a5a7316803143
|
[
"MIT"
] | 23
|
2018-05-25T02:16:59.000Z
|
2022-03-24T06:56:34.000Z
|
nn/__init__.py
|
jinanloubani/aTEAM
|
0999799fafbdc36ae09cdd91d99a5a7316803143
|
[
"MIT"
] | 1
|
2019-06-11T06:59:21.000Z
|
2019-06-11T06:59:40.000Z
|
nn/__init__.py
|
jinanloubani/aTEAM
|
0999799fafbdc36ae09cdd91d99a5a7316803143
|
[
"MIT"
] | 8
|
2018-08-29T16:43:12.000Z
|
2022-01-17T11:54:40.000Z
|
from . import modules
from . import functional
| 15.666667
| 24
| 0.787234
| 6
| 47
| 6.166667
| 0.666667
| 0.540541
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.170213
| 47
| 2
| 25
| 23.5
| 0.948718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
18c46ddd43d15e794e6d334cdb93d0f9a850b2bb
| 7,011
|
py
|
Python
|
tests/test_analyzing_utils.py
|
JulienPalard/habit-tracker
|
83cbf214417ab1b239ec2747ceac9d58a42b1426
|
[
"MIT"
] | null | null | null |
tests/test_analyzing_utils.py
|
JulienPalard/habit-tracker
|
83cbf214417ab1b239ec2747ceac9d58a42b1426
|
[
"MIT"
] | 2
|
2021-09-25T18:12:54.000Z
|
2021-10-04T11:17:50.000Z
|
tests/test_analyzing_utils.py
|
JulienPalard/habit-tracker
|
83cbf214417ab1b239ec2747ceac9d58a42b1426
|
[
"MIT"
] | 1
|
2021-09-25T15:51:42.000Z
|
2021-09-25T15:51:42.000Z
|
import unittest
from habits.analyze.utils import *
from datetime import datetime
class AnalyzingUtilsTestCase(unittest.TestCase):
def test_analyze_stream(self):
result = analyze_stream(
[
(True, 2, '2020-12-01', '2020-12-02'),
(False, 3, '2020-12-03', '2020-12-05'),
(True, 2, '2020-12-06', '2020-12-07'),
(False, 1, '2020-12-08', '2020-12-08'),
(True, 1, '2020-12-09', '2020-12-09'),
]
)
self.assertEqual(result['sum_complete_units'], 9)
self.assertEqual(result['sum_streak_units'], 5)
self.assertEqual(result['number_of_streaks'], 3)
self.assertEqual(result['number_of_breaks'], 2)
self.assertEqual(result['highest_streak'], (True, 2, '2020-12-01', '2020-12-02'))
result = analyze_stream([])
self.assertEqual({}, result)
def test_create_stream(self):
result = create_stream([
('2020-12-01', True),
('2020-12-02', True),
('2020-12-03', False),
('2020-12-04', False),
('2020-12-05', False),
('2020-12-06', True),
('2020-12-07', True),
('2020-12-08', False),
('2020-12-09', True),
])
self.assertEqual(
list(result),
[
(True, 2, '2020-12-01', '2020-12-02'),
(False, 3, '2020-12-03', '2020-12-05'),
(True, 2, '2020-12-06', '2020-12-07'),
(False, 1, '2020-12-08', '2020-12-08'),
(True, 1, '2020-12-09', '2020-12-09'),
]
)
# Testing empty list
result = create_stream([])
self.assertEqual(list(result), [])
# One row list
result = create_stream([
('2020-12-01', True),
])
self.assertEqual(
list(result),
[
(True, 1, '2020-12-01', '2020-12-01'),
]
)
def test_generate_streak_ids(self):
result = generate_streak_ids([
('2020-12-01', True),
('2020-12-02', True),
('2020-12-03', False),
('2020-12-04', False),
('2020-12-05', False),
('2020-12-06', True),
('2020-12-07', True),
('2020-12-08', False),
('2020-12-09', True),
])
self.assertEqual(
list(result),
[
('2020-12-01', True, 0),
('2020-12-02', True, 0),
('2020-12-03', False, 1),
('2020-12-04', False, 1),
('2020-12-05', False, 1),
('2020-12-06', True, 2),
('2020-12-07', True, 2),
('2020-12-08', False, 3),
('2020-12-09', True, 4),
]
)
# Testing empty list
result = generate_streak_ids([])
self.assertEqual(list(result), [])
# One row list
result = generate_streak_ids([
('2020-12-01', True),
])
self.assertEqual(
list(result),
[
('2020-12-01', True, 0),
]
)
def test_group_tracking(self):
result = group_tracking(
[
('2020-12-01', True, 0),
('2020-12-02', True, 0),
('2020-12-03', False, 1),
('2020-12-04', False, 1),
('2020-12-05', False, 1),
('2020-12-06', True, 2),
('2020-12-07', True, 2),
('2020-12-08', False, 3),
('2020-12-09', True, 4),
]
)
expected = [
[
('2020-12-01', True, 0), ('2020-12-02', True, 0),
],
[
('2020-12-03', False, 1), ('2020-12-04', False, 1), ('2020-12-05', False, 1),
],
[
('2020-12-06', True, 2), ('2020-12-07', True, 2),
],
[
('2020-12-08', False, 3),
],
[
('2020-12-09', True, 4),
]
]
index = 0
for item in result:
self.assertEqual(list(item), expected[index])
index = index + 1
def test_filter_streak(self):
result = filter_streaks(
[
(True, 2, '2020-12-01', '2020-12-02'),
(False, 3, '2020-12-03', '2020-12-05'),
(True, 2, '2020-12-06', '2020-12-07'),
(False, 1, '2020-12-08', '2020-12-08'),
(True, 1, '2020-12-09', '2020-12-09'),
]
)
self.assertEqual(
list(result),
[
(True, 2, '2020-12-01', '2020-12-02'),
(True, 2, '2020-12-06', '2020-12-07'),
(True, 1, '2020-12-09', '2020-12-09'),
]
)
def test_sum_units(self):
result = sum_units(
[
(True, 2, '2020-12-01', '2020-12-02'),
(False, 3, '2020-12-03', '2020-12-05'),
(True, 2, '2020-12-06', '2020-12-07'),
(False, 1, '2020-12-08', '2020-12-08'),
(True, 1, '2020-12-09', '2020-12-09'),
]
)
self.assertEqual(result, 9)
def test_highest_item(self):
result = highest_item(
[
(True, 2, '2020-12-01', '2020-12-02'),
(False, 3, '2020-12-03', '2020-12-05'),
(True, 2, '2020-12-06', '2020-12-07'),
(False, 1, '2020-12-08', '2020-12-08'),
(True, 1, '2020-12-09', '2020-12-09'),
]
)
self.assertEqual(result, (False, 3, '2020-12-03', '2020-12-05'))
def test_filter_and_sort_raw_data(self):
input_data = [
{"habit_id": 1, "created_at": datetime.strptime("2021-01-15 14:00:00", "%Y-%m-%d %H:%M:%S")},
{"habit_id": 1, "created_at": datetime.strptime("2021-01-15 14:01:00", "%Y-%m-%d %H:%M:%S")},
{"habit_id": 2, "created_at": datetime.strptime("2021-01-12 14:00:00", "%Y-%m-%d %H:%M:%S")},
{"habit_id": 2, "created_at": datetime.strptime("2021-01-12 14:01:00", "%Y-%m-%d %H:%M:%S")},
{"habit_id": 1, "created_at": datetime.strptime("2021-01-13 15:00:00", "%Y-%m-%d %H:%M:%S")},
{"habit_id": 1, "created_at": datetime.strptime("2021-01-13 15:01:00", "%Y-%m-%d %H:%M:%S")},
]
result = filter_and_sort_raw_data(1, input_data)
self.assertEqual(
result,
[
datetime.strptime("2021-01-13 15:00:00", "%Y-%m-%d %H:%M:%S"),
datetime.strptime("2021-01-13 15:01:00", "%Y-%m-%d %H:%M:%S"),
datetime.strptime("2021-01-15 14:00:00", "%Y-%m-%d %H:%M:%S"),
datetime.strptime("2021-01-15 14:01:00", "%Y-%m-%d %H:%M:%S")
]
)
| 33.227488
| 105
| 0.422051
| 849
| 7,011
| 3.41225
| 0.095406
| 0.227822
| 0.050742
| 0.072144
| 0.762858
| 0.714877
| 0.713497
| 0.703141
| 0.616155
| 0.616155
| 0
| 0.25225
| 0.381971
| 7,011
| 211
| 106
| 33.227488
| 0.41634
| 0.008986
| 0
| 0.508108
| 0
| 0
| 0.237471
| 0
| 0
| 0
| 0
| 0
| 0.091892
| 1
| 0.043243
| false
| 0
| 0.016216
| 0
| 0.064865
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18cfef410c8723b5a9f2fe9412c77ad1a5d02af4
| 33,725
|
py
|
Python
|
sdk/python/pulumi_aws/cfg/remediation_configuration.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/cfg/remediation_configuration.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/cfg/remediation_configuration.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['RemediationConfigurationArgs', 'RemediationConfiguration']
# NOTE: tfgen-generated boilerplate (see file header) — do not hand-edit logic.
# The @pulumi.input_type decorator introspects the property getters/setters
# below to build this input type's wire metadata; pulumi.set / pulumi.get read
# and write the decorator-managed backing table rather than plain attributes.
@pulumi.input_type
class RemediationConfigurationArgs:
    def __init__(__self__, *,
                 config_rule_name: pulumi.Input[str],
                 target_id: pulumi.Input[str],
                 target_type: pulumi.Input[str],
                 automatic: Optional[pulumi.Input[bool]] = None,
                 execution_controls: Optional[pulumi.Input['RemediationConfigurationExecutionControlsArgs']] = None,
                 maximum_automatic_attempts: Optional[pulumi.Input[int]] = None,
                 parameters: Optional[pulumi.Input[Sequence[pulumi.Input['RemediationConfigurationParameterArgs']]]] = None,
                 resource_type: Optional[pulumi.Input[str]] = None,
                 retry_attempt_seconds: Optional[pulumi.Input[int]] = None,
                 target_version: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a RemediationConfiguration resource.
        :param pulumi.Input[str] config_rule_name: Name of the AWS Config rule.
        :param pulumi.Input[str] target_id: Target ID is the name of the public document.
        :param pulumi.Input[str] target_type: Type of the target. Target executes remediation. For example, SSM document.
        :param pulumi.Input[bool] automatic: Remediation is triggered automatically if `true`.
        :param pulumi.Input['RemediationConfigurationExecutionControlsArgs'] execution_controls: Configuration block for execution controls. See below.
        :param pulumi.Input[int] maximum_automatic_attempts: Maximum number of failed attempts for auto-remediation. If you do not select a number, the default is 5.
        :param pulumi.Input[Sequence[pulumi.Input['RemediationConfigurationParameterArgs']]] parameters: Can be specified multiple times for each parameter. Each parameter block supports arguments below.
        :param pulumi.Input[str] resource_type: Type of resource.
        :param pulumi.Input[int] retry_attempt_seconds: Maximum time in seconds that AWS Config runs auto-remediation. If you do not select a number, the default is 60 seconds.
        :param pulumi.Input[str] target_version: Version of the target. For example, version of the SSM document
        """
        pulumi.set(__self__, "config_rule_name", config_rule_name)
        pulumi.set(__self__, "target_id", target_id)
        pulumi.set(__self__, "target_type", target_type)
        # Optional arguments are recorded only when supplied, so unset keys
        # stay absent from the input table instead of becoming explicit Nones.
        if automatic is not None:
            pulumi.set(__self__, "automatic", automatic)
        if execution_controls is not None:
            pulumi.set(__self__, "execution_controls", execution_controls)
        if maximum_automatic_attempts is not None:
            pulumi.set(__self__, "maximum_automatic_attempts", maximum_automatic_attempts)
        if parameters is not None:
            pulumi.set(__self__, "parameters", parameters)
        if resource_type is not None:
            pulumi.set(__self__, "resource_type", resource_type)
        if retry_attempt_seconds is not None:
            pulumi.set(__self__, "retry_attempt_seconds", retry_attempt_seconds)
        if target_version is not None:
            pulumi.set(__self__, "target_version", target_version)
    # Each property pair below maps a snake_case Python attribute to its
    # camelCase provider name via @pulumi.getter(name=...).
    @property
    @pulumi.getter(name="configRuleName")
    def config_rule_name(self) -> pulumi.Input[str]:
        """
        Name of the AWS Config rule.
        """
        return pulumi.get(self, "config_rule_name")
    @config_rule_name.setter
    def config_rule_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "config_rule_name", value)
    @property
    @pulumi.getter(name="targetId")
    def target_id(self) -> pulumi.Input[str]:
        """
        Target ID is the name of the public document.
        """
        return pulumi.get(self, "target_id")
    @target_id.setter
    def target_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "target_id", value)
    @property
    @pulumi.getter(name="targetType")
    def target_type(self) -> pulumi.Input[str]:
        """
        Type of the target. Target executes remediation. For example, SSM document.
        """
        return pulumi.get(self, "target_type")
    @target_type.setter
    def target_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "target_type", value)
    @property
    @pulumi.getter
    def automatic(self) -> Optional[pulumi.Input[bool]]:
        """
        Remediation is triggered automatically if `true`.
        """
        return pulumi.get(self, "automatic")
    @automatic.setter
    def automatic(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "automatic", value)
    @property
    @pulumi.getter(name="executionControls")
    def execution_controls(self) -> Optional[pulumi.Input['RemediationConfigurationExecutionControlsArgs']]:
        """
        Configuration block for execution controls. See below.
        """
        return pulumi.get(self, "execution_controls")
    @execution_controls.setter
    def execution_controls(self, value: Optional[pulumi.Input['RemediationConfigurationExecutionControlsArgs']]):
        pulumi.set(self, "execution_controls", value)
    @property
    @pulumi.getter(name="maximumAutomaticAttempts")
    def maximum_automatic_attempts(self) -> Optional[pulumi.Input[int]]:
        """
        Maximum number of failed attempts for auto-remediation. If you do not select a number, the default is 5.
        """
        return pulumi.get(self, "maximum_automatic_attempts")
    @maximum_automatic_attempts.setter
    def maximum_automatic_attempts(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "maximum_automatic_attempts", value)
    @property
    @pulumi.getter
    def parameters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RemediationConfigurationParameterArgs']]]]:
        """
        Can be specified multiple times for each parameter. Each parameter block supports arguments below.
        """
        return pulumi.get(self, "parameters")
    @parameters.setter
    def parameters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RemediationConfigurationParameterArgs']]]]):
        pulumi.set(self, "parameters", value)
    @property
    @pulumi.getter(name="resourceType")
    def resource_type(self) -> Optional[pulumi.Input[str]]:
        """
        Type of resource.
        """
        return pulumi.get(self, "resource_type")
    @resource_type.setter
    def resource_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_type", value)
    @property
    @pulumi.getter(name="retryAttemptSeconds")
    def retry_attempt_seconds(self) -> Optional[pulumi.Input[int]]:
        """
        Maximum time in seconds that AWS Config runs auto-remediation. If you do not select a number, the default is 60 seconds.
        """
        return pulumi.get(self, "retry_attempt_seconds")
    @retry_attempt_seconds.setter
    def retry_attempt_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "retry_attempt_seconds", value)
    @property
    @pulumi.getter(name="targetVersion")
    def target_version(self) -> Optional[pulumi.Input[str]]:
        """
        Version of the target. For example, version of the SSM document
        """
        return pulumi.get(self, "target_version")
    @target_version.setter
    def target_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "target_version", value)
# NOTE: tfgen-generated boilerplate — do not hand-edit logic. Unlike the Args
# class, every field here (including the engine-populated `arn`) is optional:
# this type describes *observed* state used for get()/lookup, not user input.
@pulumi.input_type
class _RemediationConfigurationState:
    def __init__(__self__, *,
                 arn: Optional[pulumi.Input[str]] = None,
                 automatic: Optional[pulumi.Input[bool]] = None,
                 config_rule_name: Optional[pulumi.Input[str]] = None,
                 execution_controls: Optional[pulumi.Input['RemediationConfigurationExecutionControlsArgs']] = None,
                 maximum_automatic_attempts: Optional[pulumi.Input[int]] = None,
                 parameters: Optional[pulumi.Input[Sequence[pulumi.Input['RemediationConfigurationParameterArgs']]]] = None,
                 resource_type: Optional[pulumi.Input[str]] = None,
                 retry_attempt_seconds: Optional[pulumi.Input[int]] = None,
                 target_id: Optional[pulumi.Input[str]] = None,
                 target_type: Optional[pulumi.Input[str]] = None,
                 target_version: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering RemediationConfiguration resources.
        :param pulumi.Input[str] arn: ARN of the Config Remediation Configuration.
        :param pulumi.Input[bool] automatic: Remediation is triggered automatically if `true`.
        :param pulumi.Input[str] config_rule_name: Name of the AWS Config rule.
        :param pulumi.Input['RemediationConfigurationExecutionControlsArgs'] execution_controls: Configuration block for execution controls. See below.
        :param pulumi.Input[int] maximum_automatic_attempts: Maximum number of failed attempts for auto-remediation. If you do not select a number, the default is 5.
        :param pulumi.Input[Sequence[pulumi.Input['RemediationConfigurationParameterArgs']]] parameters: Can be specified multiple times for each parameter. Each parameter block supports arguments below.
        :param pulumi.Input[str] resource_type: Type of resource.
        :param pulumi.Input[int] retry_attempt_seconds: Maximum time in seconds that AWS Config runs auto-remediation. If you do not select a number, the default is 60 seconds.
        :param pulumi.Input[str] target_id: Target ID is the name of the public document.
        :param pulumi.Input[str] target_type: Type of the target. Target executes remediation. For example, SSM document.
        :param pulumi.Input[str] target_version: Version of the target. For example, version of the SSM document
        """
        # Only record keys that were actually supplied; absent keys must not
        # appear in the state table at all.
        if arn is not None:
            pulumi.set(__self__, "arn", arn)
        if automatic is not None:
            pulumi.set(__self__, "automatic", automatic)
        if config_rule_name is not None:
            pulumi.set(__self__, "config_rule_name", config_rule_name)
        if execution_controls is not None:
            pulumi.set(__self__, "execution_controls", execution_controls)
        if maximum_automatic_attempts is not None:
            pulumi.set(__self__, "maximum_automatic_attempts", maximum_automatic_attempts)
        if parameters is not None:
            pulumi.set(__self__, "parameters", parameters)
        if resource_type is not None:
            pulumi.set(__self__, "resource_type", resource_type)
        if retry_attempt_seconds is not None:
            pulumi.set(__self__, "retry_attempt_seconds", retry_attempt_seconds)
        if target_id is not None:
            pulumi.set(__self__, "target_id", target_id)
        if target_type is not None:
            pulumi.set(__self__, "target_type", target_type)
        if target_version is not None:
            pulumi.set(__self__, "target_version", target_version)
    @property
    @pulumi.getter
    def arn(self) -> Optional[pulumi.Input[str]]:
        """
        ARN of the Config Remediation Configuration.
        """
        return pulumi.get(self, "arn")
    @arn.setter
    def arn(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "arn", value)
    @property
    @pulumi.getter
    def automatic(self) -> Optional[pulumi.Input[bool]]:
        """
        Remediation is triggered automatically if `true`.
        """
        return pulumi.get(self, "automatic")
    @automatic.setter
    def automatic(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "automatic", value)
    @property
    @pulumi.getter(name="configRuleName")
    def config_rule_name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the AWS Config rule.
        """
        return pulumi.get(self, "config_rule_name")
    @config_rule_name.setter
    def config_rule_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "config_rule_name", value)
    @property
    @pulumi.getter(name="executionControls")
    def execution_controls(self) -> Optional[pulumi.Input['RemediationConfigurationExecutionControlsArgs']]:
        """
        Configuration block for execution controls. See below.
        """
        return pulumi.get(self, "execution_controls")
    @execution_controls.setter
    def execution_controls(self, value: Optional[pulumi.Input['RemediationConfigurationExecutionControlsArgs']]):
        pulumi.set(self, "execution_controls", value)
    @property
    @pulumi.getter(name="maximumAutomaticAttempts")
    def maximum_automatic_attempts(self) -> Optional[pulumi.Input[int]]:
        """
        Maximum number of failed attempts for auto-remediation. If you do not select a number, the default is 5.
        """
        return pulumi.get(self, "maximum_automatic_attempts")
    @maximum_automatic_attempts.setter
    def maximum_automatic_attempts(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "maximum_automatic_attempts", value)
    @property
    @pulumi.getter
    def parameters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RemediationConfigurationParameterArgs']]]]:
        """
        Can be specified multiple times for each parameter. Each parameter block supports arguments below.
        """
        return pulumi.get(self, "parameters")
    @parameters.setter
    def parameters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['RemediationConfigurationParameterArgs']]]]):
        pulumi.set(self, "parameters", value)
    @property
    @pulumi.getter(name="resourceType")
    def resource_type(self) -> Optional[pulumi.Input[str]]:
        """
        Type of resource.
        """
        return pulumi.get(self, "resource_type")
    @resource_type.setter
    def resource_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "resource_type", value)
    @property
    @pulumi.getter(name="retryAttemptSeconds")
    def retry_attempt_seconds(self) -> Optional[pulumi.Input[int]]:
        """
        Maximum time in seconds that AWS Config runs auto-remediation. If you do not select a number, the default is 60 seconds.
        """
        return pulumi.get(self, "retry_attempt_seconds")
    @retry_attempt_seconds.setter
    def retry_attempt_seconds(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "retry_attempt_seconds", value)
    @property
    @pulumi.getter(name="targetId")
    def target_id(self) -> Optional[pulumi.Input[str]]:
        """
        Target ID is the name of the public document.
        """
        return pulumi.get(self, "target_id")
    @target_id.setter
    def target_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "target_id", value)
    @property
    @pulumi.getter(name="targetType")
    def target_type(self) -> Optional[pulumi.Input[str]]:
        """
        Type of the target. Target executes remediation. For example, SSM document.
        """
        return pulumi.get(self, "target_type")
    @target_type.setter
    def target_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "target_type", value)
    @property
    @pulumi.getter(name="targetVersion")
    def target_version(self) -> Optional[pulumi.Input[str]]:
        """
        Version of the target. For example, version of the SSM document
        """
        return pulumi.get(self, "target_version")
    @target_version.setter
    def target_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "target_version", value)
# NOTE: tfgen-generated resource class — do not hand-edit logic. The two
# @overload __init__ signatures exist only for type checkers; the real
# constructor dispatches to _internal_init via _utilities.get_resource_args_opts.
class RemediationConfiguration(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 automatic: Optional[pulumi.Input[bool]] = None,
                 config_rule_name: Optional[pulumi.Input[str]] = None,
                 execution_controls: Optional[pulumi.Input[pulumi.InputType['RemediationConfigurationExecutionControlsArgs']]] = None,
                 maximum_automatic_attempts: Optional[pulumi.Input[int]] = None,
                 parameters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RemediationConfigurationParameterArgs']]]]] = None,
                 resource_type: Optional[pulumi.Input[str]] = None,
                 retry_attempt_seconds: Optional[pulumi.Input[int]] = None,
                 target_id: Optional[pulumi.Input[str]] = None,
                 target_type: Optional[pulumi.Input[str]] = None,
                 target_version: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides an AWS Config Remediation Configuration.
        > **Note:** Config Remediation Configuration requires an existing Config Rule to be present.
        ## Example Usage
        AWS managed rules can be used by setting the source owner to `AWS` and the source identifier to the name of the managed rule. More information about AWS managed rules can be found in the [AWS Config Developer Guide](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html).
        ```python
        import pulumi
        import pulumi_aws as aws
        this_rule = aws.cfg.Rule("thisRule", source=aws.cfg.RuleSourceArgs(
            owner="AWS",
            source_identifier="S3_BUCKET_VERSIONING_ENABLED",
        ))
        this_remediation_configuration = aws.cfg.RemediationConfiguration("thisRemediationConfiguration",
            config_rule_name=this_rule.name,
            resource_type="AWS::S3::Bucket",
            target_type="SSM_DOCUMENT",
            target_id="AWS-EnableS3BucketEncryption",
            target_version="1",
            parameters=[
                aws.cfg.RemediationConfigurationParameterArgs(
                    name="AutomationAssumeRole",
                    static_value="arn:aws:iam::875924563244:role/security_config",
                ),
                aws.cfg.RemediationConfigurationParameterArgs(
                    name="BucketName",
                    resource_value="RESOURCE_ID",
                ),
                aws.cfg.RemediationConfigurationParameterArgs(
                    name="SSEAlgorithm",
                    static_value="AES256",
                ),
            ],
            automatic=True,
            maximum_automatic_attempts=10,
            retry_attempt_seconds=600,
            execution_controls=aws.cfg.RemediationConfigurationExecutionControlsArgs(
                ssm_controls=aws.cfg.RemediationConfigurationExecutionControlsSsmControlsArgs(
                    concurrent_execution_rate_percentage=25,
                    error_percentage=20,
                ),
            ))
        ```
        ## Import
        Remediation Configurations can be imported using the name config_rule_name, e.g.,
        ```sh
        $ pulumi import aws:cfg/remediationConfiguration:RemediationConfiguration this example
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[bool] automatic: Remediation is triggered automatically if `true`.
        :param pulumi.Input[str] config_rule_name: Name of the AWS Config rule.
        :param pulumi.Input[pulumi.InputType['RemediationConfigurationExecutionControlsArgs']] execution_controls: Configuration block for execution controls. See below.
        :param pulumi.Input[int] maximum_automatic_attempts: Maximum number of failed attempts for auto-remediation. If you do not select a number, the default is 5.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RemediationConfigurationParameterArgs']]]] parameters: Can be specified multiple times for each parameter. Each parameter block supports arguments below.
        :param pulumi.Input[str] resource_type: Type of resource.
        :param pulumi.Input[int] retry_attempt_seconds: Maximum time in seconds that AWS Config runs auto-remediation. If you do not select a number, the default is 60 seconds.
        :param pulumi.Input[str] target_id: Target ID is the name of the public document.
        :param pulumi.Input[str] target_type: Type of the target. Target executes remediation. For example, SSM document.
        :param pulumi.Input[str] target_version: Version of the target. For example, version of the SSM document
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: RemediationConfigurationArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides an AWS Config Remediation Configuration.
        > **Note:** Config Remediation Configuration requires an existing Config Rule to be present.
        ## Example Usage
        AWS managed rules can be used by setting the source owner to `AWS` and the source identifier to the name of the managed rule. More information about AWS managed rules can be found in the [AWS Config Developer Guide](https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_use-managed-rules.html).
        ```python
        import pulumi
        import pulumi_aws as aws
        this_rule = aws.cfg.Rule("thisRule", source=aws.cfg.RuleSourceArgs(
            owner="AWS",
            source_identifier="S3_BUCKET_VERSIONING_ENABLED",
        ))
        this_remediation_configuration = aws.cfg.RemediationConfiguration("thisRemediationConfiguration",
            config_rule_name=this_rule.name,
            resource_type="AWS::S3::Bucket",
            target_type="SSM_DOCUMENT",
            target_id="AWS-EnableS3BucketEncryption",
            target_version="1",
            parameters=[
                aws.cfg.RemediationConfigurationParameterArgs(
                    name="AutomationAssumeRole",
                    static_value="arn:aws:iam::875924563244:role/security_config",
                ),
                aws.cfg.RemediationConfigurationParameterArgs(
                    name="BucketName",
                    resource_value="RESOURCE_ID",
                ),
                aws.cfg.RemediationConfigurationParameterArgs(
                    name="SSEAlgorithm",
                    static_value="AES256",
                ),
            ],
            automatic=True,
            maximum_automatic_attempts=10,
            retry_attempt_seconds=600,
            execution_controls=aws.cfg.RemediationConfigurationExecutionControlsArgs(
                ssm_controls=aws.cfg.RemediationConfigurationExecutionControlsSsmControlsArgs(
                    concurrent_execution_rate_percentage=25,
                    error_percentage=20,
                ),
            ))
        ```
        ## Import
        Remediation Configurations can be imported using the name config_rule_name, e.g.,
        ```sh
        $ pulumi import aws:cfg/remediationConfiguration:RemediationConfiguration this example
        ```
        :param str resource_name: The name of the resource.
        :param RemediationConfigurationArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Runtime dispatcher: accepts either an Args object or keyword
        # properties and normalizes both onto _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(RemediationConfigurationArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 automatic: Optional[pulumi.Input[bool]] = None,
                 config_rule_name: Optional[pulumi.Input[str]] = None,
                 execution_controls: Optional[pulumi.Input[pulumi.InputType['RemediationConfigurationExecutionControlsArgs']]] = None,
                 maximum_automatic_attempts: Optional[pulumi.Input[int]] = None,
                 parameters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RemediationConfigurationParameterArgs']]]]] = None,
                 resource_type: Optional[pulumi.Input[str]] = None,
                 retry_attempt_seconds: Optional[pulumi.Input[int]] = None,
                 target_id: Optional[pulumi.Input[str]] = None,
                 target_type: Optional[pulumi.Input[str]] = None,
                 target_version: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set means "adopt an existing resource": skip property
        # validation entirely and trust the caller-supplied __props__.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = RemediationConfigurationArgs.__new__(RemediationConfigurationArgs)
            __props__.__dict__["automatic"] = automatic
            # Required properties are only enforced when creating from scratch
            # (no URN); during lookups they may legitimately be absent.
            if config_rule_name is None and not opts.urn:
                raise TypeError("Missing required property 'config_rule_name'")
            __props__.__dict__["config_rule_name"] = config_rule_name
            __props__.__dict__["execution_controls"] = execution_controls
            __props__.__dict__["maximum_automatic_attempts"] = maximum_automatic_attempts
            __props__.__dict__["parameters"] = parameters
            __props__.__dict__["resource_type"] = resource_type
            __props__.__dict__["retry_attempt_seconds"] = retry_attempt_seconds
            if target_id is None and not opts.urn:
                raise TypeError("Missing required property 'target_id'")
            __props__.__dict__["target_id"] = target_id
            if target_type is None and not opts.urn:
                raise TypeError("Missing required property 'target_type'")
            __props__.__dict__["target_type"] = target_type
            __props__.__dict__["target_version"] = target_version
            # arn is an engine output; initialize to None so the provider fills it.
            __props__.__dict__["arn"] = None
        super(RemediationConfiguration, __self__).__init__(
            'aws:cfg/remediationConfiguration:RemediationConfiguration',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            arn: Optional[pulumi.Input[str]] = None,
            automatic: Optional[pulumi.Input[bool]] = None,
            config_rule_name: Optional[pulumi.Input[str]] = None,
            execution_controls: Optional[pulumi.Input[pulumi.InputType['RemediationConfigurationExecutionControlsArgs']]] = None,
            maximum_automatic_attempts: Optional[pulumi.Input[int]] = None,
            parameters: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RemediationConfigurationParameterArgs']]]]] = None,
            resource_type: Optional[pulumi.Input[str]] = None,
            retry_attempt_seconds: Optional[pulumi.Input[int]] = None,
            target_id: Optional[pulumi.Input[str]] = None,
            target_type: Optional[pulumi.Input[str]] = None,
            target_version: Optional[pulumi.Input[str]] = None) -> 'RemediationConfiguration':
        """
        Get an existing RemediationConfiguration resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] arn: ARN of the Config Remediation Configuration.
        :param pulumi.Input[bool] automatic: Remediation is triggered automatically if `true`.
        :param pulumi.Input[str] config_rule_name: Name of the AWS Config rule.
        :param pulumi.Input[pulumi.InputType['RemediationConfigurationExecutionControlsArgs']] execution_controls: Configuration block for execution controls. See below.
        :param pulumi.Input[int] maximum_automatic_attempts: Maximum number of failed attempts for auto-remediation. If you do not select a number, the default is 5.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RemediationConfigurationParameterArgs']]]] parameters: Can be specified multiple times for each parameter. Each parameter block supports arguments below.
        :param pulumi.Input[str] resource_type: Type of resource.
        :param pulumi.Input[int] retry_attempt_seconds: Maximum time in seconds that AWS Config runs auto-remediation. If you do not select a number, the default is 60 seconds.
        :param pulumi.Input[str] target_id: Target ID is the name of the public document.
        :param pulumi.Input[str] target_type: Type of the target. Target executes remediation. For example, SSM document.
        :param pulumi.Input[str] target_version: Version of the target. For example, version of the SSM document
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _RemediationConfigurationState.__new__(_RemediationConfigurationState)
        __props__.__dict__["arn"] = arn
        __props__.__dict__["automatic"] = automatic
        __props__.__dict__["config_rule_name"] = config_rule_name
        __props__.__dict__["execution_controls"] = execution_controls
        __props__.__dict__["maximum_automatic_attempts"] = maximum_automatic_attempts
        __props__.__dict__["parameters"] = parameters
        __props__.__dict__["resource_type"] = resource_type
        __props__.__dict__["retry_attempt_seconds"] = retry_attempt_seconds
        __props__.__dict__["target_id"] = target_id
        __props__.__dict__["target_type"] = target_type
        __props__.__dict__["target_version"] = target_version
        return RemediationConfiguration(resource_name, opts=opts, __props__=__props__)
    # Read-only output properties resolved by the Pulumi engine at deploy time.
    @property
    @pulumi.getter
    def arn(self) -> pulumi.Output[str]:
        """
        ARN of the Config Remediation Configuration.
        """
        return pulumi.get(self, "arn")
    @property
    @pulumi.getter
    def automatic(self) -> pulumi.Output[Optional[bool]]:
        """
        Remediation is triggered automatically if `true`.
        """
        return pulumi.get(self, "automatic")
    @property
    @pulumi.getter(name="configRuleName")
    def config_rule_name(self) -> pulumi.Output[str]:
        """
        Name of the AWS Config rule.
        """
        return pulumi.get(self, "config_rule_name")
    @property
    @pulumi.getter(name="executionControls")
    def execution_controls(self) -> pulumi.Output[Optional['outputs.RemediationConfigurationExecutionControls']]:
        """
        Configuration block for execution controls. See below.
        """
        return pulumi.get(self, "execution_controls")
    @property
    @pulumi.getter(name="maximumAutomaticAttempts")
    def maximum_automatic_attempts(self) -> pulumi.Output[Optional[int]]:
        """
        Maximum number of failed attempts for auto-remediation. If you do not select a number, the default is 5.
        """
        return pulumi.get(self, "maximum_automatic_attempts")
    @property
    @pulumi.getter
    def parameters(self) -> pulumi.Output[Optional[Sequence['outputs.RemediationConfigurationParameter']]]:
        """
        Can be specified multiple times for each parameter. Each parameter block supports arguments below.
        """
        return pulumi.get(self, "parameters")
    @property
    @pulumi.getter(name="resourceType")
    def resource_type(self) -> pulumi.Output[Optional[str]]:
        """
        Type of resource.
        """
        return pulumi.get(self, "resource_type")
    @property
    @pulumi.getter(name="retryAttemptSeconds")
    def retry_attempt_seconds(self) -> pulumi.Output[Optional[int]]:
        """
        Maximum time in seconds that AWS Config runs auto-remediation. If you do not select a number, the default is 60 seconds.
        """
        return pulumi.get(self, "retry_attempt_seconds")
    @property
    @pulumi.getter(name="targetId")
    def target_id(self) -> pulumi.Output[str]:
        """
        Target ID is the name of the public document.
        """
        return pulumi.get(self, "target_id")
    @property
    @pulumi.getter(name="targetType")
    def target_type(self) -> pulumi.Output[str]:
        """
        Type of the target. Target executes remediation. For example, SSM document.
        """
        return pulumi.get(self, "target_type")
    @property
    @pulumi.getter(name="targetVersion")
    def target_version(self) -> pulumi.Output[Optional[str]]:
        """
        Version of the target. For example, version of the SSM document
        """
        return pulumi.get(self, "target_version")
| 47.5
| 321
| 0.668406
| 3,683
| 33,725
| 5.898181
| 0.06625
| 0.077475
| 0.074345
| 0.04051
| 0.900612
| 0.887354
| 0.874097
| 0.86558
| 0.855637
| 0.84132
| 0
| 0.003028
| 0.236086
| 33,725
| 709
| 322
| 47.566996
| 0.840158
| 0.356294
| 0
| 0.769648
| 1
| 0
| 0.148535
| 0.073811
| 0
| 0
| 0
| 0
| 0
| 1
| 0.162602
| false
| 0.00271
| 0.01897
| 0
| 0.279133
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
18f6d2b58761190e2fe24b245945563709aab8e0
| 6,269
|
py
|
Python
|
loldib/getratings/models/NA/na_ekko/na_ekko_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_ekko/na_ekko_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_ekko/na_ekko_mid.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Auto-generated placeholder models: one empty Ratings subclass per opposing
# champion for Ekko played mid-lane on NA. The class *name* is the only
# payload — presumably resolved by name elsewhere in the package (e.g. via
# getattr/introspection); TODO confirm against the consumer before renaming.
class NA_Ekko_Mid_Aatrox(Ratings):
    pass
class NA_Ekko_Mid_Ahri(Ratings):
    pass
class NA_Ekko_Mid_Akali(Ratings):
    pass
class NA_Ekko_Mid_Alistar(Ratings):
    pass
class NA_Ekko_Mid_Amumu(Ratings):
    pass
class NA_Ekko_Mid_Anivia(Ratings):
    pass
class NA_Ekko_Mid_Annie(Ratings):
    pass
class NA_Ekko_Mid_Ashe(Ratings):
    pass
class NA_Ekko_Mid_AurelionSol(Ratings):
    pass
class NA_Ekko_Mid_Azir(Ratings):
    pass
class NA_Ekko_Mid_Bard(Ratings):
    pass
class NA_Ekko_Mid_Blitzcrank(Ratings):
    pass
class NA_Ekko_Mid_Brand(Ratings):
    pass
class NA_Ekko_Mid_Braum(Ratings):
    pass
class NA_Ekko_Mid_Caitlyn(Ratings):
    pass
class NA_Ekko_Mid_Camille(Ratings):
    pass
class NA_Ekko_Mid_Cassiopeia(Ratings):
    pass
class NA_Ekko_Mid_Chogath(Ratings):
    pass
class NA_Ekko_Mid_Corki(Ratings):
    pass
class NA_Ekko_Mid_Darius(Ratings):
    pass
class NA_Ekko_Mid_Diana(Ratings):
    pass
class NA_Ekko_Mid_Draven(Ratings):
    pass
class NA_Ekko_Mid_DrMundo(Ratings):
    pass
class NA_Ekko_Mid_Ekko(Ratings):
    pass
class NA_Ekko_Mid_Elise(Ratings):
    pass
class NA_Ekko_Mid_Evelynn(Ratings):
    pass
class NA_Ekko_Mid_Ezreal(Ratings):
    pass
class NA_Ekko_Mid_Fiddlesticks(Ratings):
    pass
class NA_Ekko_Mid_Fiora(Ratings):
    pass
class NA_Ekko_Mid_Fizz(Ratings):
    pass
class NA_Ekko_Mid_Galio(Ratings):
    pass
class NA_Ekko_Mid_Gangplank(Ratings):
    pass
class NA_Ekko_Mid_Garen(Ratings):
    pass
class NA_Ekko_Mid_Gnar(Ratings):
    pass
class NA_Ekko_Mid_Gragas(Ratings):
    pass
class NA_Ekko_Mid_Graves(Ratings):
    pass
class NA_Ekko_Mid_Hecarim(Ratings):
    pass
class NA_Ekko_Mid_Heimerdinger(Ratings):
    pass
class NA_Ekko_Mid_Illaoi(Ratings):
    pass
class NA_Ekko_Mid_Irelia(Ratings):
    pass
class NA_Ekko_Mid_Ivern(Ratings):
    pass
class NA_Ekko_Mid_Janna(Ratings):
    pass
class NA_Ekko_Mid_JarvanIV(Ratings):
    pass
class NA_Ekko_Mid_Jax(Ratings):
    pass
class NA_Ekko_Mid_Jayce(Ratings):
    pass
class NA_Ekko_Mid_Jhin(Ratings):
    pass
class NA_Ekko_Mid_Jinx(Ratings):
    pass
class NA_Ekko_Mid_Kalista(Ratings):
    pass
class NA_Ekko_Mid_Karma(Ratings):
    pass
class NA_Ekko_Mid_Karthus(Ratings):
    pass
class NA_Ekko_Mid_Kassadin(Ratings):
    pass
class NA_Ekko_Mid_Katarina(Ratings):
    pass
class NA_Ekko_Mid_Kayle(Ratings):
    pass
class NA_Ekko_Mid_Kayn(Ratings):
    pass
class NA_Ekko_Mid_Kennen(Ratings):
    pass
class NA_Ekko_Mid_Khazix(Ratings):
    pass
class NA_Ekko_Mid_Kindred(Ratings):
    pass
class NA_Ekko_Mid_Kled(Ratings):
    pass
class NA_Ekko_Mid_KogMaw(Ratings):
    pass
class NA_Ekko_Mid_Leblanc(Ratings):
    pass
class NA_Ekko_Mid_LeeSin(Ratings):
    pass
class NA_Ekko_Mid_Leona(Ratings):
    pass
class NA_Ekko_Mid_Lissandra(Ratings):
    pass
class NA_Ekko_Mid_Lucian(Ratings):
pass
class NA_Ekko_Mid_Lulu(Ratings):
pass
class NA_Ekko_Mid_Lux(Ratings):
pass
class NA_Ekko_Mid_Malphite(Ratings):
pass
class NA_Ekko_Mid_Malzahar(Ratings):
pass
class NA_Ekko_Mid_Maokai(Ratings):
pass
class NA_Ekko_Mid_MasterYi(Ratings):
pass
class NA_Ekko_Mid_MissFortune(Ratings):
pass
class NA_Ekko_Mid_MonkeyKing(Ratings):
pass
class NA_Ekko_Mid_Mordekaiser(Ratings):
pass
class NA_Ekko_Mid_Morgana(Ratings):
pass
class NA_Ekko_Mid_Nami(Ratings):
pass
class NA_Ekko_Mid_Nasus(Ratings):
pass
class NA_Ekko_Mid_Nautilus(Ratings):
pass
class NA_Ekko_Mid_Nidalee(Ratings):
pass
class NA_Ekko_Mid_Nocturne(Ratings):
pass
class NA_Ekko_Mid_Nunu(Ratings):
pass
class NA_Ekko_Mid_Olaf(Ratings):
pass
class NA_Ekko_Mid_Orianna(Ratings):
pass
class NA_Ekko_Mid_Ornn(Ratings):
pass
class NA_Ekko_Mid_Pantheon(Ratings):
pass
class NA_Ekko_Mid_Poppy(Ratings):
pass
class NA_Ekko_Mid_Quinn(Ratings):
pass
class NA_Ekko_Mid_Rakan(Ratings):
pass
class NA_Ekko_Mid_Rammus(Ratings):
pass
class NA_Ekko_Mid_RekSai(Ratings):
pass
class NA_Ekko_Mid_Renekton(Ratings):
pass
class NA_Ekko_Mid_Rengar(Ratings):
pass
class NA_Ekko_Mid_Riven(Ratings):
pass
class NA_Ekko_Mid_Rumble(Ratings):
pass
class NA_Ekko_Mid_Ryze(Ratings):
pass
class NA_Ekko_Mid_Sejuani(Ratings):
pass
class NA_Ekko_Mid_Shaco(Ratings):
pass
class NA_Ekko_Mid_Shen(Ratings):
pass
class NA_Ekko_Mid_Shyvana(Ratings):
pass
class NA_Ekko_Mid_Singed(Ratings):
pass
class NA_Ekko_Mid_Sion(Ratings):
pass
class NA_Ekko_Mid_Sivir(Ratings):
pass
class NA_Ekko_Mid_Skarner(Ratings):
pass
class NA_Ekko_Mid_Sona(Ratings):
pass
class NA_Ekko_Mid_Soraka(Ratings):
pass
class NA_Ekko_Mid_Swain(Ratings):
pass
class NA_Ekko_Mid_Syndra(Ratings):
pass
class NA_Ekko_Mid_TahmKench(Ratings):
pass
class NA_Ekko_Mid_Taliyah(Ratings):
pass
class NA_Ekko_Mid_Talon(Ratings):
pass
class NA_Ekko_Mid_Taric(Ratings):
pass
class NA_Ekko_Mid_Teemo(Ratings):
pass
class NA_Ekko_Mid_Thresh(Ratings):
pass
class NA_Ekko_Mid_Tristana(Ratings):
pass
class NA_Ekko_Mid_Trundle(Ratings):
pass
class NA_Ekko_Mid_Tryndamere(Ratings):
pass
class NA_Ekko_Mid_TwistedFate(Ratings):
pass
class NA_Ekko_Mid_Twitch(Ratings):
pass
class NA_Ekko_Mid_Udyr(Ratings):
pass
class NA_Ekko_Mid_Urgot(Ratings):
pass
class NA_Ekko_Mid_Varus(Ratings):
pass
class NA_Ekko_Mid_Vayne(Ratings):
pass
class NA_Ekko_Mid_Veigar(Ratings):
pass
class NA_Ekko_Mid_Velkoz(Ratings):
pass
class NA_Ekko_Mid_Vi(Ratings):
pass
class NA_Ekko_Mid_Viktor(Ratings):
pass
class NA_Ekko_Mid_Vladimir(Ratings):
pass
class NA_Ekko_Mid_Volibear(Ratings):
pass
class NA_Ekko_Mid_Warwick(Ratings):
pass
class NA_Ekko_Mid_Xayah(Ratings):
pass
class NA_Ekko_Mid_Xerath(Ratings):
pass
class NA_Ekko_Mid_XinZhao(Ratings):
pass
class NA_Ekko_Mid_Yasuo(Ratings):
pass
class NA_Ekko_Mid_Yorick(Ratings):
pass
class NA_Ekko_Mid_Zac(Ratings):
pass
class NA_Ekko_Mid_Zed(Ratings):
pass
class NA_Ekko_Mid_Ziggs(Ratings):
pass
class NA_Ekko_Mid_Zilean(Ratings):
pass
class NA_Ekko_Mid_Zyra(Ratings):
pass
| 15.033573
| 46
| 0.75642
| 972
| 6,269
| 4.452675
| 0.151235
| 0.223198
| 0.350739
| 0.446396
| 0.791359
| 0.791359
| 0
| 0
| 0
| 0
| 0
| 0
| 0.177221
| 6,269
| 416
| 47
| 15.069712
| 0.839085
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
e1411eabc560ed45b60dc48e362e428820245d99
| 220
|
py
|
Python
|
whatthefood/data/preprocessing/__init__.py
|
lychanl/WhatTheFood
|
94b6eec2c306e7e55b19395cde207d6e6beec7fe
|
[
"MIT"
] | null | null | null |
whatthefood/data/preprocessing/__init__.py
|
lychanl/WhatTheFood
|
94b6eec2c306e7e55b19395cde207d6e6beec7fe
|
[
"MIT"
] | null | null | null |
whatthefood/data/preprocessing/__init__.py
|
lychanl/WhatTheFood
|
94b6eec2c306e7e55b19395cde207d6e6beec7fe
|
[
"MIT"
] | null | null | null |
from whatthefood.data.preprocessing.preprocessor import Preprocessor
from whatthefood.data.preprocessing.scaling import ScalePreprocessor
from whatthefood.data.preprocessing.mb_samples_preprocessing import yolo_flip_out
| 55
| 81
| 0.904545
| 25
| 220
| 7.8
| 0.52
| 0.230769
| 0.292308
| 0.492308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.054545
| 220
| 3
| 82
| 73.333333
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e14f23697e8d1781dc02081fe44a6841b5f9cf0e
| 6,299
|
py
|
Python
|
STATIC_ROUTING/add_ipv4.py
|
arista-netdevops-community/arista_eos_automation_with_ncclient
|
6aa1a847a79f8230f84a135c959f155c685af107
|
[
"Apache-2.0"
] | 5
|
2020-09-19T05:53:19.000Z
|
2022-01-09T13:18:00.000Z
|
STATIC_ROUTING/add_ipv4.py
|
arista-netdevops-community/arista_eos_automation_with_ncclient
|
6aa1a847a79f8230f84a135c959f155c685af107
|
[
"Apache-2.0"
] | null | null | null |
STATIC_ROUTING/add_ipv4.py
|
arista-netdevops-community/arista_eos_automation_with_ncclient
|
6aa1a847a79f8230f84a135c959f155c685af107
|
[
"Apache-2.0"
] | 3
|
2020-07-07T16:23:51.000Z
|
2020-12-23T15:37:17.000Z
|
from ncclient import manager

# NOTE(review): credentials and target host are hard-coded in plain text and
# hostkey_verify=False disables SSH host-key checking. Acceptable for a lab
# demo, but never commit real credentials — load them from the environment or
# a config file, and enable host-key verification outside the lab.
eos=manager.connect(host="10.81.108.236", port="22", timeout=30, username="cvpadmin", password="arista", hostkey_verify=False)
###################################################################################
# #
# This example will create an IPv4 static route similar to following on EOS #
# #
# ip route 11.11.11.11/32 10.81.108.193 #
# #
###################################################################################
# OpenConfig network-instances payload: static route in the default VRF.
default_VRF_conf = '''
<config>
<network-instances>
<network-instance>
<name>default</name>
<protocols>
<protocol>
<identifier>STATIC</identifier>
<name>STATIC</name>
<config>
<identifier>STATIC</identifier>
<name>STATIC</name>
</config>
<static-routes>
<static>
<prefix>11.11.11.11/32</prefix>
<config>
<prefix>11.11.11.11/32</prefix>
</config>
<next-hops>
<next-hop>
<index>AUTO_1_10-81-108-193</index>
<config>
<index>AUTO_1_10-81-108-193</index>
<metric>1</metric>
<next-hop>10.81.108.193</next-hop>
</config>
</next-hop>
</next-hops>
</static>
</static-routes>
</protocol>
</protocols>
</network-instance>
</network-instances>
</config>
'''
###################################################################################
# #
# This example will create an IPv4 static route similar to following on EOS #
# #
# ip route vrf management 11.11.11.11/32 10.85.128.1 #
# #
###################################################################################
# Same route shape, but placed in the non-default "management" VRF.
non_default_VRF_conf = '''
<config>
<network-instances>
<network-instance>
<name>management</name>
<protocols>
<protocol>
<identifier>STATIC</identifier>
<name>STATIC</name>
<config>
<identifier>STATIC</identifier>
<name>STATIC</name>
</config>
<static-routes>
<static>
<prefix>11.11.11.11/32</prefix>
<config>
<prefix>11.11.11.11/32</prefix>
</config>
<next-hops>
<next-hop>
<index>AUTO_1_11.11.11.11</index>
<config>
<index>AUTO_1_11.11.11.11</index>
<metric>1</metric>
<next-hop>10.85.128.1</next-hop>
</config>
</next-hop>
</next-hops>
</static>
</static-routes>
</protocol>
</protocols>
</network-instance>
</network-instances>
</config>
'''
###################################################################################
# #
# This example will create an IPv4 static route similar to following on EOS #
# #
# ip route vrf management 11.11.11.11/32 Null0 #
# #
###################################################################################
# Drop route: next-hop DROP corresponds to Null0 on EOS.
null0_route = '''
<config>
<network-instances>
<network-instance>
<name>management</name>
<protocols>
<protocol>
<identifier>STATIC</identifier>
<name>STATIC</name>
<config>
<identifier>STATIC</identifier>
<name>STATIC</name>
</config>
<static-routes>
<static>
<prefix>11.11.11.11/32</prefix>
<config>
<prefix>11.11.11.11/32</prefix>
</config>
<next-hops>
<next-hop>
<index>AUTO_DROP_ROUTE</index>
<config>
<index>AUTO_DROP_ROUTE</index>
<metric>1</metric>
<next-hop>DROP</next-hop>
</config>
</next-hop>
</next-hops>
</static>
</static-routes>
</protocol>
</protocols>
</network-instance>
</network-instances>
</config>
'''
# Push the Null0 example into the running configuration with a merge
# operation; swap `null0_route` for one of the other payloads above to
# apply a different example, then print the RPC reply and close the session.
configuration = eos.edit_config(target = "running", config = null0_route, default_operation="merge")
print(configuration)
eos.close_session()
| 43.743056
| 126
| 0.297349
| 383
| 6,299
| 4.830287
| 0.190601
| 0.071351
| 0.071351
| 0.047568
| 0.842703
| 0.818919
| 0.797838
| 0.774054
| 0.732973
| 0.695676
| 0
| 0.065015
| 0.538498
| 6,299
| 144
| 127
| 43.743056
| 0.571379
| 0.161295
| 0
| 0.896552
| 0
| 0
| 0.920562
| 0.154707
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.008621
| 0.008621
| 0
| 0.008621
| 0.008621
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e185243e04173667b2313ad6c23272934d10bae5
| 106
|
py
|
Python
|
lib/config_utils/__init__.py
|
FanKuan44/TENAS
|
eb70174626649ad2852924ca28c4cabe25fe03d4
|
[
"MIT"
] | 138
|
2021-01-12T19:43:51.000Z
|
2022-03-28T02:30:58.000Z
|
util/net/config_utils/__init__.py
|
ELO-Lab/MD-MOENAS
|
edd6ec8c3f89cfbe9674873425c5056e72899edb
|
[
"MIT"
] | 11
|
2021-03-01T08:19:00.000Z
|
2021-08-25T14:46:18.000Z
|
util/net/config_utils/__init__.py
|
ELO-Lab/MD-MOENAS
|
edd6ec8c3f89cfbe9674873425c5056e72899edb
|
[
"MIT"
] | 21
|
2021-01-14T00:47:54.000Z
|
2022-03-29T07:14:41.000Z
|
from .configure_utils import load_config, load_config_dict, merge_config_dict, dict2config, configure2str
| 53
| 105
| 0.877358
| 14
| 106
| 6.214286
| 0.714286
| 0.229885
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020408
| 0.075472
| 106
| 1
| 106
| 106
| 0.867347
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e1b910490682ea365094f70644fb04fc5abd98e7
| 43
|
py
|
Python
|
slashx2.py
|
joshavenue/TIL-python
|
9389260ae19657f3f70b90cca1b2cc23c5618a8d
|
[
"MIT"
] | null | null | null |
slashx2.py
|
joshavenue/TIL-python
|
9389260ae19657f3f70b90cca1b2cc23c5618a8d
|
[
"MIT"
] | null | null | null |
slashx2.py
|
joshavenue/TIL-python
|
9389260ae19657f3f70b90cca1b2cc23c5618a8d
|
[
"MIT"
] | null | null | null |
# TIL: `/` is true division (always a float in Python 3), while `//` is
# floor division (an int when both operands are ints).
#
# The original file immediately overwrote each result with a literal
# "expected value" (x = 3.333333, y = 3), which discarded the computed
# results and defeated the demonstration; the expected values now live in
# comments instead.
x = 10 / 3   # true division  -> 3.3333333333333335 (float)
y = 10 // 3  # floor division -> 3 (int)
| 7.166667
| 12
| 0.418605
| 11
| 43
| 1.636364
| 0.454545
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.538462
| 0.395349
| 43
| 5
| 13
| 8.6
| 0.153846
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e1cb7e410795101740f749c09c39ab61379ca3d5
| 20,060
|
py
|
Python
|
src/svgp.py
|
harrisonzhu508/MVBAgg
|
ad2781af7faffeb18ca3613f0b619461291037d2
|
[
"MIT"
] | null | null | null |
src/svgp.py
|
harrisonzhu508/MVBAgg
|
ad2781af7faffeb18ca3613f0b619461291037d2
|
[
"MIT"
] | null | null | null |
src/svgp.py
|
harrisonzhu508/MVBAgg
|
ad2781af7faffeb18ca3613f0b619461291037d2
|
[
"MIT"
] | null | null | null |
import tensorflow as tf
from gpflow.kullback_leiblers import gauss_kl
from gpflow.models import (
SVGP,
GPModel,
ExternalDataTrainingLossMixin,
)
from gpflow.models.util import inducingpoint_wrapper
from gpflow import Parameter
from gpflow.config import default_float
from gpflow.utilities import positive, triangular
from gpflow import posteriors
from src.posteriors import MVBAggPosterior, VBAggPosterior
import numpy as np
class VBagg(SVGP):
    """
    This is the VBagg.

    A sparse variational GP whose predictions are aggregated over bags of
    inputs before the likelihood is evaluated: each bag supplies inputs ``X``
    and weights ``w``, and the weighted aggregate prediction is compared
    against the bag-level label ``Y``.
    """

    def __init__(
        self,
        kernel,
        likelihood,
        inducing_variable,
        mean_function=None,
        num_latent_gps: int = 1,
        q_diag: bool = False,
        q_mu=None,
        q_sqrt=None,
        whiten: bool = True,
        num_data=None,
    ):
        """
        Modified from https://gpflow.readthedocs.io/en/master/notebooks/advanced/gps_for_big_data.html

        :param num_data: total number of training observations, used to
            rescale the minibatch ELBO; ``None`` disables rescaling.
        """
        # init the super class, accept args
        super().__init__(
            kernel,
            likelihood,
            inducing_variable=inducing_variable,
            mean_function=mean_function,
            num_latent_gps=num_latent_gps,
        )
        self.num_data = num_data
        self.q_diag = q_diag
        self.whiten = whiten
        self.inducing_variable = inducingpoint_wrapper(inducing_variable)

        # init variational parameters
        num_inducing = len(self.inducing_variable)
        self._init_variational_parameters(num_inducing, q_mu, q_sqrt, q_diag)

    def elbo(self, data) -> tf.Tensor:
        """
        This gives a variational bound (the evidence lower bound or ELBO) on
        the log marginal likelihood of the model.

        :param data: tuple ``(N, w, X, Y)`` of bag sizes, weights, inputs and
            bag labels. ``Y`` is indexed as ``Y[:, :, 0]``, i.e. assumed to
            carry a single trailing output dimension.
        """
        N, w, X, Y = data
        kl = self.prior_kl()
        f_mean, f_var = self.predict_aggregated(w, X)
        var_exp = self.likelihood.variational_expectations(
            f_mean,
            f_var,
            Y[:, :, 0],
        )
        if self.num_data is not None:
            # Rescale the minibatch expectation up to the full dataset size.
            num_data = tf.cast(self.num_data, kl.dtype)
            minibatch_size = tf.cast(tf.shape(X)[0], kl.dtype)
            scale = num_data / minibatch_size
        else:
            scale = tf.cast(1.0, kl.dtype)
        return tf.reduce_sum(var_exp) * scale - kl

    def predict_aggregated(
        self, w, X, full_cov: bool = False, full_output_cov: bool = False
    ):
        """Aggregated posterior mean/variance for weighted bags ``(w, X)``."""
        return self.posterior(
            posteriors.PrecomputeCacheType.NOCACHE
        ).fused_predict_aggregated(
            w, X, full_cov=full_cov, full_output_cov=full_output_cov
        )

    def predict_aggregated_i(
        self,
        w: np.ndarray,
        X: np.ndarray,
        i: int,
        full_cov: bool = False,
        full_output_cov: bool = False,
    ):
        """Aggregated prediction using only the ``i``-th kernel component."""
        return self.posterior(
            posteriors.PrecomputeCacheType.NOCACHE
        ).fused_predict_aggregated_i(
            w, X, i, full_cov=full_cov, full_output_cov=full_output_cov
        )

    def predict_f(self, Xnew, full_cov: bool = False, full_output_cov: bool = False):
        """Pointwise (non-aggregated) posterior prediction at ``Xnew``."""
        # NOTE: `assert` is stripped under `python -O`; kept for interface
        # compatibility with existing callers.
        assert (
            tf.shape(Xnew)[-1] == self.inducing_variable.Z.shape[1]
        ), f"Input X has to have last dimension {self.inducing_variable.Z.shape[1]}"
        return self.posterior(posteriors.PrecomputeCacheType.NOCACHE).fused_predict_f(
            Xnew, full_cov=full_cov, full_output_cov=full_output_cov
        )

    def predict_f_i(
        self, Xnew: np.ndarray, i: int, full_cov=False, full_output_cov=False
    ):
        """Pointwise prediction using only the ``i``-th kernel component."""
        assert tf.shape(Xnew)[-1] == len(
            self.kernel.kernels[i].active_dims
        ), f"Input X has to have last dimension {len(self.kernel.kernels[i].active_dims)}"
        return self.posterior(posteriors.PrecomputeCacheType.NOCACHE).fused_predict_f_i(
            Xnew, i, full_cov=full_cov, full_output_cov=full_output_cov
        )

    def posterior(self, precompute_cache=posteriors.PrecomputeCacheType.TENSOR):
        """
        Create the Posterior object which contains precomputed matrices for
        faster prediction.

        precompute_cache has three settings:

        - `PrecomputeCacheType.TENSOR` (or `"tensor"`): Precomputes the cached
          quantities and stores them as tensors (which allows differentiating
          through the prediction). This is the default.
        - `PrecomputeCacheType.VARIABLE` (or `"variable"`): Precomputes the cached
          quantities and stores them as variables, which allows for updating
          their values without changing the compute graph (relevant for AOT
          compilation).
        - `PrecomputeCacheType.NOCACHE` (or `"nocache"` or `None`): Avoids
          immediate cache computation. This is useful for avoiding extraneous
          computations when you only want to call the posterior's
          `fused_predict_f` method.
        """
        return VBAggPosterior(
            kernel=self.kernel,
            inducing_variable=self.inducing_variable,
            q_mu=self.q_mu,
            q_sqrt=self.q_sqrt,
            whiten=self.whiten,
            mean_function=self.mean_function,
            precompute_cache=precompute_cache,
        )

    def predict_log_density(
        self, f_mean, f_var, Ynew, full_cov: bool = False, full_output_cov: bool = False
    ) -> tf.Tensor:
        """
        Compute the log density of the data at the new data points.
        """
        if full_cov or full_output_cov:
            # See https://github.com/GPflow/GPflow/issues/1461
            raise NotImplementedError(
                "The predict_log_density method currently supports only the argument values full_cov=False and full_output_cov=False"
            )
        return self.likelihood.predict_log_density(f_mean, f_var, Ynew)

    def compute_sobol(self, df_scaled, list_columns):
        """Estimate first- and second-order Sobol sensitivity indices.

        :param df_scaled: dataframe containing all the data used for Sobol
            estimation (already scaled to the kernel's input space).
        :param list_columns: list of column-name lists, one per kernel
            component.
        :return: ``(sobol, variance_full)`` where ``sobol`` maps ``"i"``
            (first order) and ``"i-j"`` (second order) keys to indices.
        """
        # alpha = K_ZZ^{-1} @ m
        alpha = self.posterior().alpha
        Z = self.inducing_variable.Z
        # Flatten the per-component column lists. (The original looped
        # `for list in list_columns`, shadowing the builtin `list`.)
        all_features = [col for cols in list_columns for col in cols]
        Kmn = self.kernel(df_scaled[all_features].values, Z)
        alpha_j = Kmn @ alpha
        variance_full = (
            np.mean(alpha_j ** 2)
            - np.mean(alpha_j) ** 2
            + self.likelihood.variance.numpy()
        )
        alpha_list = []
        # store posterior means for each component
        # To compute E_{X1,..,Xd}[mean|X_l], we first calculate E_{X_l}[mean^l] for each l
        for i, cols in enumerate(list_columns):
            Z_tmp = tf.gather(Z, indices=self.kernel.kernels[i].active_dims, axis=1)
            Kmn = self.kernel.kernels[i].K(df_scaled[cols].values, Z_tmp)
            alpha_j = Kmn @ alpha
            alpha_list.append(alpha_j)
        sobol = {}
        # compute the 1st order Sobols
        for i in range(len(list_columns)):
            sobol[f"{i}"] = (
                np.mean(alpha_list[i] ** 2) - np.mean(alpha_list[i]) ** 2
            ) / variance_full
        # compute the 2nd order Sobols
        for i in range(len(list_columns)):
            for j in range(i + 1, len(list_columns)):
                sobol[f"{i}-{j}"] = (
                    np.mean(
                        2
                        * (alpha_list[i] - np.mean(alpha_list[i]))
                        * (alpha_list[j] - np.mean(alpha_list[j]))
                    )
                    / variance_full
                )
        return sobol, variance_full
class MVBAgg(GPModel, ExternalDataTrainingLossMixin):
    """
    This is the MVBAgg.

    Multi-resolution variational Bayesian aggregation: a GP model whose
    likelihood is evaluated on predictions aggregated across
    ``num_resolution`` input resolutions per bag.
    """

    def __init__(
        self,
        kernel,
        likelihood,
        inducing_variable,
        num_resolution,
        mean_function=None,
        num_latent_gps: int = 1,
        q_diag: bool = False,
        q_mu=None,
        q_sqrt=None,
        whiten: bool = True,
        num_data=None,
    ):
        """
        Modified from https://gpflow.readthedocs.io/en/master/notebooks/advanced/gps_for_big_data.html

        :param num_resolution: number of input resolutions per bag.
        :param num_data: total number of training observations, used to
            rescale the minibatch ELBO; ``None`` disables rescaling.
        """
        # init the super class, accept args
        super().__init__(
            kernel,
            likelihood,
            mean_function=mean_function,
            num_latent_gps=num_latent_gps,
        )
        self.num_data = num_data
        self.q_diag = q_diag
        self.whiten = whiten
        self.num_resolution = num_resolution
        self.inducing_variable = inducingpoint_wrapper(inducing_variable)

        num_inducing = inducing_variable.shape[0]
        self._init_variational_parameters(num_inducing, q_mu, q_sqrt, q_diag)

    def prior_kl(self) -> tf.Tensor:
        """Only implement whitened variational posterior"""
        return gauss_kl(self.q_mu, self.q_sqrt, None)

    def elbo(self, data) -> tf.Tensor:
        """
        This gives a variational bound (the evidence lower bound or ELBO) on
        the log marginal likelihood of the model.

        :param data: minibatch tuple; ``data[-1]`` is the bag label tensor
            (indexed as ``Y[:, :, 0]``) and ``data[0]`` supplies the batch
            size for ELBO rescaling.
        """
        kl = self.prior_kl()
        f_mean, f_var = self.predict_aggregated(data)
        Y = data[-1]
        var_exp = self.likelihood.variational_expectations(
            f_mean,
            f_var,
            Y[:, :, 0],
        )
        if self.num_data is not None:
            # Rescale the minibatch expectation up to the full dataset size.
            num_data = tf.cast(self.num_data, kl.dtype)
            minibatch_size = tf.cast(tf.shape(data[0])[0], kl.dtype)
            scale = num_data / minibatch_size
        else:
            scale = tf.cast(1.0, kl.dtype)
        return tf.reduce_sum(var_exp) * scale - kl

    def maximum_log_likelihood_objective(self, data) -> tf.Tensor:
        """Training objective required by GPModel: the ELBO."""
        return self.elbo(data)

    def predict_aggregated(
        self, data, full_cov: bool = False, full_output_cov: bool = False
    ):
        """Aggregated posterior mean/variance for a multi-resolution batch."""
        return self.posterior(
            posteriors.PrecomputeCacheType.NOCACHE
        ).fused_predict_aggregated(
            data, full_cov=full_cov, full_output_cov=full_output_cov
        )

    def predict_aggregated_i(
        self,
        w: np.ndarray,
        X: np.ndarray,
        i: int,
        full_cov: bool = False,
        full_output_cov: bool = False,
    ):
        """Aggregated prediction using only the ``i``-th kernel component."""
        return self.posterior(
            posteriors.PrecomputeCacheType.NOCACHE
        ).fused_predict_aggregated_i(
            w, X, i, full_cov=full_cov, full_output_cov=full_output_cov
        )

    def predict_f(self, Xnew, full_cov: bool = False, full_output_cov: bool = False):
        """Pointwise (non-aggregated) posterior prediction at ``Xnew``."""
        # NOTE: `assert` is stripped under `python -O`; kept for interface
        # compatibility with existing callers.
        assert (
            tf.shape(Xnew)[-1] == self.inducing_variable.Z.shape[1]
        ), f"Input X has to have last dimension {self.inducing_variable.Z.shape[1]}"
        return self.posterior(posteriors.PrecomputeCacheType.NOCACHE).fused_predict_f(
            Xnew, full_cov=full_cov, full_output_cov=full_output_cov
        )

    def predict_f_i(
        self, Xnew: np.ndarray, i: int, full_cov=False, full_output_cov=False
    ):
        """Pointwise prediction using only the ``i``-th kernel component."""
        assert tf.shape(Xnew)[-1] == len(
            self.kernel.kernels[i].active_dims
        ), f"Input X has to have last dimension {len(self.kernel.kernels[i].active_dims)}"
        return self.posterior(posteriors.PrecomputeCacheType.NOCACHE).fused_predict_f_i(
            Xnew, i, full_cov=full_cov, full_output_cov=full_output_cov
        )

    def posterior(self, precompute_cache=posteriors.PrecomputeCacheType.TENSOR):
        """
        Create the Posterior object which contains precomputed matrices for
        faster prediction.

        precompute_cache has three settings:

        - `PrecomputeCacheType.TENSOR` (or `"tensor"`): Precomputes the cached
          quantities and stores them as tensors (which allows differentiating
          through the prediction). This is the default.
        - `PrecomputeCacheType.VARIABLE` (or `"variable"`): Precomputes the cached
          quantities and stores them as variables, which allows for updating
          their values without changing the compute graph (relevant for AOT
          compilation).
        - `PrecomputeCacheType.NOCACHE` (or `"nocache"` or `None`): Avoids
          immediate cache computation. This is useful for avoiding extraneous
          computations when you only want to call the posterior's
          `fused_predict_f` method.
        """
        return MVBAggPosterior(
            num_resolution=self.num_resolution,
            kernel=self.kernel,
            inducing_variable=self.inducing_variable,
            q_mu=self.q_mu,
            q_sqrt=self.q_sqrt,
            whiten=self.whiten,
            mean_function=self.mean_function,
            precompute_cache=precompute_cache,
        )

    def predict_log_density(
        self, f_mean, f_var, Ynew, full_cov: bool = False, full_output_cov: bool = False
    ) -> tf.Tensor:
        """
        Compute the log density of the data at the new data points.
        """
        if full_cov or full_output_cov:
            # See https://github.com/GPflow/GPflow/issues/1461
            raise NotImplementedError(
                "The predict_log_density method currently supports only the argument values full_cov=False and full_output_cov=False"
            )
        return self.likelihood.predict_log_density(f_mean, f_var, Ynew)

    def _init_variational_parameters(self, num_inducing, q_mu, q_sqrt, q_diag):
        """
        Constructs the mean and cholesky of the covariance of the variational Gaussian posterior.
        If a user passes values for `q_mu` and `q_sqrt` the routine checks if they have consistent
        and correct shapes. If a user does not specify any values for `q_mu` and `q_sqrt`, the routine
        initializes them, their shape depends on `num_inducing` and `q_diag`.

        Note: most often the comments refer to the number of observations (=output dimensions) with P,
        number of latent GPs with L, and number of inducing points M. Typically P equals L,
        but when certain multioutput kernels are used, this can change.

        Parameters
        ----------
        :param num_inducing: int
            Number of inducing variables, typically refered to as M.
        :param q_mu: np.array or None
            Mean of the variational Gaussian posterior. If None the function will initialise
            the mean with zeros. If not None, the shape of `q_mu` is checked.
        :param q_sqrt: np.array or None
            Cholesky of the covariance of the variational Gaussian posterior.
            If None the function will initialise `q_sqrt` with identity matrix.
            If not None, the shape of `q_sqrt` is checked, depending on `q_diag`.
        :param q_diag: bool
            Used to check if `q_mu` and `q_sqrt` have the correct shape or to
            construct them with the correct shape. If `q_diag` is true,
            `q_sqrt` is two dimensional and only holds the square root of the
            covariance diagonal elements. If False, `q_sqrt` is three dimensional.
        """
        q_mu = np.zeros((num_inducing, self.num_latent_gps)) if q_mu is None else q_mu
        self.q_mu = Parameter(q_mu, dtype=default_float())  # [M, P]

        if q_sqrt is None:
            if self.q_diag:
                ones = np.ones(
                    (num_inducing, self.num_latent_gps), dtype=default_float()
                )
                self.q_sqrt = Parameter(ones, transform=positive())  # [M, P]
            else:
                q_sqrt = [
                    np.eye(num_inducing, dtype=default_float())
                    for _ in range(self.num_latent_gps)
                ]
                q_sqrt = np.array(q_sqrt)
                self.q_sqrt = Parameter(q_sqrt, transform=triangular())  # [P, M, M]
        else:
            if q_diag:
                assert q_sqrt.ndim == 2
                self.num_latent_gps = q_sqrt.shape[1]
                self.q_sqrt = Parameter(q_sqrt, transform=positive())  # [M, L|P]
            else:
                assert q_sqrt.ndim == 3
                self.num_latent_gps = q_sqrt.shape[0]
                num_inducing = q_sqrt.shape[1]
                self.q_sqrt = Parameter(q_sqrt, transform=triangular())  # [L|P, M, M]

    def compute_sobol(self, df_scaled, list_columns):
        """Estimate first- and second-order Sobol sensitivity indices.

        :param df_scaled: dataframe containing all the data used for Sobol
            estimation (already scaled to the kernel's input space).
        :param list_columns: list of column-name lists, one per kernel
            component.
        :return: ``(sobol, variance_full)`` where ``sobol`` maps ``"i"``
            (first order) and ``"i-j"`` (second order) keys to indices.
        """
        # alpha = K_ZZ^{-1} @ m
        alpha = self.posterior().alpha
        Z = self.inducing_variable.Z
        # Flatten the per-component column lists. (The original looped
        # `for list in list_columns`, shadowing the builtin `list`.)
        all_features = [col for cols in list_columns for col in cols]
        Kmn = self.kernel(df_scaled[all_features].values, Z)
        alpha_j = Kmn @ alpha
        variance_full = (
            np.mean(alpha_j ** 2)
            - np.mean(alpha_j) ** 2
            + self.likelihood.variance.numpy()
        )
        alpha_list = []
        # store posterior means for each component
        # To compute E_{X1,..,Xd}[mean|X_l], we first calculate E_{X_l}[mean^l] for each l
        for i, cols in enumerate(list_columns):
            Z_tmp = tf.gather(Z, indices=self.kernel.kernels[i].active_dims, axis=1)
            Kmn = self.kernel.kernels[i].K(df_scaled[cols].values, Z_tmp)
            alpha_j = Kmn @ alpha
            alpha_list.append(alpha_j)
        sobol = {}
        # compute the 1st order Sobols
        for i in range(len(list_columns)):
            sobol[f"{i}"] = (
                np.mean(alpha_list[i] ** 2) - np.mean(alpha_list[i]) ** 2
            ) / variance_full
        # compute the 2nd order Sobols
        for i in range(len(list_columns)):
            for j in range(i + 1, len(list_columns)):
                sobol[f"{i}-{j}"] = (
                    np.mean(
                        2
                        * (alpha_list[i] - np.mean(alpha_list[i]))
                        * (alpha_list[j] - np.mean(alpha_list[j]))
                    )
                    / variance_full
                )
        return sobol, variance_full
class MVBAggBinomial(MVBAgg):
    """MVBAgg variant for a binomial likelihood: the minibatch carries
    per-bag trial counts as its second-to-last element."""

    def elbo(self, data) -> tf.Tensor:
        """
        Variational evidence lower bound (ELBO) on the log marginal
        likelihood of the model; forwards trial counts to the likelihood.
        """
        kl = self.prior_kl()
        f_mean, f_var = self.predict_aggregated(data)
        counts, Y = data[-2], data[-1]
        var_exp = self.likelihood.variational_expectations(
            f_mean, f_var, Y[:, :, 0], counts[:, :, 0]
        )
        if self.num_data is None:
            scale = tf.cast(1.0, kl.dtype)
        else:
            # Rescale the minibatch expectation up to the full dataset size.
            total = tf.cast(self.num_data, kl.dtype)
            batch = tf.cast(tf.shape(data[0])[0], kl.dtype)
            scale = total / batch
        return tf.reduce_sum(var_exp) * scale - kl

    def predict_log_density(
        self,
        f_mean,
        f_var,
        Ynew,
        counts,
        full_cov: bool = False,
        full_output_cov: bool = False,
    ) -> tf.Tensor:
        """
        Log density of the observations ``Ynew`` (with trial ``counts``)
        under the predictive distribution ``(f_mean, f_var)``.
        """
        if full_cov or full_output_cov:
            # See https://github.com/GPflow/GPflow/issues/1461
            raise NotImplementedError(
                "The predict_log_density method currently supports only the argument values full_cov=False and full_output_cov=False"
            )
        return self.likelihood.predict_log_density(f_mean, f_var, Ynew, counts)
class SVGPBinomial(SVGP):
    """SVGP for a binomial likelihood: trial counts ride along as the last
    column of the input matrix and are split off before prediction."""

    def elbo(self, data) -> tf.Tensor:
        """
        Variational evidence lower bound (ELBO) on the log marginal
        likelihood of the model.
        """
        X, Y = data
        # The final input column carries the binomial trial counts.
        counts, X = X[:, -1:], X[:, :-1]
        kl = self.prior_kl()
        f_mean, f_var = self.predict_f(X, full_cov=False, full_output_cov=False)
        var_exp = self.likelihood.variational_expectations(f_mean, f_var, Y, counts)
        if self.num_data is None:
            scale = tf.cast(1.0, kl.dtype)
        else:
            # Rescale the minibatch expectation up to the full dataset size.
            total = tf.cast(self.num_data, kl.dtype)
            batch = tf.cast(tf.shape(X)[0], kl.dtype)
            scale = total / batch
        return tf.reduce_sum(var_exp) * scale - kl
| 37.777778
| 133
| 0.600249
| 2,549
| 20,060
| 4.523342
| 0.120832
| 0.014744
| 0.038335
| 0.022203
| 0.82732
| 0.821943
| 0.814397
| 0.787858
| 0.781266
| 0.776062
| 0
| 0.005273
| 0.30992
| 20,060
| 530
| 134
| 37.849057
| 0.827639
| 0.238185
| 0
| 0.739496
| 0
| 0
| 0.045348
| 0.01484
| 0
| 0
| 0
| 0
| 0.016807
| 1
| 0.067227
| false
| 0
| 0.028011
| 0.014006
| 0.165266
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
83239cf55f4c3b7f67c5ab653df6191d819b4b7f
| 6,372
|
py
|
Python
|
fisher_py/data/ft_average_options.py
|
abdelq/fisher_py
|
befb98732ba7c4e57858d158c68cda09ed829d66
|
[
"MIT"
] | 3
|
2021-11-03T20:55:45.000Z
|
2022-02-01T10:11:47.000Z
|
fisher_py/data/ft_average_options.py
|
abdelq/fisher_py
|
befb98732ba7c4e57858d158c68cda09ed829d66
|
[
"MIT"
] | 2
|
2022-01-28T02:04:21.000Z
|
2022-01-29T01:29:14.000Z
|
fisher_py/data/ft_average_options.py
|
abdelq/fisher_py
|
befb98732ba7c4e57858d158c68cda09ed829d66
|
[
"MIT"
] | 1
|
2022-01-26T23:30:37.000Z
|
2022-01-26T23:30:37.000Z
|
from fisher_py.net_wrapping import NetWrapperBase, ThermoFisher
class FtAverageOptions(NetWrapperBase):
    """
    Python wrapper around the Thermo Fisher .NET ``FtAverageOptions`` type.

    Exposes the knobs that control FT / Orbitrap scan averaging. Each property
    simply forwards to the wrapped .NET object; setters validate the value is
    exactly the expected Python type (``bool`` is rejected for ``int`` fields
    because ``type(value) is int`` is used, not ``isinstance``).
    """

    _wrapped_type = ThermoFisher.CommonCore.Data.Interfaces.FtAverageOptions

    def __init__(self):
        super().__init__()
        # Instantiate a fresh .NET options object to back this wrapper.
        self._wrapped_object = self._wrapped_type()

    @property
    def max_charge_determinations(self) -> int:
        """
        Maximum number of ions (taken in order of decreasing intensity) that
        are passed to the charge-pattern calculation.
        """
        return self._get_wrapped_object_().MaxChargeDeterminations

    @max_charge_determinations.setter
    def max_charge_determinations(self, value: int):
        assert type(value) is int
        wrapped = self._get_wrapped_object_()
        wrapped.MaxChargeDeterminations = value

    @property
    def merge_in_parallel(self) -> bool:
        """
        Whether parallel code may be used for resampling and merging scans.
        Tuning option: permits separate threads to resample profiles.
        """
        return self._get_wrapped_object_().MergeInParallel

    @merge_in_parallel.setter
    def merge_in_parallel(self, value: bool):
        assert type(value) is bool
        wrapped = self._get_wrapped_object_()
        wrapped.MergeInParallel = value

    @property
    def max_scans_merged(self) -> int:
        """
        Maximum number of scans that can be merged at once (per the vendor
        docs this is currently not implemented and the value is ignored).
        When merge_in_parallel is enabled it restricts the number of scans
        merged per group; setting it too large may increase memory use for
        the intermediate result arrays. Default: 10.
        """
        return self._get_wrapped_object_().MaxScansMerged

    @max_scans_merged.setter
    def max_scans_merged(self, value: int):
        assert type(value) is int
        wrapped = self._get_wrapped_object_()
        wrapped.MaxScansMerged = value

    @property
    def merge_task_batching(self) -> int:
        """
        Minimum number of re-sample tasks per thread; tuning parameter used
        when merge_in_parallel is set. Values > 1 reduce threading overhead
        when averaging small batches of scans with low-intensity peaks.
        Default: 2. Only the re-sampling stage is affected — the final merge
        of the re-sampled data is single threaded.
        """
        return self._get_wrapped_object_().MergeTaskBatching

    @merge_task_batching.setter
    def merge_task_batching(self, value: int):
        assert type(value) is int
        wrapped = self._get_wrapped_object_()
        wrapped.MergeTaskBatching = value

    @property
    def use_noise_table_when_available(self) -> bool:
        """
        Whether the averaging algorithm should compute average noise from the
        noise/baseline table read (separately) from the raw file, instead of
        the per-centroid noise values used by default. Per the vendor docs
        this is only effective when data is read via the IRawDataPlus
        interface.
        """
        return self._get_wrapped_object_().UseNoiseTableWhenAvailable

    @use_noise_table_when_available.setter
    def use_noise_table_when_available(self, value: bool):
        assert type(value) is bool
        wrapped = self._get_wrapped_object_()
        wrapped.UseNoiseTableWhenAvailable = value
| 47.552239
| 91
| 0.69774
| 861
| 6,372
| 5.056911
| 0.225319
| 0.032843
| 0.022967
| 0.045935
| 0.903307
| 0.786633
| 0.785714
| 0.770326
| 0.770326
| 0.720257
| 0
| 0.001681
| 0.252982
| 6,372
| 133
| 92
| 47.909774
| 0.913025
| 0.605461
| 0
| 0.243902
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121951
| 1
| 0.268293
| false
| 0
| 0.02439
| 0
| 0.463415
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
833517f333b9edb884dfa51e2dda7217552c94d8
| 773
|
py
|
Python
|
chainercv/utils/testing/assertions/__init__.py
|
dani-lbnl/chainercv
|
223fab7dd0045d57db02041d44368fe3e60ea433
|
[
"MIT"
] | 1
|
2017-09-04T22:03:03.000Z
|
2017-09-04T22:03:03.000Z
|
chainercv/utils/testing/assertions/__init__.py
|
dani-lbnl/chainercv
|
223fab7dd0045d57db02041d44368fe3e60ea433
|
[
"MIT"
] | null | null | null |
chainercv/utils/testing/assertions/__init__.py
|
dani-lbnl/chainercv
|
223fab7dd0045d57db02041d44368fe3e60ea433
|
[
"MIT"
] | null | null | null |
from chainercv.utils.testing.assertions.assert_is_bbox import assert_is_bbox # NOQA
from chainercv.utils.testing.assertions.assert_is_classification_dataset import assert_is_classification_dataset # NOQA
from chainercv.utils.testing.assertions.assert_is_detection_dataset import assert_is_detection_dataset # NOQA
from chainercv.utils.testing.assertions.assert_is_detection_link import assert_is_detection_link # NOQA
from chainercv.utils.testing.assertions.assert_is_image import assert_is_image # NOQA
from chainercv.utils.testing.assertions.assert_is_semantic_segmentation_dataset import assert_is_semantic_segmentation_dataset # NOQA
from chainercv.utils.testing.assertions.assert_is_semantic_segmentation_link import assert_is_semantic_segmentation_link # NOQA
| 96.625
| 134
| 0.891332
| 105
| 773
| 6.161905
| 0.161905
| 0.173107
| 0.194745
| 0.270479
| 0.752705
| 0.62442
| 0.62442
| 0.55796
| 0.412674
| 0.401855
| 0
| 0
| 0.063389
| 773
| 7
| 135
| 110.428571
| 0.893646
| 0.043984
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
8351cb07334501fb766b7ba470a133db3b49896e
| 1,178
|
py
|
Python
|
Chapter 06/Chap06_Example6.6.py
|
Anancha/Programming-Techniques-using-Python
|
e80c329d2a27383909d358741a5cab03cb22fd8b
|
[
"MIT"
] | null | null | null |
Chapter 06/Chap06_Example6.6.py
|
Anancha/Programming-Techniques-using-Python
|
e80c329d2a27383909d358741a5cab03cb22fd8b
|
[
"MIT"
] | null | null | null |
Chapter 06/Chap06_Example6.6.py
|
Anancha/Programming-Techniques-using-Python
|
e80c329d2a27383909d358741a5cab03cb22fd8b
|
[
"MIT"
] | null | null | null |
# Demonstrates re.finditer() with different quantifiers and anchors applied
# to the same subject string. Refactored from eight copy-pasted sections
# (Q1..Q8) into a single data-driven loop; the printed output is unchanged:
# a separator line is printed after every section except the last.
import re

_TEXT = 'xyxxyxxxyxxxxy'
# Patterns in exercise order Q1..Q8.
_PATTERNS = ['x', 'x+', 'x*', 'x?', 'x{3}', 'x{3,5}', '^x', 'y$']

for _qnum, _pattern in enumerate(_PATTERNS, start=1):
    for _match in re.finditer(_pattern, _TEXT):
        print(_match.start(), '.....', _match.group())
    if _qnum < len(_PATTERNS):
        print("------------------")
| 36.8125
| 50
| 0.561969
| 132
| 1,178
| 5.015152
| 0.174242
| 0.132931
| 0.229607
| 0.277946
| 0.924471
| 0.924471
| 0.845921
| 0.845921
| 0.845921
| 0.845921
| 0
| 0.010476
| 0.108659
| 1,178
| 32
| 51
| 36.8125
| 0.62
| 0.013582
| 0
| 0.71875
| 0
| 0
| 0.258874
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.03125
| 0
| 0.03125
| 0.46875
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
3600e6bb747b1c9b8dc55581b773755e90754fa2
| 307
|
py
|
Python
|
tensordata/cv/__init__.py
|
Hourout/tensordata
|
cbef6742ee0d3bfc4b886358fc01618bb5b63603
|
[
"Apache-2.0"
] | 13
|
2019-01-08T10:22:39.000Z
|
2020-06-17T10:02:47.000Z
|
tensordata/cv/__init__.py
|
Hourout/tensordata
|
cbef6742ee0d3bfc4b886358fc01618bb5b63603
|
[
"Apache-2.0"
] | null | null | null |
tensordata/cv/__init__.py
|
Hourout/tensordata
|
cbef6742ee0d3bfc4b886358fc01618bb5b63603
|
[
"Apache-2.0"
] | 1
|
2020-06-17T10:02:49.000Z
|
2020-06-17T10:02:49.000Z
|
from tensordata.cv._cifar import *
from tensordata.cv._mnist import *
from tensordata.cv._mnist_kuzushiji import *
from tensordata.cv._mnist_tibetan import *
from tensordata.cv._coil import *
from tensordata.cv._caltech import *
from tensordata.cv._stl10 import *
from tensordata.cv._mnist_kannada import *
| 34.111111
| 44
| 0.81759
| 43
| 307
| 5.581395
| 0.27907
| 0.466667
| 0.533333
| 0.641667
| 0.45
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007273
| 0.104235
| 307
| 8
| 45
| 38.375
| 0.865455
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
36087180419c316ef0f1abb078531c5c7cf8ee9d
| 11,842
|
py
|
Python
|
ssaicsp_app/ssaicsp/migrations/0001_initial.py
|
ecjtuseclab/SmartRobotControlPlateform
|
eb9ee5e4bd7c115c1928d0197b3fac7fd5a3e5f2
|
[
"MIT"
] | 11
|
2019-09-02T13:39:58.000Z
|
2022-03-22T02:57:37.000Z
|
ssaicsp_app/ssaicsp/migrations/0001_initial.py
|
ecjtuseclab/SmartRobotControlPlateform
|
eb9ee5e4bd7c115c1928d0197b3fac7fd5a3e5f2
|
[
"MIT"
] | null | null | null |
ssaicsp_app/ssaicsp/migrations/0001_initial.py
|
ecjtuseclab/SmartRobotControlPlateform
|
eb9ee5e4bd7c115c1928d0197b3fac7fd5a3e5f2
|
[
"MIT"
] | 3
|
2019-09-05T06:29:10.000Z
|
2022-03-19T11:50:29.000Z
|
# Generated by Django 2.1 on 2018-08-04 15:13
from django.db import migrations, models
class Migration(migrations.Migration):
    """
    Initial database schema for the ssaicsp app (auto-generated by Django
    2.1 on 2018-08-04; see the header comment in this file).

    Creates one table per sensor/actuator reading type plus the equipment,
    sensor, pin and property-mapping configuration tables.
    NOTE(review): this migration is auto-generated and column order is
    significant — do not hand-edit it after it has been applied; create a
    new migration instead.
    """

    # First migration of the app: depends on nothing else.
    initial = True

    dependencies = [
    ]

    # Most reading tables share the same shape: auto 'id' PK, a short
    # 'sensor_code', one or two value columns, 'create_time', 'status'.
    operations = [
        migrations.CreateModel(
            name='activebuzzer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('is_activebuzzer', models.IntegerField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='aroadtracing',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('is_aroadtracing', models.IntegerField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='current',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('current_value', models.FloatField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='dht11',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('temperature', models.FloatField()),
                ('humidity', models.FloatField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        # Configuration table: one row per Raspberry Pi device/equipment.
        migrations.CreateModel(
            name='equipments',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('rpi_code', models.CharField(max_length=20)),
                ('rpi_name', models.CharField(max_length=20)),
                ('remote_serverhost', models.CharField(max_length=20)),
                ('remote_serverport', models.IntegerField()),
                ('local_serverhost', models.CharField(max_length=20)),
                ('local_serverport', models.IntegerField()),
                ('local_servermaxconcount', models.IntegerField()),
                ('local_clientcount', models.IntegerField()),
                ('sendtime', models.IntegerField()),
                ('checkcontime', models.IntegerField()),
                ('equipmentInfos', models.CharField(max_length=255, null=True)),
                ('equipmentkey', models.CharField(max_length=255, null=True)),
                ('r1', models.CharField(max_length=255, null=True)),
                ('r2', models.CharField(max_length=255, null=True)),
                ('create_time', models.DateTimeField()),
                ('remark', models.CharField(max_length=200, null=True)),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='fire',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('is_fire', models.IntegerField()),
                ('fire_value', models.FloatField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='human',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('is_human', models.IntegerField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='infraredemission',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('is_infraredemission', models.IntegerField()),
                ('infraredemission_value', models.FloatField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='infraredreception',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('is_infraredreception', models.IntegerField()),
                ('infraredreception_value', models.FloatField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='led',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('is_led', models.IntegerField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='light',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('is_light', models.IntegerField()),
                ('light_value', models.FloatField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='obstacleavoidance',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('is_obstacleavoidance', models.IntegerField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        # GPIO pin reference table (physical pin / wiringPi / BCM numbering).
        migrations.CreateModel(
            name='pins',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('pin', models.IntegerField()),
                ('wPi', models.IntegerField()),
                ('BCM', models.IntegerField()),
                ('description', models.CharField(max_length=20)),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        # Key/value lookup table mapping property values to meanings.
        migrations.CreateModel(
            name='propertymapping',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('property_name', models.CharField(max_length=20)),
                ('property_value', models.CharField(max_length=20)),
                ('property_meaning', models.CharField(max_length=50)),
                ('remark', models.CharField(max_length=255)),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='rain',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('is_rain', models.IntegerField()),
                ('rain_value', models.FloatField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='relay',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('is_relay', models.IntegerField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        # Configuration table: one row per sensor attached to a Pi.
        migrations.CreateModel(
            name='sensors',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('rpi_code', models.CharField(max_length=20)),
                ('sensor_code', models.CharField(max_length=3)),
                ('sensor_name', models.CharField(max_length=20)),
                ('type', models.IntegerField()),
                ('acqfre', models.FloatField()),
                ('tranfre', models.FloatField()),
                ('enable', models.IntegerField()),
                ('keep_time', models.IntegerField()),
                ('rediscount', models.IntegerField()),
                ('pins', models.CharField(max_length=50)),
                ('parameters', models.CharField(max_length=100)),
                ('create_time', models.DateTimeField()),
                ('remark', models.CharField(max_length=200, null=True)),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='smoke',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('is_smoke', models.IntegerField()),
                ('smoke_value', models.FloatField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='soil',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('is_soil', models.IntegerField()),
                ('soil_value', models.FloatField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='sound',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('is_sound', models.IntegerField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
        migrations.CreateModel(
            name='ultrasonic',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sensor_code', models.CharField(max_length=3)),
                ('distance', models.FloatField()),
                ('create_time', models.DateTimeField()),
                ('status', models.IntegerField()),
            ],
        ),
    ]
| 45.546154
| 114
| 0.5266
| 979
| 11,842
| 6.191011
| 0.107252
| 0.142551
| 0.109883
| 0.14651
| 0.803663
| 0.785349
| 0.749711
| 0.720838
| 0.720838
| 0.709454
| 0
| 0.010245
| 0.324101
| 11,842
| 259
| 115
| 45.722008
| 0.747002
| 0.003631
| 0
| 0.670635
| 1
| 0
| 0.121387
| 0.005764
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003968
| 0
| 0.019841
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
361d68fe36fc3350864bbba85dffb6ea4406dc4e
| 1,178
|
py
|
Python
|
polysome_rnaseq/flats/make_merged_flats.py
|
oaxiom/hesc_lincrna
|
b6dfb023787022b2dbb167e04925f467fdd7849c
|
[
"MIT"
] | null | null | null |
polysome_rnaseq/flats/make_merged_flats.py
|
oaxiom/hesc_lincrna
|
b6dfb023787022b2dbb167e04925f467fdd7849c
|
[
"MIT"
] | null | null | null |
polysome_rnaseq/flats/make_merged_flats.py
|
oaxiom/hesc_lincrna
|
b6dfb023787022b2dbb167e04925f467fdd7849c
|
[
"MIT"
] | null | null | null |
from glbase3 import *

# Build merged 'flat' coverage tracks from the STAR wig outputs, one per
# cellular fraction, merging the two replicates (rp1/rp2) of each sample.
# Refactored from five copy-pasted wig_to_flat() calls into a loop; the
# generated paths and arguments are identical to the originals.
#
# Tuples are (wig stem, output flat filename, human-readable label).
# NOTE: the polysome stems ('hesc_poly_high'/'hesc_poly_low') intentionally
# differ from their flat names ('hesc_polyhigh'/'hesc_polylow'), as in the
# original script.
_SAMPLES = [
    ('hesc_cyto', 'hesc_cyto.flat', 'hESC Cytoplasm'),
    ('hesc_monosome', 'hesc_monosome.flat', 'hESC Monosome'),
    ('hesc_nuc', 'hesc_nuc.flat', 'hESC Nucleus'),
    ('hesc_poly_high', 'hesc_polyhigh.flat', 'hESC Polysome high'),
    ('hesc_poly_low', 'hesc_polylow.flat', 'hESC Polysome low'),
]

for _stem, _flat, _label in _SAMPLES:
    wig_to_flat(
        ['../star/%s.rp%d.Signal.UniqueMultiple.str1.out.wig.gz' % (_stem, rep)
         for rep in (1, 2)],
        _flat,
        _label,
        skip_non_standard_chroms=True,
        gzip=True,
    )
| 58.9
| 158
| 0.762309
| 184
| 1,178
| 4.641304
| 0.173913
| 0.093677
| 0.28103
| 0.316159
| 0.791569
| 0.771663
| 0.771663
| 0.733021
| 0.504684
| 0.35363
| 0
| 0.018936
| 0.058574
| 1,178
| 19
| 159
| 62
| 0.751127
| 0
| 0
| 0
| 0
| 0
| 0.653061
| 0.522109
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.090909
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
363e7247225d096226593ad6f3a55664b55cf1c1
| 5,424
|
py
|
Python
|
example/Si_fitting.py
|
jochym/ALM
|
6552b6d818c2cb83f90193ebba839fd85d6957f1
|
[
"MIT"
] | 10
|
2017-01-12T21:18:36.000Z
|
2020-07-27T11:07:16.000Z
|
example/Si_fitting.py
|
jochym/ALM
|
6552b6d818c2cb83f90193ebba839fd85d6957f1
|
[
"MIT"
] | 25
|
2018-04-24T07:32:17.000Z
|
2021-04-20T03:08:54.000Z
|
example/Si_fitting.py
|
jochym/ALM
|
6552b6d818c2cb83f90193ebba839fd85d6957f1
|
[
"MIT"
] | 12
|
2016-11-08T22:56:38.000Z
|
2021-04-20T02:27:38.000Z
|
#
# Si_fitting.py
#
# Example: run the ALM force-constant code in its fitting mode on a 64-atom
# silicon supercell. Requires "force.dat" and "disp.dat" in the working
# directory.
#
from alm import ALM
import numpy as np

# Supercell lattice vectors (one vector per row); cubic cell of edge 20.406
# in the length units expected by ALM.
lavec = [[20.406, 0, 0],
         [0, 20.406, 0],
         [0, 0, 20.406]]

# Fractional coordinates of the 64 Si atoms of the supercell.
# NOTE(review): the ordering of this table must match the atom ordering in
# force.dat / disp.dat — do not reorder.
xcoord = [[0.0000000000000000, 0.0000000000000000, 0.0000000000000000],
          [0.0000000000000000, 0.0000000000000000, 0.5000000000000000],
          [0.0000000000000000, 0.2500000000000000, 0.2500000000000000],
          [0.0000000000000000, 0.2500000000000000, 0.7500000000000000],
          [0.0000000000000000, 0.5000000000000000, 0.0000000000000000],
          [0.0000000000000000, 0.5000000000000000, 0.5000000000000000],
          [0.0000000000000000, 0.7500000000000000, 0.2500000000000000],
          [0.0000000000000000, 0.7500000000000000, 0.7500000000000000],
          [0.1250000000000000, 0.1250000000000000, 0.1250000000000000],
          [0.1250000000000000, 0.1250000000000000, 0.6250000000000000],
          [0.1250000000000000, 0.3750000000000000, 0.3750000000000000],
          [0.1250000000000000, 0.3750000000000000, 0.8750000000000000],
          [0.1250000000000000, 0.6250000000000000, 0.1250000000000000],
          [0.1250000000000000, 0.6250000000000000, 0.6250000000000000],
          [0.1250000000000000, 0.8750000000000000, 0.3750000000000000],
          [0.1250000000000000, 0.8750000000000000, 0.8750000000000000],
          [0.2500000000000000, 0.0000000000000000, 0.2500000000000000],
          [0.2500000000000000, 0.0000000000000000, 0.7500000000000000],
          [0.2500000000000000, 0.2500000000000000, 0.0000000000000000],
          [0.2500000000000000, 0.2500000000000000, 0.5000000000000000],
          [0.2500000000000000, 0.5000000000000000, 0.2500000000000000],
          [0.2500000000000000, 0.5000000000000000, 0.7500000000000000],
          [0.2500000000000000, 0.7500000000000000, 0.0000000000000000],
          [0.2500000000000000, 0.7500000000000000, 0.5000000000000000],
          [0.3750000000000000, 0.1250000000000000, 0.3750000000000000],
          [0.3750000000000000, 0.1250000000000000, 0.8750000000000000],
          [0.3750000000000000, 0.3750000000000000, 0.1250000000000000],
          [0.3750000000000000, 0.3750000000000000, 0.6250000000000000],
          [0.3750000000000000, 0.6250000000000000, 0.3750000000000000],
          [0.3750000000000000, 0.6250000000000000, 0.8750000000000000],
          [0.3750000000000000, 0.8750000000000000, 0.1250000000000000],
          [0.3750000000000000, 0.8750000000000000, 0.6250000000000000],
          [0.5000000000000000, 0.0000000000000000, 0.0000000000000000],
          [0.5000000000000000, 0.0000000000000000, 0.5000000000000000],
          [0.5000000000000000, 0.2500000000000000, 0.2500000000000000],
          [0.5000000000000000, 0.2500000000000000, 0.7500000000000000],
          [0.5000000000000000, 0.5000000000000000, 0.0000000000000000],
          [0.5000000000000000, 0.5000000000000000, 0.5000000000000000],
          [0.5000000000000000, 0.7500000000000000, 0.2500000000000000],
          [0.5000000000000000, 0.7500000000000000, 0.7500000000000000],
          [0.6250000000000000, 0.1250000000000000, 0.1250000000000000],
          [0.6250000000000000, 0.1250000000000000, 0.6250000000000000],
          [0.6250000000000000, 0.3750000000000000, 0.3750000000000000],
          [0.6250000000000000, 0.3750000000000000, 0.8750000000000000],
          [0.6250000000000000, 0.6250000000000000, 0.1250000000000000],
          [0.6250000000000000, 0.6250000000000000, 0.6250000000000000],
          [0.6250000000000000, 0.8750000000000000, 0.3750000000000000],
          [0.6250000000000000, 0.8750000000000000, 0.8750000000000000],
          [0.7500000000000000, 0.0000000000000000, 0.2500000000000000],
          [0.7500000000000000, 0.0000000000000000, 0.7500000000000000],
          [0.7500000000000000, 0.2500000000000000, 0.0000000000000000],
          [0.7500000000000000, 0.2500000000000000, 0.5000000000000000],
          [0.7500000000000000, 0.5000000000000000, 0.2500000000000000],
          [0.7500000000000000, 0.5000000000000000, 0.7500000000000000],
          [0.7500000000000000, 0.7500000000000000, 0.0000000000000000],
          [0.7500000000000000, 0.7500000000000000, 0.5000000000000000],
          [0.8750000000000000, 0.1250000000000000, 0.3750000000000000],
          [0.8750000000000000, 0.1250000000000000, 0.8750000000000000],
          [0.8750000000000000, 0.3750000000000000, 0.1250000000000000],
          [0.8750000000000000, 0.3750000000000000, 0.6250000000000000],
          [0.8750000000000000, 0.6250000000000000, 0.3750000000000000],
          [0.8750000000000000, 0.6250000000000000, 0.8750000000000000],
          [0.8750000000000000, 0.8750000000000000, 0.1250000000000000],
          [0.8750000000000000, 0.8750000000000000, 0.6250000000000000]]

# Atomic numbers for every atom: all silicon (Z = 14).
kd = [14] * 64

# Load force/displacement datasets and reshape to (snapshots, 64 atoms, xyz);
# the [[0]] fancy index keeps only the first snapshot while preserving the
# leading snapshot axis (shape (1, 64, 3)).
force = np.loadtxt("force.dat").reshape((-1, 64, 3))[[0]]
disp = np.loadtxt("disp.dat").reshape((-1, 64, 3))[[0]]
# alm.alm_new() and alm.alm_delete() are handled by the 'with' statement.
with ALM(lavec, xcoord, kd) as alm:
    alm.define(1)
    alm.displacements = disp
    alm.forces = force
    info = alm.optimize()
    fc_values, elem_indices = alm.get_fc(1, mode='all')

# Each flattened element index encodes (atom, axis) as index = 3*atom + axis;
# print the fitted force constants with 1-based atom numbers and x/y/z labels.
axes = "xyz"
for fc, elem in zip(fc_values, elem_indices):
    atom_a, axis_a = divmod(elem[0], 3)
    atom_b, axis_b = divmod(elem[1], 3)
    print("%f %d%s %d%s" % (fc, atom_a + 1, axes[axis_a], atom_b + 1, axes[axis_b]))
| 56.5
| 71
| 0.684919
| 526
| 5,424
| 7.047529
| 0.136882
| 0.110062
| 0.116536
| 0.055031
| 0.889938
| 0.885622
| 0.877529
| 0.77475
| 0.77475
| 0
| 0
| 0.757071
| 0.19174
| 5,424
| 95
| 72
| 57.094737
| 0.088504
| 0.023968
| 0
| 0
| 0
| 0
| 0.006621
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.023529
| 0
| 0.023529
| 0.011765
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36674ff7960050003012d866a228317f3bc9b32d
| 159
|
py
|
Python
|
Geog5222/misc/GDALTest.py
|
thejayhaykid/Python
|
641c33b94762f0cace203dcf4cc121571625ab02
|
[
"MIT"
] | null | null | null |
Geog5222/misc/GDALTest.py
|
thejayhaykid/Python
|
641c33b94762f0cace203dcf4cc121571625ab02
|
[
"MIT"
] | null | null | null |
Geog5222/misc/GDALTest.py
|
thejayhaykid/Python
|
641c33b94762f0cace203dcf4cc121571625ab02
|
[
"MIT"
] | null | null | null |
from osgeo import gdal
from osgeo import ogr
from osgeo import osr
from osgeo import gdal_array
from osgeo import gdalconst
from osgeo.gdalconst import *
| 22.714286
| 29
| 0.805031
| 25
| 159
| 5.08
| 0.32
| 0.425197
| 0.590551
| 0.299213
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.18239
| 159
| 6
| 30
| 26.5
| 0.976923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
7fe9a26c5e57c7c5ab0dcb0508f526f9cfaf479f
| 18,846
|
py
|
Python
|
net/utils/meter.py
|
sdjsngs/Cross-Epoch-Learning-for-Weakly-Supervised-Anomaly-Detection-in-Surveillance-Videos
|
f734db8d440f2974cb6b4234b30da6856ef62ce3
|
[
"MIT"
] | 3
|
2021-07-30T04:45:08.000Z
|
2022-02-23T12:44:16.000Z
|
net/utils/meter.py
|
sdjsngs/Cross-Epoch-Learning-for-Weakly-Supervised-Anomaly-Detection-in-Surveillance-Videos
|
f734db8d440f2974cb6b4234b30da6856ef62ce3
|
[
"MIT"
] | null | null | null |
net/utils/meter.py
|
sdjsngs/Cross-Epoch-Learning-for-Weakly-Supervised-Anomaly-Detection-in-Surveillance-Videos
|
f734db8d440f2974cb6b4234b30da6856ef62ce3
|
[
"MIT"
] | 3
|
2021-07-30T09:26:45.000Z
|
2022-03-16T15:31:41.000Z
|
"""
meter
"""
from fvcore.common.timer import Timer
import torch
import json
import datetime
import numpy as np
from collections import defaultdict ,deque
import net.utils.logging_tool as logging
logger=logging.get_logger(__name__)
class ScalarMeter(object):
    """
    Tracks a scalar quantity over a fixed-size sliding window.

    The most recent ``window_size`` values are kept for windowed statistics
    (median/mean), while ``total``/``count`` accumulate over every value
    added since construction or the last reset for global statistics.
    """

    def __init__(self, window_size):
        # Recent values; entries older than window_size fall off the left.
        self.dequa = deque(maxlen=window_size)
        # Running totals over *all* values, not just the window.
        self.total = 0.0
        self.count = 0

    def reset(self):
        """Drop the window contents and zero the running totals."""
        self.dequa.clear()
        self.total = 0.0
        self.count = 0

    def add_value(self, value):
        """Record one scalar sample."""
        self.dequa.append(value)
        self.total += value
        self.count += 1

    def get_win_median(self):
        """Median of the values currently inside the window."""
        return np.median(self.dequa)

    def get_win_avg(self):
        """Mean of the values currently inside the window."""
        return np.mean(self.dequa)

    def get_global_avg(self):
        """Mean over every value added (raises ZeroDivisionError if empty)."""
        return self.total / self.count
class TrainMeter(object):
def __init__(self,epoch_iters ,cfg):
"""
Anormal detection in surveiallance videos
loss item
hinge loss smooth loss and sparsity loss
plus new hard instance bank loss
1 hinge loss for hard instance
2 min hard instance
:param epoch_iters: iters in one epoch
:param cfg:
"""
self._cfg=cfg
self.epoch_iters=epoch_iters
self.MAX_EPOCH=cfg.SOLVER.MAX_EPOCH * epoch_iters
self.iter_timer = Timer()
self.hinge_loss=ScalarMeter(cfg.LOG_PERIOD)
self.smooth_loss=ScalarMeter(cfg.LOG_PERIOD)
self.sparsity_loss=ScalarMeter(cfg.LOG_PERIOD)
self.combine_loss = ScalarMeter(cfg.LOG_PERIOD)
self.hard_instance_hinge_loss=ScalarMeter(cfg.LOG_PERIOD)
self.hard_instance_score_loss=ScalarMeter(cfg.LOG_PERIOD)
self.lr = None
self.num_samples=0
self.hinge_loss_total=0.0
self.smooth_loss_total=0.0
self.sparsity_loss_loss=0.0
self.combine_loss_total=0.0
self.hard_instance_hinge_loss_total=0.0
self.hard_instance_score_loss_total=0.0
def reset(self):
"""
reset meter
:return:
"""
self.lr = None
self.num_samples = 0
self.hinge_loss.reset()
self.smooth_loss.reset()
self.sparsity_loss.reset()
self.combine_loss.reset()
self.hard_instance_hinge_loss.reset()
self.hard_instance_score_loss.reset()
self.hinge_loss_total = 0.0
self.smooth_loss_total = 0.0
self.sparsity_loss_total = 0.0
self.combine_loss_total = 0.0
self.hard_instance_hinge_loss_total = 0.0
self.hard_instance_score_loss_total = 0.0
def iter_start(self):
"""
start to recode time
:return:
"""
self.iter_timer.reset()
def iter_stop(self):
"""
stop recode time
:return:
"""
self.iter_timer.pause()
def update_stats(self,mse_loss,entropy_loss,combine_loss,lr,mb_size):
# self.mse_loss.add_value(mse_loss)
# self.entropy_loss.add_value(entropy_loss)
# self.combine_loss.add_value(combine_loss)
# self.lr=lr
# self.loss_total+=loss*mb_size
# self.num_samples+=mb_size
pass
def update_stats_origin(self, combine_loss,hinge_loss,smooth_loss,sparsity_loss,lr,mb_size):
"""
Update the current stats for mixup x and y
this is kl loss
Args:
loss (float): loss value.
lr (float): learning rate.
mb_size (int): mini batch size.
"""
self.hinge_loss.add_value(hinge_loss)
self.smooth_loss.add_value(smooth_loss)
self.sparsity_loss.add_value(sparsity_loss)
self.combine_loss.add_value(combine_loss)
self.lr = lr
self.hinge_loss_total = hinge_loss*mb_size
self.smooth_loss_total=smooth_loss*mb_size
self.sparsity_loss_total=sparsity_loss*mb_size
self.combine_loss_total=combine_loss*mb_size
self.num_samples += mb_size
def update_stats_loss1(self, combine_loss, hinge_loss, smooth_loss, sparsity_loss, hard_instance_hinge_loss,lr, mb_size):
"""
Update the current stats for mixup x and y
this is kl loss
Args:
loss (float): loss value.
lr (float): learning rate.
mb_size (int): mini batch size.
"""
self.hinge_loss.add_value(hinge_loss)
self.smooth_loss.add_value(smooth_loss)
self.sparsity_loss.add_value(sparsity_loss)
self.combine_loss.add_value(combine_loss)
self.hard_instance_hinge_loss.add_value(hard_instance_hinge_loss)
self.lr = lr
self.hinge_loss_total = hinge_loss * mb_size
self.smooth_loss_total = smooth_loss * mb_size
self.sparsity_loss_total = sparsity_loss * mb_size
self.hard_instance_hinge_loss_total=hard_instance_hinge_loss*mb_size
self.combine_loss_total = combine_loss * mb_size
self.num_samples += mb_size
def update_stats_loss2(self, combine_loss, hinge_loss, smooth_loss, sparsity_loss, hard_instance_score_loss,lr, mb_size):
"""
Update the current stats for mixup x and y
this is kl loss
Args:
loss (float): loss value.
lr (float): learning rate.
mb_size (int): mini batch size.
"""
self.hinge_loss.add_value(hinge_loss)
self.smooth_loss.add_value(smooth_loss)
self.sparsity_loss.add_value(sparsity_loss)
self.combine_loss.add_value(combine_loss)
self.hard_instance_score_loss.add_value(hard_instance_score_loss)
self.lr = lr
self.hinge_loss_total = hinge_loss * mb_size
self.smooth_loss_total = smooth_loss * mb_size
self.sparsity_loss_total = sparsity_loss * mb_size
self.hard_instance_score_loss_total=hard_instance_score_loss*mb_size
self.combine_loss_total = combine_loss * mb_size
self.num_samples += mb_size
def update_stats_loss_combine(self, combine_loss, hinge_loss, smooth_loss, sparsity_loss, hard_instance_hinge_loss,hard_instance_score_loss,lr, mb_size):
"""
Update the current stats for mixup x and y
this is kl loss
Args:
loss (float): loss value.
lr (float): learning rate.
mb_size (int): mini batch size.
"""
self.hinge_loss.add_value(hinge_loss)
self.smooth_loss.add_value(smooth_loss)
self.sparsity_loss.add_value(sparsity_loss)
self.combine_loss.add_value(combine_loss)
self.hard_instance_hinge_loss.add_value(hard_instance_hinge_loss)
self.hard_instance_score_loss.add_value(hard_instance_score_loss)
self.lr = lr
self.hinge_loss_total = hinge_loss * mb_size
self.smooth_loss_total = smooth_loss * mb_size
self.sparsity_loss_total = sparsity_loss * mb_size
self.hard_instance_hinge_loss_total=hard_instance_hinge_loss*mb_size
self.hard_instance_score_loss_total=hard_instance_score_loss*mb_size
self.combine_loss_total = combine_loss * mb_size
self.num_samples += mb_size
def update_stats_loss4(self, combine_loss, hinge_loss, smooth_loss, sparsity_loss, hard_instance_hinge_loss,lr, mb_size):
"""
Update the current stats for mixup x and y
this is kl loss
Args:
loss (float): loss value.
lr (float): learning rate.
mb_size (int): mini batch size.
"""
self.hinge_loss.add_value(hinge_loss)
self.smooth_loss.add_value(smooth_loss)
self.sparsity_loss.add_value(sparsity_loss)
self.combine_loss.add_value(combine_loss)
self.hard_instance_hinge_loss.add_value(hard_instance_hinge_loss)
self.lr = lr
self.hinge_loss_total = hinge_loss * mb_size
self.smooth_loss_total = smooth_loss * mb_size
self.sparsity_loss_total = sparsity_loss * mb_size
self.hard_instance_hinge_loss_total=hard_instance_hinge_loss*mb_size
self.combine_loss_total = combine_loss * mb_size
self.num_samples += mb_size
def log_iter_stats(self,cur_epoch,cur_iter,mode):
"""
log the stats for cur iteration
:param cur_epoch:
:param cur_iter:
:return:
"""
if (cur_iter+1) % self._cfg.LOG_PERIOD!= 0:
return
iteration=(cur_epoch-1)*self.epoch_iters+cur_iter+1
eta_sec = self.iter_timer.seconds() * (
self.MAX_EPOCH - (cur_epoch * self.epoch_iters + cur_iter + 1)
)
eta = str(datetime.timedelta(seconds=int(eta_sec)))
if mode in ["origin",]:
stats = {
"_type": "train_iter",
"epoch": "{}/{}".format(cur_epoch , self._cfg.SOLVER.MAX_EPOCH),
"iter": "{}/{}".format(cur_iter + 1, self.epoch_iters),
"total_iter": "{}/{}".format(iteration,self._cfg.SOLVER.MAX_ITERATION),
"time_diff": self.iter_timer.seconds(),
"eta": eta,
"hinge_loss":self.hinge_loss.get_win_median(),
"smooth_loss":self.smooth_loss.get_win_median(),
"sparsity_loss":self.sparsity_loss.get_win_median(),
"combine_loss": self.combine_loss.get_win_median(),
"lr": self.lr,
"gpu_mem": "{:.2f} GB".format(torch.cuda.max_memory_allocated()/1024**3),
}
elif mode in ["loss1"]:
stats = {
"_type": "train_iter",
"epoch": "{}/{}".format(cur_epoch , self._cfg.SOLVER.MAX_EPOCH),
"iter": "{}/{}".format(cur_iter + 1, self.epoch_iters),
"total_iter": "{}/{}".format(iteration, self._cfg.SOLVER.MAX_ITERATION),
"time_diff": self.iter_timer.seconds(),
"eta": eta,
"hinge_loss": self.hinge_loss.get_win_median(),
"smooth_loss": self.smooth_loss.get_win_median(),
"sparsity_loss": self.sparsity_loss.get_win_median(),
"hard_instance_hinge_loss":self.hard_instance_hinge_loss.get_win_median(),
"combine_loss": self.combine_loss.get_win_median(),
"lr": self.lr,
"gpu_mem": "{:.2f} GB".format(torch.cuda.max_memory_allocated()/1024**3),
}
elif mode in ["loss2"]:
stats = {
"_type": "train_iter",
"epoch": "{}/{}".format(cur_epoch , self._cfg.SOLVER.MAX_EPOCH),
"iter": "{}/{}".format(cur_iter + 1, self.epoch_iters),
"total_iter": "{}/{}".format(iteration, self._cfg.SOLVER.MAX_ITERATION),
"time_diff": self.iter_timer.seconds(),
"eta": eta,
"hinge_loss": self.hinge_loss.get_win_median(),
"smooth_loss": self.smooth_loss.get_win_median(),
"sparsity_loss": self.sparsity_loss.get_win_median(),
"hard_instance_score_loss":self.hard_instance_score_loss.get_win_median(),
"combine_loss": self.combine_loss.get_win_median(),
"lr": self.lr,
"gpu_mem": "{:.2f} GB".format(torch.cuda.max_memory_allocated()/1024**3),
}
elif mode in ["combine"]:
stats = {
"_type": "train_iter",
"epoch": "{}/{}".format(cur_epoch , self._cfg.SOLVER.MAX_EPOCH),
"iter": "{}/{}".format(cur_iter + 1, self.epoch_iters),
"total_iter": "{}/{}".format(iteration, self._cfg.SOLVER.MAX_ITERATION),
"time_diff": self.iter_timer.seconds(),
"eta": eta,
"hinge_loss": self.hinge_loss.get_win_median(),
"smooth_loss": self.smooth_loss.get_win_median(),
"sparsity_loss": self.sparsity_loss.get_win_median(),
"hard_instance_hinge_loss":self.hard_instance_hinge_loss.get_win_median(),
"hard_instance_score_loss":self.hard_instance_score_loss.get_win_median(),
"combine_loss": self.combine_loss.get_win_median(),
"lr": self.lr,
"gpu_mem": "{:.2f} GB".format(torch.cuda.max_memory_allocated()/1024**3),
}
else:
raise NotImplementedError(
"Does not support state"
)
logging.log_json_stats(stats)
# stats={
# "_type": "train_iter",
# "epoch": "{}/{}".format(cur_epoch+1,self._cfg.SOLVER.MAX_EPOCH),
# "iter": "{}/{}".format(cur_iter+1,self.epoch_iters),
# "time":self.iter_timer.seconds(),
# "eta":eta,
# "mse_loss":self.mse_loss.get_win_median(),
# "entropy_loss": self.entropy_loss.get_win_median(),
# "combine_loss": self.combine_loss.get_win_median(),
# "lr":self.lr,
# "gpu":"{:.2f}GB".format(torch.cuda.max_memory_allocated()/1024**3)
# }
def log_epoch_stats(self,cur_epoch,mode):
"""
:param cur_epoch:
:return:
"""
stats = {
"_type": "train_epoch",
"epoch": "{}/{}".format(cur_epoch , self._cfg.SOLVER.MAX_EPOCH),
"time_diff": self.iter_timer.seconds(),
"lr": self.lr,
"hinge_loss": self.hinge_loss_total/self.num_samples,
"smooth_loss": self.smooth_loss_total/self.num_samples,
"sparsity_loss": self.sparsity_loss_total/self.num_samples,
"combine_loss": self.combine_loss_total/self.num_samples,
# "loss_combine": self.loss_combine_total / self.num_samples,
"gpu_mem": "{:.2f} GB".format(torch.cuda.max_memory_allocated()/1024**3),
}
if mode in ["origin"]:
pass
elif mode in ["loss1"]:
stats["hard_instance_hinge_loss"] = self.hard_instance_hinge_loss_total / self.num_samples
elif mode in ["loss2"]:
stats["hard_instance_score_loss"] = self.hard_instance_score_loss_total / self.num_samples
elif mode in ["combine"]:
stats["hard_instance_hinge_loss"] = self.hard_instance_hinge_loss_total / self.num_samples
stats["hard_instance_score_loss"] = self.hard_instance_score_loss_total / self.num_samples
else:
raise NotImplementedError(
"unsupported mode:{} check the mode ".format(mode)
)
# if self._cfg.SOLVER.LOSS_TYPES>1:
# stats["cross_entropy_loss"]=self.loss_cross_entropy_total/self.num_samples
# stats["combine_loss"] = self.loss_combine_total / self.num_samples
# stats = {
# "_type": "train_epoch",
# "epoch": "{}/{}".format(cur_epoch + 1, self._cfg.SOLVER.MAX_EPOCH),
# "time_diff": self.iter_timer.seconds(),
# "mse_loss":self.mse_loss.get_win_avg(),
# "entropy_loss":self.entropy_loss.get_win_avg(),
# "combine_loss":self.combine_loss.get_win_avg(),
# "gpu_mem": "{:.2f} GB".format(torch.cuda.max_memory_allocated()/1024**3),
# }
logging.log_json_stats(stats)
class TestMeter(object):
    """Meter for the test phase: tracks mse/entropy/combine losses, iteration
    time, and the learning rate.

    NOTE(review): the emitted json uses "_type": "train_iter" /
    "train_epoch" even though this is the test meter — confirm intended.
    """
    def __init__(self,epoch_iters,cfg):
        """
        :param epoch_iters: iters in one epoch
        :param cfg: config node providing LOG_PERIOD and SOLVER.MAX_EPOCH
        """
        self._cfg=cfg
        self.epoch_iters=epoch_iters
        # self.loss=ScalarMeter(cfg.LOG_PERIOD)
        self.mse_loss=ScalarMeter(cfg.LOG_PERIOD)
        self.entropy_loss=ScalarMeter(cfg.LOG_PERIOD)
        self.combine_loss=ScalarMeter(cfg.LOG_PERIOD)
        self.iter_timer=Timer()
        self.lr=None
        # self.loss_total=0.0
        # Total iteration count across all epochs; used only for the ETA.
        self.MAX_EPOCH=cfg.SOLVER.MAX_EPOCH * epoch_iters
        # self.num_samples=0
    def reset(self):
        """
        Reset the windowed loss meters and the stored learning rate.
        :return: None
        """
        self.lr=None
        self.mse_loss.reset()
        self.entropy_loss.reset()
        self.combine_loss.reset()
        # self.loss_total=0.0
    def iter_start(self):
        """
        Start timing an iteration by resetting the iteration timer.
        :return: None
        """
        self.iter_timer.reset()
    def iter_stop(self):
        """
        Stop timing the current iteration by pausing the iteration timer.
        :return: None
        """
        self.iter_timer.pause()
    def update_stats(self,mse_loss,entropy_loss,combine_loss,lr,mb_size):
        """Record one iteration's loss values and the current learning rate.

        mb_size is currently unused (per-sample totals are commented out).
        """
        self.mse_loss.add_value(mse_loss)
        self.entropy_loss.add_value(entropy_loss)
        self.combine_loss.add_value(combine_loss)
        self.lr=lr
        # self.loss_total+=loss*mb_size
        # self.num_samples+=mb_size
    def log_iter_stats(self,cur_epoch,cur_iter):
        """
        Log window-median loss stats as json every LOG_PERIOD iterations.
        :param cur_epoch: 0-based current epoch (displayed 1-based)
        :param cur_iter: 0-based iteration within the epoch
        :return: None
        """
        if (cur_iter+1) % self._cfg.LOG_PERIOD!= 0:
            return
        # Remaining iterations times per-iteration seconds gives the ETA.
        eta_sec = self.iter_timer.seconds() * (
            self.MAX_EPOCH - (cur_epoch * self.epoch_iters + cur_iter + 1)
        )
        eta = str(datetime.timedelta(seconds=int(eta_sec)))
        stats={
            "_type": "train_iter",
            "epoch": "{}/{}".format(cur_epoch+1,self._cfg.SOLVER.MAX_EPOCH),
            "iter": "{}/{}".format(cur_iter+1,self.epoch_iters),
            "time":self.iter_timer.seconds(),
            "eta":eta,
            "mse_loss":self.mse_loss.get_win_median(),
            "entropy_loss": self.entropy_loss.get_win_median(),
            "combine_loss": self.combine_loss.get_win_median(),
            "lr":self.lr,
            "gpu":"{:.2f}GB".format(torch.cuda.max_memory_allocated()/1024**3)
        }
        logging.log_json_stats(stats)
    def log_epoch_stats(self,cur_epoch):
        """
        Log window-average loss stats for the finished epoch.
        :param cur_epoch: 0-based current epoch (displayed 1-based)
        :return: None
        """
        stats = {
            "_type": "train_epoch",
            "epoch": "{}/{}".format(cur_epoch + 1, self._cfg.SOLVER.MAX_EPOCH),
            "time_diff": self.iter_timer.seconds(),
            "mse_loss":self.mse_loss.get_win_avg(),
            "entropy_loss":self.entropy_loss.get_win_avg(),
            "combine_loss":self.combine_loss.get_win_avg(),
            "gpu_mem": "{:.2f} GB".format(torch.cuda.max_memory_allocated()/1024**3),
        }
        logging.log_json_stats(stats)
| 34.390511
| 157
| 0.594556
| 2,346
| 18,846
| 4.430094
| 0.065644
| 0.056192
| 0.046185
| 0.054556
| 0.884923
| 0.851054
| 0.836813
| 0.806697
| 0.794862
| 0.790821
| 0
| 0.009008
| 0.293113
| 18,846
| 547
| 158
| 34.453382
| 0.77113
| 0.162156
| 0
| 0.731959
| 0
| 0
| 0.072787
| 0.013134
| 0
| 0
| 0
| 0
| 0
| 1
| 0.085911
| false
| 0.006873
| 0.024055
| 0
| 0.137457
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18514185b2b924ce71a54fcd195503af92554621
| 12,749
|
py
|
Python
|
pythia/models/modules.py
|
ghazaalbeh/pythia
|
ff20cbc4ddfc02513c4e56cd3eb1a4637fdf8fe2
|
[
"BSD-3-Clause"
] | null | null | null |
pythia/models/modules.py
|
ghazaalbeh/pythia
|
ff20cbc4ddfc02513c4e56cd3eb1a4637fdf8fe2
|
[
"BSD-3-Clause"
] | null | null | null |
pythia/models/modules.py
|
ghazaalbeh/pythia
|
ff20cbc4ddfc02513c4e56cd3eb1a4637fdf8fe2
|
[
"BSD-3-Clause"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
use_cuda = torch.cuda.is_available()
'''
NOTE: in all modules,
image_feat [N,D_image,H,W]
text [N,D_text]
attention [N,1,H,W]
'''
class SceneModule(nn.Module):
    """NMN module that attends to the whole scene: returns a uniform
    (all-ones) attention grid over the image feature map."""
    def __init__(self):
        super(SceneModule, self).__init__()

    def forward(self, input_image_feat, input_text, input_image_attention1=None, input_image_attention2=None):
        """Return an [N, 1, H, W] all-ones attention grid.

        Fix: allocate the grid with the input's device and dtype instead of
        relying on the module-level ``use_cuda`` flag (which broke explicit
        device placement); also drops the deprecated ``Variable`` wrapper.
        """
        N, _, H, W = input_image_feat.shape
        return torch.ones(
            (N, 1, H, W),
            dtype=input_image_feat.dtype,
            device=input_image_feat.device,
        )
class FindModule(nn.Module):
    '''
    Attend over the image conditioned on text:
    (N, D_image, H, W) x (N, D_text) -> [N, 1, H, W] attention grid.
    '''
    def __init__(self, image_dim, text_dim, map_dim):
        super(FindModule, self).__init__()
        self.map_dim = map_dim
        self.conv1 = nn.Conv2d(image_dim, map_dim, kernel_size=1)
        self.conv2 = nn.Conv2d(map_dim, 1, kernel_size=1)
        self.textfc = nn.Linear(text_dim, map_dim)

    def forward(self, input_image_feat, input_text, input_image_attention1=None, input_image_attention2=None):
        img = self.conv1(input_image_feat)  # (N, map_dim, H, W)
        txt = self.textfc(input_text).view(-1, self.map_dim, 1, 1)
        txt = txt.expand_as(img)
        # Gate image features by text, L2-normalize over channels, project.
        joint = F.normalize(img * txt, p=2, dim=1)
        return self.conv2(joint)  # (N, 1, H, W)
class FilterModule(nn.Module):
    """Filter = And(attention1, Find(image, text)): refine an existing
    attention with a fresh text-conditioned Find."""
    def __init__(self, findModule, andModule):
        super(FilterModule, self).__init__()
        self.andModule = andModule
        self.findModule = findModule

    def forward(self, input_image_feat, input_text, input_image_attention1=None, input_image_attention2=None):
        found = self.findModule(
            input_image_feat, input_text, input_image_attention1, input_image_attention2
        )
        # Combine the incoming attention with the freshly found one.
        return self.andModule(input_image_feat, input_text, input_image_attention1, found)
class FindSamePropertyModule(nn.Module):
    """Find regions sharing a property with the attended region: pools image
    features under attention1, then gates a fresh attention by that pooled
    feature and the text."""
    def __init__(self,output_num_choice, image_dim, text_dim, map_dim):
        super(FindSamePropertyModule, self).__init__()
        self.out_num_choice = output_num_choice
        self.image_dim = image_dim
        self.map_dim = map_dim
        self.text_fc = nn.Linear(text_dim, map_dim)
        self.att_fc_1 = nn.Linear(image_dim, map_dim)
        self.conv1 = nn.Conv2d(image_dim, map_dim, kernel_size=1)
        self.conv2 = nn.Conv2d(map_dim, 1, kernel_size=1)

    def forward(self, input_image_feat, input_text, input_image_attention1=None, input_image_attention2=None):
        H, W = input_image_attention1.shape[2:4]
        # Attention-weighted average of the image features under attention 1.
        weights = F.softmax(input_image_attention1.view(-1, H * W), dim=1)
        feats = input_image_feat.view(-1, self.image_dim, H * W)
        pooled = (weights.view(-1, 1, H * W) * feats).sum(dim=2)  # [N, image_dim]
        pooled_mapped = self.att_fc_1(pooled).view(-1, self.map_dim, 1, 1)
        text_mapped = self.text_fc(input_text).view(-1, self.map_dim, 1, 1)
        image_mapped = self.conv1(input_image_feat)  # (N, map_dim, H, W)
        # Three-way elementwise gating, L2-normalized over channels.
        joint = F.normalize(image_mapped * text_mapped * pooled_mapped, p=2, dim=1)
        return self.conv2(joint)  # (N, 1, H, W)
class TransformModule(nn.Module):
    """Spatially transform an attention map conditioned on text: convolve the
    attention, gate by the mapped text, and project back to one channel."""
    def __init__(self, image_dim, text_dim, map_dim,kernel_size=5, padding=2):
        super(TransformModule, self).__init__()
        self.map_dim = map_dim
        self.conv1 = nn.Conv2d(1, map_dim, kernel_size=kernel_size, padding=padding)
        self.conv2 = nn.Conv2d(map_dim, 1, kernel_size=1)
        self.textfc = nn.Linear(text_dim, map_dim)

    def forward(self, input_image_feat, input_text, input_image_attention1=None, input_image_attention2=None):
        att_mapped = self.conv1(input_image_attention1)  # (N, map_dim, H, W)
        gate = self.textfc(input_text).view(-1, self.map_dim, 1, 1)
        gate = gate.expand_as(att_mapped)
        joint = F.normalize(att_mapped * gate, p=2, dim=1)  # (N, map_dim, H, W)
        return self.conv2(joint)  # (N, 1, H, W)
class AndModule(nn.Module):
    """Elementwise combination of two attention grids."""
    def __init__(self):
        super(AndModule,self).__init__()
    def forward(self, input_image_feat, input_text, input_image_attention1=None, input_image_attention2=None):
        # NOTE(review): logical AND over soft attention maps is conventionally
        # the elementwise minimum (intersection); this takes the maximum while
        # OrModule takes the minimum. Confirm the two are not swapped.
        return torch.max(input_image_attention1, input_image_attention2)
class OrModule(nn.Module):
    """Elementwise combination of two attention grids."""
    def __init__(self):
        super(OrModule,self).__init__()
    def forward(self, input_image_feat, input_text, input_image_attention1=None, input_image_attention2=None):
        # NOTE(review): logical OR over soft attention maps is conventionally
        # the elementwise maximum (union); this takes the minimum while
        # AndModule takes the maximum. Confirm the two are not swapped.
        return torch.min(input_image_attention1, input_image_attention2)
class CountModule(nn.Module):
    """Score answer choices from an attention map: the flattened map plus its
    mean/min/max summary statistics feed a single linear layer."""
    def __init__(self,output_num_choice, image_height, image_width):
        super(CountModule, self).__init__()
        self.out_num_choice = output_num_choice
        # +3 for the mean, min, and max summary columns.
        self.lc_out = nn.Linear(image_height*image_width + 3, self.out_num_choice)

    def forward(self, input_image_feat, input_text, input_image_attention1=None, input_image_attention2=None):
        H, W = input_image_attention1.shape[2:4]
        flat = input_image_attention1.view(-1, H * W)  # [N, H*W]
        parts = [
            flat,
            torch.mean(flat, 1, keepdim=True),
            torch.min(flat, 1, keepdim=True)[0],
            torch.max(flat, 1, keepdim=True)[0],
        ]
        return self.lc_out(torch.cat(parts, 1))  # [N, out_num_choice]
class ExistModule(nn.Module):
    """Score existence answers from an attention map: the flattened map plus
    its mean/min/max summary statistics feed a single linear layer."""
    def __init__(self,output_num_choice, image_height, image_width):
        super(ExistModule, self).__init__()
        self.out_num_choice = output_num_choice
        # +3 for the mean, min, and max summary columns.
        self.lc_out = nn.Linear(image_height*image_width + 3, self.out_num_choice)

    def forward(self, input_image_feat, input_text, input_image_attention1=None, input_image_attention2=None):
        H, W = input_image_attention1.shape[2:4]
        flat = input_image_attention1.view(-1, H * W)  # [N, H*W]
        parts = [
            flat,
            torch.mean(flat, 1, keepdim=True),
            torch.min(flat, 1, keepdim=True)[0],
            torch.max(flat, 1, keepdim=True)[0],
        ]
        return self.lc_out(torch.cat(parts, 1))  # [N, out_num_choice]
class EqualNumModule(nn.Module):
    """Score choices from the concatenated flattened-plus-summary features of
    two attention maps through one linear layer."""
    def __init__(self,output_num_choice, image_height, image_width):
        super(EqualNumModule, self).__init__()
        self.out_num_choice = output_num_choice
        # Two flattened maps plus mean/min/max for each (+6).
        self.lc_out = nn.Linear(image_height*image_width *2 + 6, self.out_num_choice)

    @staticmethod
    def _summary(att, spatial):
        """Flatten attention to [N, spatial] and append mean/min/max columns."""
        flat = att.view(-1, spatial)
        return [
            flat,
            torch.mean(flat, 1, keepdim=True),
            torch.min(flat, 1, keepdim=True)[0],
            torch.max(flat, 1, keepdim=True)[0],
        ]

    def forward(self, input_image_feat, input_text, input_image_attention1=None, input_image_attention2=None):
        H, W = input_image_attention1.shape[2:4]
        parts = self._summary(input_image_attention1, H * W)
        parts += self._summary(input_image_attention2, H * W)
        return self.lc_out(torch.cat(parts, 1))  # [N, out_num_choice]
class MoreNumModule(nn.Module):
    """Score choices from the concatenated flattened-plus-summary features of
    two attention maps through one linear layer."""
    def __init__(self, output_num_choice, image_height, image_width):
        super(MoreNumModule, self).__init__()
        self.out_num_choice = output_num_choice
        # Two flattened maps plus mean/min/max for each (+6).
        self.lc_out = nn.Linear(image_height * image_width * 2 + 6, self.out_num_choice)

    @staticmethod
    def _summary(att, spatial):
        """Flatten attention to [N, spatial] and append mean/min/max columns."""
        flat = att.view(-1, spatial)
        return [
            flat,
            torch.mean(flat, 1, keepdim=True),
            torch.min(flat, 1, keepdim=True)[0],
            torch.max(flat, 1, keepdim=True)[0],
        ]

    def forward(self, input_image_feat, input_text, input_image_attention1=None, input_image_attention2=None):
        H, W = input_image_attention1.shape[2:4]
        parts = self._summary(input_image_attention1, H * W)
        parts += self._summary(input_image_attention2, H * W)
        return self.lc_out(torch.cat(parts, 1))  # [N, out_num_choice]
class LessNumModule(nn.Module):
    """Score choices from the concatenated flattened-plus-summary features of
    two attention maps through one linear layer."""
    def __init__(self, output_num_choice, image_height, image_width):
        super(LessNumModule, self).__init__()
        self.out_num_choice = output_num_choice
        # Two flattened maps plus mean/min/max for each (+6).
        self.lc_out = nn.Linear(image_height * image_width * 2 + 6, self.out_num_choice)

    @staticmethod
    def _summary(att, spatial):
        """Flatten attention to [N, spatial] and append mean/min/max columns."""
        flat = att.view(-1, spatial)
        return [
            flat,
            torch.mean(flat, 1, keepdim=True),
            torch.min(flat, 1, keepdim=True)[0],
            torch.max(flat, 1, keepdim=True)[0],
        ]

    def forward(self, input_image_feat, input_text, input_image_attention1=None, input_image_attention2=None):
        H, W = input_image_attention1.shape[2:4]
        parts = self._summary(input_image_attention1, H * W)
        parts += self._summary(input_image_attention2, H * W)
        return self.lc_out(torch.cat(parts, 1))  # [N, out_num_choice]
class SamePropertyModule(nn.Module):
    """Answer whether two attended regions share a property: pool image
    features under each attention, gate both by the text, and classify."""
    def __init__(self,output_num_choice, image_dim, text_dim, map_dim):
        super(SamePropertyModule, self).__init__()
        self.out_num_choice = output_num_choice
        self.image_dim = image_dim
        self.text_fc = nn.Linear(text_dim, map_dim)
        self.att_fc_1 = nn.Linear(image_dim, map_dim)
        self.att_fc_2 = nn.Linear(image_dim, map_dim)
        self.lc_out = nn.Linear(map_dim, self.out_num_choice)

    def forward(self, input_image_feat, input_text, input_image_attention1=None, input_image_attention2=None):
        H, W = input_image_attention1.shape[2:4]
        feats = input_image_feat.view(-1, self.image_dim, H * W)
        # Softmax-normalized attention weights for each map.
        w1 = F.softmax(input_image_attention1.view(-1, H * W), dim=1).view(-1, 1, H * W)
        w2 = F.softmax(input_image_attention2.view(-1, H * W), dim=1).view(-1, 1, H * W)
        # Attention-weighted feature averages, mapped to the joint space.
        pooled1 = self.att_fc_1((w1 * feats).sum(dim=2))  # [N, map_dim]
        pooled2 = self.att_fc_2((w2 * feats).sum(dim=2))
        text_mapped = self.text_fc(input_text)
        joint = F.normalize(pooled1 * text_mapped * pooled2, p=2, dim=1)
        return self.lc_out(joint)  # [N, out_num_choice]
class DescribeModule(nn.Module):
    """Describe the attended region: pool image features under the attention,
    gate by the text, and classify over the answer choices."""
    def __init__(self,output_num_choice, image_dim, text_dim, map_dim):
        super(DescribeModule, self).__init__()
        self.out_num_choice = output_num_choice
        self.image_dim = image_dim
        self.text_fc = nn.Linear(text_dim, map_dim)
        self.att_fc_1 = nn.Linear(image_dim, map_dim)
        self.lc_out = nn.Linear(map_dim, self.out_num_choice)

    def forward(self, input_image_feat, input_text, input_image_attention1=None, input_image_attention2=None):
        H, W = input_image_attention1.shape[2:4]
        weights = F.softmax(input_image_attention1.view(-1, H * W), dim=1).view(-1, 1, H * W)
        feats = input_image_feat.view(-1, self.image_dim, H * W)  # [N, image_dim, H*W]
        pooled = self.att_fc_1((weights * feats).sum(dim=2))  # [N, map_dim]
        joint = F.normalize(pooled * self.text_fc(input_text), p=2, dim=1)
        return self.lc_out(joint)  # [N, out_num_choice]
| 45.859712
| 115
| 0.691819
| 1,989
| 12,749
| 4.07089
| 0.055807
| 0.096332
| 0.086452
| 0.044461
| 0.866123
| 0.854514
| 0.833272
| 0.811906
| 0.801284
| 0.790169
| 0
| 0.030462
| 0.194054
| 12,749
| 278
| 116
| 45.859712
| 0.757567
| 0.045572
| 0
| 0.705314
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.135266
| false
| 0
| 0.019324
| 0.009662
| 0.289855
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
187809ac1b228c565e09356890163cc67930b85f
| 5,367
|
py
|
Python
|
scale/data/test/data/test_value.py
|
kaydoh/scale
|
1b6a3b879ffe83e10d3b9d9074835a4c3bf476ee
|
[
"Apache-2.0"
] | 121
|
2015-11-18T18:15:33.000Z
|
2022-03-10T01:55:00.000Z
|
scale/data/test/data/test_value.py
|
kaydoh/scale
|
1b6a3b879ffe83e10d3b9d9074835a4c3bf476ee
|
[
"Apache-2.0"
] | 1,415
|
2015-12-23T23:36:04.000Z
|
2022-01-07T14:10:09.000Z
|
scale/data/test/data/test_value.py
|
kaydoh/scale
|
1b6a3b879ffe83e10d3b9d9074835a4c3bf476ee
|
[
"Apache-2.0"
] | 66
|
2015-12-03T20:38:56.000Z
|
2020-07-27T15:28:11.000Z
|
from __future__ import unicode_literals
import django
from django.test.testcases import TestCase
from data.data.exceptions import InvalidData
from data.data.value import FileValue, JsonValue
from data.interface.parameter import FileParameter, JsonParameter
class TestFileValue(TestCase):
    """Tests related to the FileValue class"""

    def setUp(self):
        django.setup()

    def test_validate(self):
        """FileValue.validate() rejects type mismatches and bad file counts."""
        file_param = FileParameter('input_1', ['application/json'])
        json_param = JsonParameter('input_1', 'string')

        # A file value validated against a json parameter is a type mismatch
        file_value = FileValue('input_1', [1234, 1235])
        with self.assertRaises(InvalidData) as context:
            file_value.validate(json_param)
        self.assertEqual(context.exception.error.name, 'MISMATCHED_PARAM_TYPE')

        # An empty file list is rejected
        file_value = FileValue('input_1', [])
        with self.assertRaises(InvalidData) as context:
            file_value.validate(file_param)
        self.assertEqual(context.exception.error.name, 'NO_FILES')

        # More than one file is rejected for a single-file parameter
        file_value = FileValue('input_1', [1234, 1235])
        with self.assertRaises(InvalidData) as context:
            file_value.validate(file_param)
        self.assertEqual(context.exception.error.name, 'MULTIPLE_FILES')

        # Exactly one file validates cleanly with no warnings
        file_value = FileValue('input_1', [1234])
        self.assertListEqual(file_value.validate(file_param), [])
class TestJsonValue(TestCase):
    """Tests related to the JsonValue class"""

    def setUp(self):
        django.setup()

    def _assert_invalid_type(self, json_type, value):
        """Assert validating *value* against a *json_type* parameter fails
        with INVALID_JSON_TYPE."""
        json_param = JsonParameter('input_1', json_type)
        with self.assertRaises(InvalidData) as context:
            JsonValue('input_1', value).validate(json_param)
        self.assertEqual(context.exception.error.name, 'INVALID_JSON_TYPE')

    def _assert_valid(self, json_type, value):
        """Assert validating *value* against *json_type* yields no warnings."""
        json_param = JsonParameter('input_1', json_type)
        self.assertListEqual(JsonValue('input_1', value).validate(json_param), [])

    def test_validate(self):
        """Tests calling JsonValue.validate()"""
        # A json value validated against a file parameter is a type mismatch
        file_param = FileParameter('input_1', ['application/json'])
        with self.assertRaises(InvalidData) as context:
            JsonValue('input_1', 'hello').validate(file_param)
        self.assertEqual(context.exception.error.name, 'MISMATCHED_PARAM_TYPE')

        # Values that do not match the declared JSON type are rejected
        # (duplicated try/except blocks consolidated into _assert_invalid_type)
        for json_type, bad_value in [('array', 123), ('boolean', 123),
                                     ('integer', 123.5), ('number', 'foo'),
                                     ('object', 123), ('string', 123)]:
            self._assert_invalid_type(json_type, bad_value)

        # Values matching the declared JSON type validate with no warnings
        for json_type, good_value in [('array', [1, 2, 3]), ('boolean', True),
                                      ('integer', 1234), ('number', 1234.5),
                                      ('object', {'foo': 'bar'}), ('string', 'hello')]:
            self._assert_valid(json_type, good_value)
| 37.531469
| 79
| 0.661636
| 591
| 5,367
| 5.796954
| 0.123519
| 0.057793
| 0.089901
| 0.110333
| 0.864273
| 0.849679
| 0.819031
| 0.807356
| 0.784005
| 0.728838
| 0
| 0.019627
| 0.231042
| 5,367
| 142
| 80
| 37.795775
| 0.810516
| 0.086641
| 0
| 0.758242
| 0
| 0
| 0.109764
| 0.008633
| 0
| 0
| 0
| 0
| 0.296703
| 1
| 0.043956
| false
| 0
| 0.065934
| 0
| 0.131868
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43e8cd923c082901769caa24f13d567ee9262e82
| 193
|
py
|
Python
|
product/product_brands.py
|
saiihamza/open_data_parsing
|
6757c6c6823a0523ca1d2af79e99b761b57a794d
|
[
"Apache-2.0"
] | null | null | null |
product/product_brands.py
|
saiihamza/open_data_parsing
|
6757c6c6823a0523ca1d2af79e99b761b57a794d
|
[
"Apache-2.0"
] | null | null | null |
product/product_brands.py
|
saiihamza/open_data_parsing
|
6757c6c6823a0523ca1d2af79e99b761b57a794d
|
[
"Apache-2.0"
] | null | null | null |
class ProductBrands(object):
    """Value object holding a product's brand string and its brand tags.

    The attribute names (Brands, BrandsTags) are kept as-is to preserve the
    existing public interface.
    """

    def __init__(self, brands, brands_tags):
        self.Brands = brands
        # presumably a normalized list/string of brand tags — TODO confirm
        self.BrandsTags = brands_tags

    def __repr__(self):
        # New: debug-friendly representation; __str__ behavior is unchanged.
        return "{}(brands={!r}, brands_tags={!r})".format(
            type(self).__name__, self.Brands, self.BrandsTags
        )

    def __str__(self):
        return self.Brands
| 21.444444
| 44
| 0.658031
| 22
| 193
| 5.318182
| 0.5
| 0.25641
| 0.273504
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.259067
| 193
| 8
| 45
| 24.125
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
a12d55c1938a278f539cc87fda6d8c5712a4518b
| 3,477
|
py
|
Python
|
model/vggish_utils/vggish_model_architecture.py
|
interactiveaudiolab/voogle
|
9654d8602b97bbe5d161092edbe9e4abd9833eeb
|
[
"MIT"
] | 28
|
2019-07-25T08:23:45.000Z
|
2022-03-27T06:14:32.000Z
|
model/vggish_utils/vggish_model_architecture.py
|
interactiveaudiolab/voogle
|
9654d8602b97bbe5d161092edbe9e4abd9833eeb
|
[
"MIT"
] | null | null | null |
model/vggish_utils/vggish_model_architecture.py
|
interactiveaudiolab/voogle
|
9654d8602b97bbe5d161092edbe9e4abd9833eeb
|
[
"MIT"
] | 1
|
2021-09-21T08:26:28.000Z
|
2021-09-21T08:26:28.000Z
|
import torch
import torch.nn as nn
class VGGish(nn.Module):
    """VGG-style convolutional feature extractor; returns the concatenation of
    two pooled embeddings (taken after conv4_1 and conv4_2), flattened to 1-D."""
    def __init__(self):
        super(VGGish, self).__init__()
        self.layer1_conv1 = nn.Sequential(nn.Conv2d(1, 64, kernel_size=3, stride=1, padding=1), nn.ReLU())
        self.layer2_pool1 = nn.MaxPool2d(kernel_size=2, stride=2)
        self.layer3_conv2 = nn.Sequential(nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1), nn.ReLU())
        self.layer4_pool2 = nn.MaxPool2d(kernel_size=2, stride=2)
        self.layer5_conv3_1 = nn.Sequential(nn.Conv2d(128, 256, kernel_size=3, stride=1, padding=1), nn.ReLU())
        self.layer6_conv3_2 = nn.Sequential(nn.Conv2d(256, 256, kernel_size=3, stride=1, padding=1), nn.ReLU())
        self.layer7_pool3 = nn.MaxPool2d(kernel_size=2, stride=2)
        self.layer8_conv4_1 = nn.Sequential(nn.Conv2d(256, 512, kernel_size=3, stride=1, padding=1), nn.ReLU())
        self.layer9_conv4_2 = nn.Sequential(nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1), nn.ReLU())
        # Defined but never used in forward; kept for state-dict compatibility.
        self.layer10_pool4 = nn.MaxPool2d(kernel_size=2, stride=2)

    @staticmethod
    def _flat_mean(feat):
        """Average over dim=2 and flatten to [N, C * remaining]."""
        emb = torch.mean(feat, dim=2)
        return emb.view(emb.size(0), -1)

    def forward(self, x):
        # [N, H, W] -> [N, 1, H, W] single-channel input.
        x = x.view(x.size(0), 1, x.size(1), x.size(2))
        out = self.layer2_pool1(self.layer1_conv1(x))
        out = self.layer4_pool2(self.layer3_conv2(out))
        out = self.layer6_conv3_2(self.layer5_conv3_1(out))
        out = self.layer7_pool3(out)
        out = self.layer8_conv4_1(out)
        emb1 = self._flat_mean(out)
        out = self.layer9_conv4_2(out)
        emb2 = self._flat_mean(out)
        return torch.cat((emb1, emb2), dim=1).view(-1)
class VGGish2s(nn.Module):
    """Variant of ``VGGish`` that averages embeddings over ``dim=0``.

    Identical layer stack to ``VGGish``; the only difference is that the two
    embeddings are means over ``dim=0`` (the leading/batch axis) instead of
    ``dim=2``. NOTE(review): whether dim=0 here is intentional (e.g. frames
    stacked along the batch axis for 2s segments) cannot be confirmed from
    this file — preserved as written.

    NOTE(review): ``layer10_pool4`` is instantiated but never applied in
    ``forward``.
    """

    def __init__(self):
        super(VGGish2s, self).__init__()

        def conv_block(c_in, c_out):
            # 3x3 same-padding convolution followed by ReLU.
            return nn.Sequential(
                nn.Conv2d(c_in, c_out, kernel_size=3, stride=1, padding=1),
                nn.ReLU())

        def halving_pool():
            # 2x2 max-pool that halves both spatial dimensions.
            return nn.MaxPool2d(kernel_size=2, stride=2)

        # Attribute names (and creation order) are kept identical to the
        # original so state_dict keys and RNG-driven init match exactly.
        self.layer1_conv1 = conv_block(1, 64)
        self.layer2_pool1 = halving_pool()
        self.layer3_conv2 = conv_block(64, 128)
        self.layer4_pool2 = halving_pool()
        self.layer5_conv3_1 = conv_block(128, 256)
        self.layer6_conv3_2 = conv_block(256, 256)
        self.layer7_pool3 = halving_pool()
        self.layer8_conv4_1 = conv_block(256, 512)
        self.layer9_conv4_2 = conv_block(512, 512)
        self.layer10_pool4 = halving_pool()  # defined but unused in forward

    def forward(self, x):
        """Return a flat embedding vector for input ``x`` of shape (B, H, W)."""
        # Insert the singleton channel dimension expected by Conv2d.
        feats = x.view(x.size(0), 1, x.size(1), x.size(2))
        # Shared trunk up to (and including) conv4_1.
        for stage in (self.layer1_conv1, self.layer2_pool1,
                      self.layer3_conv2, self.layer4_pool2,
                      self.layer5_conv3_1, self.layer6_conv3_2,
                      self.layer7_pool3, self.layer8_conv4_1):
            feats = stage(feats)
        # First embedding: conv4_1 activations averaged over dim 0, flattened.
        emb_a = torch.mean(feats, dim=0)
        emb_a = emb_a.view(emb_a.size(0), -1)
        # Second embedding: one more conv, averaged the same way.
        feats = self.layer9_conv4_2(feats)
        emb_b = torch.mean(feats, dim=0)
        emb_b = emb_b.view(emb_b.size(0), -1)
        # Concatenate both embeddings and flatten to a single vector.
        return torch.cat((emb_a, emb_b), dim=1).view(-1)
| 36.6
| 109
| 0.642508
| 559
| 3,477
| 3.799642
| 0.100179
| 0.094162
| 0.079096
| 0.112994
| 0.968927
| 0.968927
| 0.944444
| 0.944444
| 0.944444
| 0.944444
| 0
| 0.100327
| 0.208801
| 3,477
| 94
| 110
| 36.989362
| 0.671756
| 0
| 0
| 0.83871
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064516
| false
| 0
| 0.032258
| 0
| 0.16129
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a142e9180b738573163a7dd6cf5e663e1c276c97
| 81,385
|
py
|
Python
|
tests/test_rules.py
|
scottporter/quantipy
|
82ef628d089ea9dbc47c91e842e50dd9e87bb29e
|
[
"MIT"
] | 67
|
2015-07-29T18:39:46.000Z
|
2022-01-10T12:32:26.000Z
|
tests/test_rules.py
|
scottporter/quantipy
|
82ef628d089ea9dbc47c91e842e50dd9e87bb29e
|
[
"MIT"
] | 1,052
|
2015-07-10T15:14:17.000Z
|
2021-11-14T11:14:58.000Z
|
tests/test_rules.py
|
scottporter/quantipy
|
82ef628d089ea9dbc47c91e842e50dd9e87bb29e
|
[
"MIT"
] | 15
|
2016-04-06T14:40:08.000Z
|
2020-08-12T18:36:30.000Z
|
import unittest
import os.path
import numpy as np
import pandas as pd
from pandas.util.testing import assert_frame_equal
import test_helper
import copy
from operator import lt, le, eq, ne, ge, gt
from pandas.core.index import Index
# Display symbols for pandas Index set operations (used elsewhere in the
# file when describing combined index expectations).
# NOTE(review): Index.sym_diff belongs to an old pandas API (later renamed
# symmetric_difference) — this module targets that legacy version.
__index_symbol__ = {
    Index.union: ',',
    Index.intersection: '&',
    Index.difference: '~',
    Index.sym_diff: '^'
}
from collections import defaultdict, OrderedDict
from quantipy.core.stack import Stack
from quantipy.core.chain import Chain
from quantipy.core.link import Link
from quantipy.core.view_generators.view_mapper import ViewMapper
from quantipy.core.view_generators.view_maps import QuantipyViews
from quantipy.core.view import View
from quantipy.core.helpers import functions
from quantipy.core.helpers.functions import load_json
from quantipy.core.tools.dp.prep import (
frange,
frequency,
crosstab
)
from quantipy.core.tools.view.query import get_dataframe
from quantipy.core.dataset import DataSet
# When True, the test methods below also run their (slow) extra rule
# permutations guarded by `if EXTENDED_TESTS:`.
EXTENDED_TESTS = False
# Module-level counter; not used within this visible portion of the file.
COUNTER = 0
class TestRules(unittest.TestCase):
    def setUp(self):
        """Load the 'Example Data (A)' case data (csv) and meta (json) from
        ./tests/ and expose commonly used column names on the instance."""
        self.path = './tests/'
        project_name = 'Example Data (A)'
        # Load Example Data (A) data and meta into self
        name_data = '%s.csv' % (project_name)
        path_data = '%s%s' % (self.path, name_data)
        # NOTE(review): pd.DataFrame.from_csv was deprecated and later removed
        # from pandas (use pd.read_csv) — this code targets an old pandas.
        self.example_data_A_data = pd.DataFrame.from_csv(path_data)
        name_meta = '%s.json' % (project_name)
        path_meta = '%s%s' % (self.path, name_meta)
        self.example_data_A_meta = load_json(path_meta)
        # Variables by type for Example Data A
        self.dk = 'Example Data (A)'
        self.fk = 'no_filter'
        self.single = ['gender', 'locality', 'ethnicity', 'religion', 'q1']
        self.delimited_set = ['q2', 'q3', 'q8', 'q9']
        self.q5 = ['q5_1', 'q5_2', 'q5_3']
    def test_slicex(self):
        """slicex rule: crosstabs keep only the listed values, in the listed
        order, on the x and y axes respectively (weighted and unweighted)."""
        meta = self.example_data_A_meta
        data = self.example_data_A_data
        col_x = 'religion'
        col_y = 'ethnicity'
        ################## values
        meta['columns'][col_x]['rules'] = {
            'x': {'slicex': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}}
        meta['columns'][col_y]['rules'] = {
            'y': {'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
        # Expected index items are identical for unweighted and weighted runs
        # because slicex does not reorder by counts.
        rules_values_x = {
            'unwtd': index_items(col_x, all=True,
                                 values=[1, 3, 5, 7, 9, 11, 13, 15]),
            'iswtd': index_items(col_x, all=True,
                                 values=[1, 3, 5, 7, 9, 11, 13, 15])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=True,
                                 values=[2, 4, 6, 8, 10, 12, 14, 16]),
            'iswtd': index_items(col_y, all=True,
                                 values=[2, 4, 6, 8, 10, 12, 14, 16])}
        confirm_crosstabs(
            self,
            meta, data,
            [None, 'weight_a'],
            col_x, col_y,
            rules_values_x,
            rules_values_y)
def _get_dataset(self):
meta = self.example_data_A_meta
data = self.example_data_A_data
dataset = DataSet('rules_test')
dataset.set_verbose_infomsg(False)
dataset.from_components(data, meta)
return dataset
def _get_stack_with_links(self, dataset, x=None, y=None, w=None):
stack = Stack()
stack.add_data(dataset.name, dataset._data, dataset._meta)
if not x: x = '@'
if not y: y = '@'
stack.add_link(x=x, y=y, weights=w)
return stack
    def test_sortx_summaries_mean(self):
        """sorting(on='mean'): array summary rows come back ordered by item
        means, while chain view data still equals the stack link data."""
        dataset = self._get_dataset()
        x = 'q5'
        y = '@'
        dataset.sorting(x, on='mean')
        stack = self._get_stack_with_links(dataset, x)
        stack.add_link(x=x, y=y, views=['cbase', 'counts', 'c%', 'mean'])
        # Quantipy view keys for the four requested views.
        vks = ['x|f|x:|||cbase', 'x|f|:|||counts', 'x|f|:|y||c%',
               'x|d.mean|x:|||mean']
        chains = stack.get_chain(data_keys=dataset.name,
                                 filters='no_filter',
                                 x=[x], y=[y], rules=True,
                                 views=vks,
                                 orient_on='x')
        chain = chains[0]
        for vk in vks:
            v = chain['rules_test']['no_filter'][x][y][vk]
            l = stack['rules_test']['no_filter'][x][y][vk]
            # Same data as the unruled stack view, just reindexed...
            check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe)
            self.assertTrue(check_chain_view_dataframe.equals(l.dataframe))
            # ...with the items in the expected sorted order.
            actual_order = v.dataframe.index.get_level_values(1).tolist()
            expected_order = ['q5_4', 'q5_6', 'q5_1', 'q5_3', 'q5_5', 'q5_2']
            self.assertEqual(actual_order, expected_order)
    def test_sortx_summaries_value(self):
        """sorting(on=3, ascending=True): array summary rows are ordered by
        the counts of answer code 3, ascending."""
        dataset = self._get_dataset()
        x = 'q5'
        y = '@'
        dataset.sorting(x, on=3, ascending=True)
        stack = self._get_stack_with_links(dataset, x)
        stack.add_link(x=x, y=y, views=['cbase', 'counts', 'c%', 'mean'])
        # Quantipy view keys for the four requested views.
        vks = ['x|f|x:|||cbase', 'x|f|:|||counts', 'x|f|:|y||c%',
               'x|d.mean|x:|||mean']
        chains = stack.get_chain(data_keys=dataset.name,
                                 filters='no_filter',
                                 x=[x], y=[y], rules=True,
                                 views=vks,
                                 orient_on='x')
        chain = chains[0]
        for vk in vks:
            v = chain['rules_test']['no_filter'][x][y][vk]
            l = stack['rules_test']['no_filter'][x][y][vk]
            # Same data as the unruled stack view, just reindexed...
            check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe)
            self.assertTrue(check_chain_view_dataframe.equals(l.dataframe))
            # ...with the items in the expected sorted order.
            actual_order = v.dataframe.index.get_level_values(1).tolist()
            expected_order = ['q5_4', 'q5_5', 'q5_6', 'q5_1', 'q5_3', 'q5_2']
            self.assertEqual(actual_order, expected_order)
    def test_sortx_summaries_items(self):
        """sorting(on='q5_2', ascending=False): with the array on the y-axis,
        value codes are ordered by the q5_2 item (mean/cbase views excluded
        from the order check as they do not carry the value-level index)."""
        dataset = self._get_dataset()
        x = '@'
        y = 'q5'
        dataset.sorting(y, on='q5_2', ascending=False)
        stack = self._get_stack_with_links(dataset, y=y)
        stack.add_link(x=x, y=y, views=['cbase', 'counts', 'c%', 'mean'])
        # Quantipy view keys for the four requested views.
        vks = ['x|f|x:|||cbase', 'x|f|:|||counts', 'x|f|:|y||c%',
               'x|d.mean|x:|||mean']
        chains = stack.get_chain(data_keys=dataset.name,
                                 filters='no_filter',
                                 x=[x], y=[y], rules=True,
                                 views=vks,
                                 orient_on='x')
        chain = chains[0]
        for vk in vks:
            v = chain['rules_test']['no_filter'][x][y][vk]
            l = stack['rules_test']['no_filter'][x][y][vk]
            # Only frequency-style views expose the sorted value-level index.
            if not 'd.mean' in vk and not 'cbase' in vk:
                check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe)
                self.assertTrue(check_chain_view_dataframe.equals(l.dataframe))
                actual_order = v.dataframe.index.get_level_values(1).tolist()
                expected_order = [3, 5, 98, 2, 1, 97, 4]
                self.assertEqual(actual_order, expected_order)
    def test_sortx_expand_net_within(self):
        """Expanded-net view with within=True sorting: codes are sorted
        inside each net group; net blocks keep their order; 98 stays fixed
        at the end."""
        dataset = self._get_dataset()
        x = 'q2'
        y = ['@', 'gender']
        dataset.sorting(x, on='@', within=True, between=False, fix=98)
        stack = self._get_stack_with_links(dataset, x=x, y=y)
        # Two named nets with language-tagged labels.
        net = [{'test A': [1, 2, 3], 'text': {'en-GB': 'Lab1'}},
               {'test B': [5, 6, 97], 'text': {'en-GB': 'Lab2'}}]
        net_view = ViewMapper().make_template('frequency')
        view_name = 'expandnet'
        options = {'logic': net,
                   'expand': 'after',
                   'complete': True,
                   'axis': 'x',
                   'iterators': {'rel_to': [None, 'y']}}
        net_view.add_method(view_name, kwargs=options)
        stack.add_link(x=x, y=y, views=net_view)
        vks = ['x|f|x[{1,2,3}+],x[{5,6,97}+]*:|||expandnet',
               'x|f|x[{1,2,3}+],x[{5,6,97}+]*:|y||expandnet']
        chains = stack.get_chain(data_keys=dataset.name,
                                 filters='no_filter',
                                 x=[x], y=y, rules=True,
                                 views=vks,
                                 orient_on='x')
        chain = chains[0]
        for yk in y:
            for vk in vks:
                v = chain['rules_test']['no_filter'][x][yk][vk]
                l = stack['rules_test']['no_filter'][x][yk][vk]
                # Same data as the unruled stack view, just reindexed...
                check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe)
                self.assertTrue(check_chain_view_dataframe.equals(l.dataframe))
                # ...codes sorted within each net, 98 fixed last.
                actual_order = v.dataframe.index.get_level_values(1).tolist()
                expected_order = ['test A', 3, 2, 1, 4, 'test B', 97, 5, 6, 98]
                self.assertEqual(actual_order, expected_order)
    def test_sortx_expand_net_between(self):
        """Expanded-net view with between=True, ascending sorting: the net
        blocks themselves are reordered while their member codes keep their
        original order; 98 stays fixed at the end."""
        dataset = self._get_dataset()
        x = 'q2'
        y = ['@', 'gender']
        dataset.sorting(x, on='@', within=False, between=True, ascending=True,
                        fix=98)
        stack = self._get_stack_with_links(dataset, x=x, y=y)
        # Two named nets with language-tagged labels.
        net = [{'test A': [1, 2, 3], 'text': {'en-GB': 'Lab1'}},
               {'test B': [5, 6, 97], 'text': {'en-GB': 'Lab2'}}]
        net_view = ViewMapper().make_template('frequency')
        view_name = 'expandnet'
        options = {'logic': net,
                   'expand': 'after',
                   'complete': True,
                   'axis': 'x',
                   'iterators': {'rel_to': [None, 'y']}}
        net_view.add_method(view_name, kwargs=options)
        stack.add_link(x=x, y=y, views=net_view)
        vks = ['x|f|x[{1,2,3}+],x[{5,6,97}+]*:|||expandnet',
               'x|f|x[{1,2,3}+],x[{5,6,97}+]*:|y||expandnet']
        chains = stack.get_chain(data_keys=dataset.name,
                                 filters='no_filter',
                                 x=[x], y=y, rules=True,
                                 views=vks,
                                 orient_on='x')
        chain = chains[0]
        for yk in y:
            for vk in vks:
                v = chain['rules_test']['no_filter'][x][yk][vk]
                l = stack['rules_test']['no_filter'][x][yk][vk]
                # Same data as the unruled stack view, just reindexed...
                check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe)
                self.assertTrue(check_chain_view_dataframe.equals(l.dataframe))
                # ...net blocks reordered, member codes untouched, 98 last.
                actual_order = v.dataframe.index.get_level_values(1).tolist()
                expected_order = [4, 'test B', 5, 6, 97, 'test A', 1, 2, 3, 98]
                self.assertEqual(actual_order, expected_order)
    def test_sortx_expand_net_within_between(self):
        """Expanded-net view with within=True AND between=True (descending):
        both the net blocks and their member codes are reordered; a column
        significance-test view is added to confirm rules apply to it too."""
        dataset = self._get_dataset()
        x = 'q2'
        y = ['@', 'gender']
        dataset.sorting(x, on='@', within=True, between=True, ascending=False,
                        fix=98)
        stack = self._get_stack_with_links(dataset, x=x, y=y)
        # Two named nets with language-tagged labels.
        net = [{'test A': [1, 2, 3], 'text': {'en-GB': 'Lab1'}},
               {'test B': [5, 6, 97], 'text': {'en-GB': 'Lab2'}}]
        net_view = ViewMapper().make_template('frequency')
        view_name = 'expandnet'
        options = {'logic': net,
                   'expand': 'after',
                   'complete': True,
                   'axis': 'x',
                   'iterators': {'rel_to': [None, 'y']}}
        net_view.add_method(view_name, kwargs=options)
        stack.add_link(x=x, y=y, views=net_view)
        # Column proportion tests at the 0.2 level on the same nets.
        test_view = ViewMapper().make_template('coltests')
        view_name = 'test'
        options = {'level': 0.2}
        test_view.add_method(view_name, kwargs=options)
        stack.add_link(x=x, y=y, views=test_view)
        vks = ['x|f|x[{1,2,3}+],x[{5,6,97}+]*:|||expandnet',
               'x|f|x[{1,2,3}+],x[{5,6,97}+]*:|y||expandnet',
               'x|t.props.Dim.20|x[{1,2,3}+],x[{5,6,97}+]*:|||test']
        chains = stack.get_chain(data_keys=dataset.name,
                                 filters='no_filter',
                                 x=[x], y=y, rules=True,
                                 views=vks,
                                 orient_on='x')
        chain = chains[0]
        for yk in y:
            for vk in vks:
                v = chain['rules_test']['no_filter'][x][yk][vk]
                l = stack['rules_test']['no_filter'][x][yk][vk]
                # Same data as the unruled stack view, just reindexed...
                check_chain_view_dataframe = v.dataframe.reindex_like(l.dataframe)
                self.assertTrue(check_chain_view_dataframe.equals(l.dataframe))
                # ...fully resorted within and between nets, 98 fixed last.
                actual_order = v.dataframe.index.get_level_values(1).tolist()
                expected_order = ['test A', 3, 2, 1, 'test B', 97, 5, 6, 4, 98]
                self.assertEqual(actual_order, expected_order)
    def test_sortx(self):
        """sortx rule on crosstabs: default sort, explicit sort_on='@',
        'fixed' trailing codes and sorting 'with_weight' a different weight.
        Expected orders differ between unweighted ('unwtd') and weighted
        ('iswtd') runs because counts differ."""
        meta = self.example_data_A_meta
        data = self.example_data_A_data
        col_x = 'religion'
        col_y = 'ethnicity'
        ################## sort_on - default
        meta['columns'][col_x]['rules'] = {'x': {'sortx': {}}}
        meta['columns'][col_y]['rules'] = {'y': {'sortx': {}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=True,
                values=[2, 1, 3, 15, 4, 5, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9]),
            'iswtd': index_items(col_x, all=True,
                values=[2, 1, 3, 15, 4, 5, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=True,
                values=[1, 2, 16, 7, 15, 12, 3, 11, 14, 6, 8, 10, 9, 5, 4, 13]),
            'iswtd': index_items(col_y, all=True,
                values=[1, 2, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 6, 4, 13])}
        confirm_crosstabs(
            self,
            meta, data,
            [None, 'weight_a'],
            col_x, col_y,
            rules_values_x,
            rules_values_y)
        ################## sort_on - '@'
        # Explicit sort_on='@' must behave exactly like the default above.
        meta['columns'][col_x]['rules'] = {
            'x': {'sortx': {'sort_on': '@'}}}
        meta['columns'][col_y]['rules'] = {
            'y': {'sortx': {'sort_on': '@'}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=True,
                values=[2, 1, 3, 15, 4, 5, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9]),
            'iswtd': index_items(col_x, all=True,
                values=[2, 1, 3, 15, 4, 5, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=True,
                values=[1, 2, 16, 7, 15, 12, 3, 11, 14, 6, 8, 10, 9, 5, 4, 13]),
            'iswtd': index_items(col_y, all=True,
                values=[1, 2, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 6, 4, 13])}
        confirm_crosstabs(
            self,
            meta, data,
            [None, 'weight_a'],
            col_x, col_y,
            rules_values_x,
            rules_values_y)
        ################## fixed
        # 'fixed' codes are excluded from sorting and appended at the end.
        meta['columns'][col_x]['rules'] = {
            'x': {'sortx': {'fixed': [5, 1, 3]}}}
        meta['columns'][col_y]['rules'] = {
            'y': {'sortx': {'fixed': [6, 2, 4]}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=True,
                values=[2, 15, 4, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9, 5, 1, 3]),
            'iswtd': index_items(col_x, all=True,
                values=[2, 15, 4, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8, 5, 1, 3])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=True,
                values=[1, 16, 7, 15, 12, 3, 11, 14, 8, 10, 9, 5, 13, 6, 2, 4]),
            'iswtd': index_items(col_y, all=True,
                values=[1, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 13, 6, 2, 4])}
        confirm_crosstabs(
            self,
            meta, data,
            [None, 'weight_a'],
            col_x, col_y,
            rules_values_x,
            rules_values_y)
        ################## with_weight
        # Sorting driven by 'weight_b' counts, so unwtd and iswtd orders match.
        meta['columns'][col_x]['rules'] = {
            'x': {'sortx': {'with_weight': 'weight_b'}}}
        meta['columns'][col_y]['rules'] = {
            'y': {'sortx': {'with_weight': 'weight_b'}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=True,
                values=[2, 1, 3, 15, 4, 5, 16, 12, 6, 10, 14, 11, 7, 13, 9, 8]),
            'iswtd': index_items(col_x, all=True,
                values=[2, 1, 3, 15, 4, 5, 16, 12, 6, 10, 14, 11, 7, 13, 9, 8])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=True,
                values=[1, 2, 16, 7, 11, 3, 12, 15, 8, 9, 10, 5, 14, 6, 4, 13]),
            'iswtd': index_items(col_y, all=True,
                values=[1, 2, 16, 7, 11, 3, 12, 15, 8, 9, 10, 5, 14, 6, 4, 13])}
        confirm_crosstabs(
            self,
            meta, data,
            [None, 'weight_a'],
            col_x, col_y,
            rules_values_x,
            rules_values_y)
    def test_dropx(self):
        """dropx rule: the listed values are removed from the crosstab index,
        all remaining values stay in their natural order."""
        meta = self.example_data_A_meta
        data = self.example_data_A_data
        col_x = 'religion'
        col_y = 'ethnicity'
        ################## values
        meta['columns'][col_x]['rules'] = {
            'x': {'dropx': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}}
        meta['columns'][col_y]['rules'] = {
            'y': {'dropx': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
        # Dropping odds leaves evens (x) and vice versa (y); order-independent
        # of weighting.
        rules_values_x = {
            'unwtd': index_items(col_x, all=True,
                                 values=[2, 4, 6, 8, 10, 12, 14, 16]),
            'iswtd': index_items(col_x, all=True,
                                 values=[2, 4, 6, 8, 10, 12, 14, 16])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=True,
                                 values=[1, 3, 5, 7, 9, 11, 13, 15]),
            'iswtd': index_items(col_y, all=True,
                                 values=[1, 3, 5, 7, 9, 11, 13, 15])}
        confirm_crosstabs(
            self,
            meta, data,
            [None, 'weight_a'],
            col_x, col_y,
            rules_values_x,
            rules_values_y)
    def test_rules_frequency(self):
        """Rules on single-variable frequencies: slicex, sortx, dropx applied
        alone and in every pairwise/triple combination on one column, with
        separate x- and y-axis rule sets."""
        meta = self.example_data_A_meta
        data = self.example_data_A_data
        col = 'religion'
        ################## slicex
        meta['columns'][col]['rules'] = {
            'x': {'slicex': {'values': [1, 3, 5, 7, 9, 10, 11, 13, 15]}},
            'y': {'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
        rules_values_x = {
            'unwtd': index_items(col, all=True,
                                 values=[1, 3, 5, 7, 9, 10, 11, 13, 15]),
            'iswtd': index_items(col, all=True,
                                 values=[1, 3, 5, 7, 9, 10, 11, 13, 15])}
        rules_values_y = {
            'unwtd': index_items(col, all=True,
                                 values=[2, 4, 6, 8, 10, 12, 14, 16]),
            'iswtd': index_items(col, all=True,
                                 values=[2, 4, 6, 8, 10, 12, 14, 16])}
        confirm_frequencies(
            self,
            meta, data,
            [None, 'weight_a'],
            col,
            rules_values_x,
            rules_values_y)
        ################## sortx
        meta['columns'][col]['rules'] = {
            'x': {'sortx': {'fixed': [5, 1, 3]}},
            'y': {'sortx': {'fixed': [6, 2, 4]}}}
        rules_values_x = {
            'unwtd': index_items(col, all=True,
                values=[2, 15, 4, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9, 5, 1, 3]),
            'iswtd': index_items(col, all=True,
                values=[2, 15, 4, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8, 5, 1, 3])}
        rules_values_y = {
            'unwtd': index_items(col, all=True,
                values=[1, 3, 15, 5, 16, 10, 12, 14, 11, 7, 13, 8, 9, 6, 2, 4]),
            'iswtd': index_items(col, all=True,
                values=[1, 3, 15, 5, 16, 12, 10, 14, 11, 7, 13, 9, 8, 6, 2, 4])}
        confirm_frequencies(
            self,
            meta, data,
            [None, 'weight_a'],
            col,
            rules_values_x,
            rules_values_y)
        ################## dropx
        meta['columns'][col]['rules'] = {
            'x': {'dropx': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}},
            'y': {'dropx': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
        rules_values_x = {
            'unwtd': index_items(col, all=True,
                                 values=[2, 4, 6, 8, 10, 12, 14, 16]),
            'iswtd': index_items(col, all=True,
                                 values=[2, 4, 6, 8, 10, 12, 14, 16])}
        rules_values_y = {
            'unwtd': index_items(col, all=True,
                                 values=[1, 3, 5, 7, 9, 11, 13, 15]),
            'iswtd': index_items(col, all=True,
                                 values=[1, 3, 5, 7, 9, 11, 13, 15])}
        confirm_frequencies(
            self,
            meta, data,
            [None, 'weight_a'],
            col,
            rules_values_x,
            rules_values_y)
        ################## slicex + sortx
        meta['columns'][col]['rules'] = {
            'x': {
                'slicex': {'values': frange('4-13')},
                'sortx': {'fixed': [1, 2]}},
            'y': {
                'slicex': {'values': frange('7-16')},
                'sortx': {'fixed': [15, 16]}}}
        rules_values_x = {
            'unwtd': index_items(col, all=True,
                                 values=[4, 5, 6, 10, 12, 11, 7, 13, 8, 9, 1, 2]),
            'iswtd': index_items(col, all=True,
                                 values=[4, 5, 6, 12, 10, 11, 7, 13, 9, 8, 1, 2])}
        rules_values_y = {
            'unwtd': index_items(col, all=True,
                                 values=[10, 12, 14, 11, 7, 13, 8, 9, 15, 16]),
            'iswtd': index_items(col, all=True,
                                 values=[12, 10, 14, 11, 7, 13, 9, 8, 15, 16])}
        confirm_frequencies(
            self,
            meta, data,
            [None, 'weight_a'],
            col,
            rules_values_x,
            rules_values_y)
        ################## slicex + dropx
        meta['columns'][col]['rules'] = {
            'x': {
                'slicex': {'values': [1, 3, 5, 7, 9, 11, 13, 15]},
                'dropx': {'values': [3, 7, 11, 15]}},
            'y': {
                'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]},
                'dropx': {'values': [2, 6, 10, 14]}}}
        rules_values_x = {
            'unwtd': index_items(col, all=True,
                                 values=[1, 5, 9, 13]),
            'iswtd': index_items(col, all=True,
                                 values=[1, 5, 9, 13])}
        rules_values_y = {
            'unwtd': index_items(col, all=True,
                                 values=[4, 8, 12, 16]),
            'iswtd': index_items(col, all=True,
                                 values=[4, 8, 12, 16])}
        confirm_frequencies(
            self,
            meta, data,
            [None, 'weight_a'],
            col,
            rules_values_x,
            rules_values_y)
        ################## sortx + dropx
        meta['columns'][col]['rules'] = {
            'x': {
                'sortx': {'fixed': [1, 2]},
                'dropx': {'values': [5, 11, 13]}},
            'y': {
                'sortx': {'fixed': [15, 16]},
                'dropx': {'values': [7, 13, 14]}}}
        rules_values_x = {
            'unwtd': index_items(col, all=True,
                                 values=[3, 15, 4, 16, 6, 10, 12, 14, 7, 8, 9, 1, 2]),
            'iswtd': index_items(col, all=True,
                                 values=[3, 15, 4, 16, 6, 12, 10, 14, 7, 9, 8, 1, 2])}
        rules_values_y = {
            'unwtd': index_items(col, all=True,
                                 values=[2, 1, 3, 4, 5, 6, 10, 12, 11, 8, 9, 15, 16]),
            'iswtd': index_items(col, all=True,
                                 values=[2, 1, 3, 4, 5, 6, 12, 10, 11, 9, 8, 15, 16])}
        confirm_frequencies(
            self,
            meta, data,
            [None, 'weight_a'],
            col,
            rules_values_x,
            rules_values_y)
        ################## slicex + sortx + dropx
        meta['columns'][col]['rules'] = {
            'x': {
                'slicex': {'values': frange('4-13')},
                'sortx': {'fixed': [11, 13]},
                'dropx': {'values': [7]}},
            'y': {
                'slicex': {'values': frange('7-16')},
                'sortx': {'fixed': [15, 16]},
                'dropx': {'values': [7, 13]}}}
        rules_values_x = {
            'unwtd': index_items(col, all=True,
                                 values=[4, 5, 6, 10, 12, 8, 9, 11, 13]),
            'iswtd': index_items(col, all=True,
                                 values=[4, 5, 6, 12, 10, 9, 8, 11, 13])}
        rules_values_y = {
            'unwtd': index_items(col, all=True,
                                 values=[10, 12, 14, 11, 8, 9, 15, 16]),
            'iswtd': index_items(col, all=True,
                                 values=[12, 10, 14, 11, 9, 8, 15, 16])}
        confirm_frequencies(
            self,
            meta, data,
            [None, 'weight_a'],
            col,
            rules_values_x,
            rules_values_y)
    def test_rules_crosstab(self):
        """Rule combinations on two-variable crosstabs. Only the full
        slicex + sortx + dropx combination runs by default; the individual
        and pairwise combinations run only when EXTENDED_TESTS is True."""
        meta = self.example_data_A_meta
        data = self.example_data_A_data
        col_x = 'religion'
        col_y = 'ethnicity'
        ################## slicex + sortx + dropx
        meta['columns'][col_x]['rules'] = {
            'x': {
                'slicex': {'values': frange('4-13')},
                'sortx': {'fixed': [4, 7, 3]},
                'dropx': {'values': [6, 11]}}}
        meta['columns'][col_y]['rules'] = {
            'y': {
                'slicex': {'values': frange('7-16')},
                'sortx': {'fixed': [7, 11, 13]},
                'dropx': {'values': [11, 16]}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=True,
                                 values=[5, 10, 12, 13, 8, 9, 4, 7, 3]),
            'iswtd': index_items(col_x, all=True,
                                 values=[5, 12, 10, 13, 9, 8, 4, 7, 3])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=True,
                                 values=[15, 12, 14, 8, 10, 9, 7, 13]),
            'iswtd': index_items(col_y, all=True,
                                 values=[12, 15, 8, 9, 10, 14, 7, 13])}
        confirm_crosstabs(
            self,
            meta, data,
            [None, 'weight_a'],
            col_x, col_y,
            rules_values_x,
            rules_values_y)
        if EXTENDED_TESTS:
            ################## slicex
            meta['columns'][col_x]['rules'] = {
                'x': {'slicex': {'values': [1, 3, 5, 7, 9, 10, 11, 13, 15]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=True,
                                     values=[1, 3, 5, 7, 9, 10, 11, 13, 15]),
                'iswtd': index_items(col_x, all=True,
                                     values=[1, 3, 5, 7, 9, 10, 11, 13, 15])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=True,
                                     values=[2, 4, 6, 8, 10, 12, 14, 16]),
                'iswtd': index_items(col_y, all=True,
                                     values=[2, 4, 6, 8, 10, 12, 14, 16])}
            confirm_crosstabs(
                self,
                meta, data,
                [None, 'weight_a'],
                col_x, col_y,
                rules_values_x,
                rules_values_y)
            ################## sortx
            meta['columns'][col_x]['rules'] = {
                'x': {'sortx': {'fixed': [5, 1, 3]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {'sortx': {'fixed': [6, 2, 4]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=True,
                    values=[2, 15, 4, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9, 5, 1, 3]),
                'iswtd': index_items(col_x, all=True,
                    values=[2, 15, 4, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8, 5, 1, 3])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=True,
                    values=[1, 16, 7, 15, 12, 3, 11, 14, 8, 10, 9, 5, 13, 6, 2, 4]),
                'iswtd': index_items(col_y, all=True,
                    values=[1, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 13, 6, 2, 4])}
            confirm_crosstabs(
                self,
                meta, data,
                [None, 'weight_a'],
                col_x, col_y,
                rules_values_x,
                rules_values_y)
            ################## dropx
            meta['columns'][col_x]['rules'] = {
                'x': {'dropx': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {'dropx': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=True,
                                     values=[2, 4, 6, 8, 10, 12, 14, 16]),
                'iswtd': index_items(col_x, all=True,
                                     values=[2, 4, 6, 8, 10, 12, 14, 16])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=True,
                                     values=[1, 3, 5, 7, 9, 11, 13, 15]),
                'iswtd': index_items(col_y, all=True,
                                     values=[1, 3, 5, 7, 9, 11, 13, 15])}
            confirm_crosstabs(
                self,
                meta, data,
                [None, 'weight_a'],
                col_x, col_y,
                rules_values_x,
                rules_values_y)
            ################## slicex + sortx
            meta['columns'][col_x]['rules'] = {
                'x': {
                    'slicex': {'values': frange('4-13')},
                    'sortx': {'fixed': [4, 7, 3]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {
                    'slicex': {'values': frange('7-16')},
                    'sortx': {'fixed': [7, 11, 13]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=True,
                                     values=[5, 6, 10, 12, 11, 13, 8, 9, 4, 7, 3]),
                'iswtd': index_items(col_x, all=True,
                                     values=[5, 6, 12, 10, 11, 13, 9, 8, 4, 7, 3])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=True,
                                     values=[16, 15, 12, 14, 8, 10, 9, 7, 11, 13]),
                'iswtd': index_items(col_y, all=True,
                                     values=[16, 12, 15, 8, 9, 10, 14, 7, 11, 13])}
            confirm_crosstabs(
                self,
                meta, data,
                [None, 'weight_a'],
                col_x, col_y,
                rules_values_x,
                rules_values_y)
            ################## slicex + dropx
            meta['columns'][col_x]['rules'] = {
                'x': {
                    'slicex': {'values': [1, 3, 5, 7, 9, 11, 13, 15]},
                    'dropx': {'values': [3, 7, 11, 15]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {
                    'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]},
                    'dropx': {'values': [2, 6, 10, 14]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=True,
                                     values=[1, 5, 9, 13]),
                'iswtd': index_items(col_x, all=True,
                                     values=[1, 5, 9, 13])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=True,
                                     values=[4, 8, 12, 16]),
                'iswtd': index_items(col_y, all=True,
                                     values=[4, 8, 12, 16])}
            confirm_crosstabs(
                self,
                meta, data,
                [None, 'weight_a'],
                col_x, col_y,
                rules_values_x,
                rules_values_y)
            ################## sortx + dropx
            meta['columns'][col_x]['rules'] = {
                'x': {
                    'sortx': {'fixed': [4, 7, 3]},
                    'dropx': {'values': [5, 10]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {
                    'sortx': {'fixed': [7, 11, 13]},
                    'dropx': {'values': [4, 12]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=True,
                    values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 8, 9, 4, 7, 3]),
                'iswtd': index_items(col_x, all=True,
                    values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 9, 8, 4, 7, 3])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=True,
                    values=[1, 2, 16, 15, 3, 14, 6, 8, 10, 9, 5, 7, 11, 13]),
                'iswtd': index_items(col_y, all=True,
                    values=[1, 2, 16, 3, 15, 8, 9, 10, 14, 5, 6, 7, 11, 13])}
            confirm_crosstabs(
                self,
                meta, data,
                [None, 'weight_a'],
                col_x, col_y,
                rules_values_x,
                rules_values_y)
    def test_rules_get_dataframe(self):
        """Rules applied when retrieving view dataframes from a Stack via
        get_dataframe, across several view types and weights. Only the full
        slicex + sortx + dropx combination runs by default; the rest is
        gated behind EXTENDED_TESTS. Note index_items is called with
        all=False here (no 'All'/base row expected in the view indexes)."""
        meta = self.example_data_A_meta
        data = self.example_data_A_data
        col_x = 'religion'
        col_y = 'ethnicity'
        xks = [col_x]
        yks = ['@', col_y]
        test_views = [
            'cbase', 'rbase',
            # 'ebase',
            'counts', 'c%', 'r%',
            'mean']
        weights = [None, 'weight_a']
        ################## slicex + sortx + dropx
        meta['columns'][col_x]['rules'] = {
            'x': {
                'slicex': {'values': frange('4-13')},
                'sortx': {'fixed': [4, 7, 3]},
                'dropx': {'values': [6, 11]}}}
        meta['columns'][col_y]['rules'] = {
            'y': {
                'slicex': {'values': frange('7-16')},
                'sortx': {'fixed': [7, 11, 13]},
                'dropx': {'values': [11, 16]}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=False,
                                 values=[5, 10, 12, 13, 8, 9, 4, 7, 3]),
            'iswtd': index_items(col_x, all=False,
                                 values=[5, 12, 10, 13, 9, 8, 4, 7, 3])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=False,
                                 values=[15, 12, 14, 8, 10, 9, 7, 13]),
            'iswtd': index_items(col_y, all=False,
                                 values=[12, 15, 8, 9, 10, 14, 7, 13])}
        stack = get_stack(self, meta, data, xks, yks, test_views, weights,
                          extras=True)
        confirm_get_dataframe(
            self, stack, col_x, col_y,
            rules_values_x, rules_values_y)
        if EXTENDED_TESTS:
            ################## slicex
            meta['columns'][col_x]['rules'] = {
                'x': {'slicex': {'values': [1, 3, 5, 7, 9, 10, 11, 13, 15]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                                     values=[1, 3, 5, 7, 9, 10, 11, 13, 15]),
                'iswtd': index_items(col_x, all=False,
                                     values=[1, 3, 5, 7, 9, 10, 11, 13, 15])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                                     values=[2, 4, 6, 8, 10, 12, 14, 16]),
                'iswtd': index_items(col_y, all=False,
                                     values=[2, 4, 6, 8, 10, 12, 14, 16])}
            stack = get_stack(self, meta, data, xks, yks, test_views, weights,
                              extras=True)
            confirm_get_dataframe(
                self, stack, col_x, col_y,
                rules_values_x, rules_values_y)
            ################## sortx
            meta['columns'][col_x]['rules'] = {
                'x': {'sortx': {'fixed': [5, 1, 3]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {'sortx': {'fixed': [6, 2, 4]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                    values=[2, 15, 4, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9, 5, 1, 3]),
                'iswtd': index_items(col_x, all=False,
                    values=[2, 15, 4, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8, 5, 1, 3])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                    values=[1, 16, 7, 15, 12, 3, 11, 14, 8, 10, 9, 5, 13, 6, 2, 4]),
                'iswtd': index_items(col_y, all=False,
                    values=[1, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 13, 6, 2, 4])}
            stack = get_stack(self, meta, data, xks, yks, test_views, weights,
                              extras=True)
            confirm_get_dataframe(
                self, stack, col_x, col_y,
                rules_values_x, rules_values_y)
            ################## dropx
            meta['columns'][col_x]['rules'] = {
                'x': {'dropx': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {'dropx': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                                     values=[2, 4, 6, 8, 10, 12, 14, 16]),
                'iswtd': index_items(col_x, all=False,
                                     values=[2, 4, 6, 8, 10, 12, 14, 16])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                                     values=[1, 3, 5, 7, 9, 11, 13, 15]),
                'iswtd': index_items(col_y, all=False,
                                     values=[1, 3, 5, 7, 9, 11, 13, 15])}
            stack = get_stack(self, meta, data, xks, yks, test_views, weights,
                              extras=True)
            confirm_get_dataframe(
                self, stack, col_x, col_y,
                rules_values_x, rules_values_y)
            ################## slicex + sortx
            meta['columns'][col_x]['rules'] = {
                'x': {
                    'slicex': {'values': frange('4-13')},
                    'sortx': {'fixed': [4, 7, 3]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {
                    'slicex': {'values': frange('7-16')},
                    'sortx': {'fixed': [7, 11, 13]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                                     values=[5, 6, 10, 12, 11, 13, 8, 9, 4, 7, 3]),
                'iswtd': index_items(col_x, all=False,
                                     values=[5, 6, 12, 10, 11, 13, 9, 8, 4, 7, 3])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                                     values=[16, 15, 12, 14, 8, 10, 9, 7, 11, 13]),
                'iswtd': index_items(col_y, all=False,
                                     values=[16, 12, 15, 8, 9, 10, 14, 7, 11, 13])}
            stack = get_stack(self, meta, data, xks, yks, test_views, weights,
                              extras=True)
            confirm_get_dataframe(
                self, stack, col_x, col_y,
                rules_values_x, rules_values_y)
            ################## slicex + dropx
            meta['columns'][col_x]['rules'] = {
                'x': {
                    'slicex': {'values': [1, 3, 5, 7, 9, 11, 13, 15]},
                    'dropx': {'values': [3, 7, 11, 15]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {
                    'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]},
                    'dropx': {'values': [2, 6, 10, 14]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                                     values=[1, 5, 9, 13]),
                'iswtd': index_items(col_x, all=False,
                                     values=[1, 5, 9, 13])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                                     values=[4, 8, 12, 16]),
                'iswtd': index_items(col_y, all=False,
                                     values=[4, 8, 12, 16])}
            stack = get_stack(self, meta, data, xks, yks, test_views, weights,
                              extras=True)
            confirm_get_dataframe(
                self, stack, col_x, col_y,
                rules_values_x, rules_values_y)
            ################## sortx + dropx
            meta['columns'][col_x]['rules'] = {
                'x': {
                    'sortx': {'fixed': [4, 7, 3]},
                    'dropx': {'values': [5, 10]}}}
            meta['columns'][col_y]['rules'] = {
                'y': {
                    'sortx': {'fixed': [7, 11, 13]},
                    'dropx': {'values': [4, 12]}}}
            rules_values_x = {
                'unwtd': index_items(col_x, all=False,
                    values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 8, 9, 4, 7, 3]),
                'iswtd': index_items(col_x, all=False,
                    values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 9, 8, 4, 7, 3])}
            rules_values_y = {
                'unwtd': index_items(col_y, all=False,
                    values=[1, 2, 16, 15, 3, 14, 6, 8, 10, 9, 5, 7, 11, 13]),
                'iswtd': index_items(col_y, all=False,
                    values=[1, 2, 16, 3, 15, 8, 9, 10, 14, 5, 6, 7, 11, 13])}
            stack = get_stack(self, meta, data, xks, yks, test_views, weights,
                              extras=True)
            confirm_get_dataframe(
                self, stack, col_x, col_y,
                rules_values_x, rules_values_y)
def test_rules_get_chain(self):
    """
    Verify that x/y rules (slicex, sortx, dropx and their combinations)
    are applied correctly to dataframes retrieved through chains.

    Each section mutates the column metadata's 'rules' entry and then
    delegates to confirm_xy_chains(), which builds fresh stacks and
    checks every rules variation. (Removed: unused xks/yks locals and a
    leftover get_stack() call whose result was never used.)
    """
    meta = self.example_data_A_meta
    data = self.example_data_A_data
    col_x = 'religion'
    col_y = 'ethnicity'
    others = ['q5_1']
    test_views = [
        'cbase', 'rbase',
        # 'ebase',
        'counts', 'c%', 'r%',
        'mean']
    weights = [None, 'weight_a']
    ################## slicex + sortx + dropx
    meta['columns'][col_x]['rules'] = {
        'x': {
            'slicex': {'values': frange('4-13')},
            'sortx': {'fixed': [4, 7, 3]},
            'dropx': {'values': [6, 11]}}}
    meta['columns'][col_y]['rules'] = {
        'y': {
            'slicex': {'values': frange('7-16')},
            'sortx': {'fixed': [7, 11, 13]},
            'dropx': {'values': [11, 16]}}}
    rules_values_x = {
        'unwtd': index_items(col_x, all=False,
            values=[5, 10, 12, 13, 8, 9, 4, 7, 3]),
        'iswtd': index_items(col_x, all=False,
            values=[5, 12, 10, 13, 9, 8, 4, 7, 3])}
    rules_values_y = {
        'unwtd': index_items(col_y, all=False,
            values=[15, 12, 14, 8, 10, 9, 7, 13]),
        'iswtd': index_items(col_y, all=False,
            values=[12, 15, 8, 9, 10, 14, 7, 13])}
    confirm_xy_chains(
        self, meta, data,
        col_x, col_y, others,
        test_views, weights,
        rules_values_x, rules_values_y)
    if EXTENDED_TESTS:
        ################## slicex
        meta['columns'][col_x]['rules'] = {
            'x': {'slicex': {'values': [1, 3, 5, 7, 9, 10, 11, 13, 15]}}}
        meta['columns'][col_y]['rules'] = {
            'y': {'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=False,
                values=[1, 3, 5, 7, 9, 10, 11, 13, 15]),
            'iswtd': index_items(col_x, all=False,
                values=[1, 3, 5, 7, 9, 10, 11, 13, 15])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=False,
                values=[2, 4, 6, 8, 10, 12, 14, 16]),
            'iswtd': index_items(col_y, all=False,
                values=[2, 4, 6, 8, 10, 12, 14, 16])}
        confirm_xy_chains(
            self, meta, data,
            col_x, col_y, others,
            test_views, weights,
            rules_values_x, rules_values_y)
        ################## sortx
        meta['columns'][col_x]['rules'] = {
            'x': {'sortx': {'fixed': [5, 1, 3]}}}
        meta['columns'][col_y]['rules'] = {
            'y': {'sortx': {'fixed': [6, 2, 4]}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=False,
                values=[2, 15, 4, 16, 6, 10, 12, 14, 11, 7, 13, 8, 9, 5, 1, 3]),
            'iswtd': index_items(col_x, all=False,
                values=[2, 15, 4, 16, 6, 12, 10, 14, 11, 7, 13, 9, 8, 5, 1, 3])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=False,
                values=[1, 16, 7, 15, 12, 3, 11, 14, 8, 10, 9, 5, 13, 6, 2, 4]),
            'iswtd': index_items(col_y, all=False,
                values=[1, 16, 7, 12, 11, 3, 15, 8, 9, 10, 14, 5, 13, 6, 2, 4])}
        confirm_xy_chains(
            self, meta, data,
            col_x, col_y, others,
            test_views, weights,
            rules_values_x, rules_values_y)
        ################## dropx
        meta['columns'][col_x]['rules'] = {
            'x': {'dropx': {'values': [1, 3, 5, 7, 9, 11, 13, 15]}}}
        meta['columns'][col_y]['rules'] = {
            'y': {'dropx': {'values': [2, 4, 6, 8, 10, 12, 14, 16]}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=False,
                values=[2, 4, 6, 8, 10, 12, 14, 16]),
            'iswtd': index_items(col_x, all=False,
                values=[2, 4, 6, 8, 10, 12, 14, 16])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=False,
                values=[1, 3, 5, 7, 9, 11, 13, 15]),
            'iswtd': index_items(col_y, all=False,
                values=[1, 3, 5, 7, 9, 11, 13, 15])}
        confirm_xy_chains(
            self, meta, data,
            col_x, col_y, others,
            test_views, weights,
            rules_values_x, rules_values_y)
        ################## slicex + sortx
        meta['columns'][col_x]['rules'] = {
            'x': {
                'slicex': {'values': frange('4-13')},
                'sortx': {'fixed': [4, 7, 3]}}}
        meta['columns'][col_y]['rules'] = {
            'y': {
                'slicex': {'values': frange('7-16')},
                'sortx': {'fixed': [7, 11, 13]}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=False,
                values=[5, 6, 10, 12, 11, 13, 8, 9, 4, 7, 3]),
            'iswtd': index_items(col_x, all=False,
                values=[5, 6, 12, 10, 11, 13, 9, 8, 4, 7, 3])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=False,
                values=[16, 15, 12, 14, 8, 10, 9, 7, 11, 13]),
            'iswtd': index_items(col_y, all=False,
                values=[16, 12, 15, 8, 9, 10, 14, 7, 11, 13])}
        # NOTE(review): a leftover get_stack() call was removed here; the
        # stack it built was never used (confirm_xy_chains builds its own).
        confirm_xy_chains(
            self, meta, data,
            col_x, col_y, others,
            test_views, weights,
            rules_values_x, rules_values_y)
        ################## slicex + dropx
        meta['columns'][col_x]['rules'] = {
            'x': {
                'slicex': {'values': [1, 3, 5, 7, 9, 11, 13, 15]},
                'dropx': {'values': [3, 7, 11, 15]}}}
        meta['columns'][col_y]['rules'] = {
            'y': {
                'slicex': {'values': [2, 4, 6, 8, 10, 12, 14, 16]},
                'dropx': {'values': [2, 6, 10, 14]}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=False,
                values=[1, 5, 9, 13]),
            'iswtd': index_items(col_x, all=False,
                values=[1, 5, 9, 13])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=False,
                values=[4, 8, 12, 16]),
            'iswtd': index_items(col_y, all=False,
                values=[4, 8, 12, 16])}
        confirm_xy_chains(
            self, meta, data,
            col_x, col_y, others,
            test_views, weights,
            rules_values_x, rules_values_y)
        ################## sortx + dropx
        meta['columns'][col_x]['rules'] = {
            'x': {
                'sortx': {'fixed': [4, 7, 3]},
                'dropx': {'values': [5, 10]}}}
        meta['columns'][col_y]['rules'] = {
            'y': {
                'sortx': {'fixed': [7, 11, 13]},
                'dropx': {'values': [4, 12]}}}
        rules_values_x = {
            'unwtd': index_items(col_x, all=False,
                values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 8, 9, 4, 7, 3]),
            'iswtd': index_items(col_x, all=False,
                values=[2, 1, 15, 16, 6, 12, 14, 11, 13, 9, 8, 4, 7, 3])}
        rules_values_y = {
            'unwtd': index_items(col_y, all=False,
                values=[1, 2, 16, 15, 3, 14, 6, 8, 10, 9, 5, 7, 11, 13]),
            'iswtd': index_items(col_y, all=False,
                values=[1, 2, 16, 3, 15, 8, 9, 10, 14, 5, 6, 7, 11, 13])}
        confirm_xy_chains(
            self, meta, data,
            col_x, col_y, others,
            test_views, weights,
            rules_values_x, rules_values_y)
def test_rules_coltests(self):
    """
    Verify y-rules (slicex, sortx, dropx) are applied to column-test
    (significance) views: plain counts, nets, block nets and means.

    The repeated fetch-and-compare boilerplate is factored into the
    local confirm() helper; each section only sets the rules on the
    y-column's metadata and states the expected flagged dataframe.
    """
    meta = self.example_data_A_meta
    data = self.example_data_A_data
    col_x = 'q5_1'
    col_y = 'locality'
    xks = [col_x]
    yks = ['@', col_y]
    test_views = [
        'cbase', 'counts', 'mean']
    weights = [None]
    dk = 'test'
    fk = 'no_filter'
    xk = col_x
    yk = col_y
    # The stack is built once; rules are read at get_dataframe() time,
    # so mutating meta afterwards is what each section relies on.
    stack = get_stack(
        self, meta, data, xks, yks, test_views, weights,
        extras=True, coltests=True)

    def confirm(vk, rules_values_df):
        # One comparison step: fetch the rules-applied view dataframe
        # and compare cell-by-cell (NaNs filled so they compare equal).
        keys = [dk, fk, xk, yk, vk]
        df = get_dataframe(stack, keys=keys, rules=True)
        actual = df.fillna(0).values.tolist()
        expected = rules_values_df.fillna(0).values.tolist()
        self.assertSequenceEqual(actual, expected)

    ################## slicex
    ######### counts
    meta['columns'][col_y]['rules'] = {
        'y': {'slicex': {'values': [5, 2, 3]}}}
    confirm('x|t.props.askia.01|:|||askia tests', pd.DataFrame([
        [np.NaN, np.NaN, np.NaN],
        [np.NaN, np.NaN, np.NaN],
        [np.NaN, np.NaN, np.NaN],
        [np.NaN, np.NaN, np.NaN],
        [np.NaN, np.NaN, np.NaN],
        ['[2]', np.NaN, np.NaN],
        [np.NaN, np.NaN, np.NaN]]))
    ######### net
    meta['columns'][col_y]['rules'] = {
        'y': {'slicex': {'values': [3, 1, 5]}}}
    confirm('x|t.props.askia.10|x[{1,2,3}]:|||askia tests', pd.DataFrame([
        [np.NaN, '[5]', np.NaN]]))
    ######### block net
    meta['columns'][col_y]['rules'] = {
        'y': {'slicex': {'values': [4, 1, 3]}}}
    confirm('x|t.props.askia.10|x[{1,2}],x[{2,3}],x[{1,3}]:|||askia tests',
            pd.DataFrame([
                [np.NaN, np.NaN, np.NaN],
                [np.NaN, '[3, 4]', np.NaN],
                [np.NaN, '[4]', np.NaN]]))
    ######### mean
    meta['columns'][col_y]['rules'] = {
        'y': {'slicex': {'values': [5, 2, 4]}}}
    confirm('x|t.means.askia.10|x:|||askia tests', pd.DataFrame([
        ['[2, 4]', np.NaN, '[2]']]))
    ################## sortx
    ######### counts
    meta['columns'][col_y]['rules'] = {
        'y': {'sortx': {'fixed': [1, 2]}}}
    confirm('x|t.props.askia.01|:|||askia tests', pd.DataFrame([
        [np.NaN, np.NaN, np.NaN, np.NaN, np.NaN],
        [np.NaN, np.NaN, np.NaN, '[5]', np.NaN],
        [np.NaN, np.NaN, np.NaN, np.NaN, np.NaN],
        ['[1]', np.NaN, np.NaN, np.NaN, np.NaN],
        [np.NaN, np.NaN, np.NaN, np.NaN, np.NaN],
        [np.NaN, '[1, 2]', np.NaN, np.NaN, np.NaN],
        [np.NaN, '[1]', np.NaN, np.NaN, np.NaN]]))
    ######### net
    meta['columns'][col_y]['rules'] = {
        'y': {'sortx': {'fixed': [1, 2]}}}
    confirm('x|t.props.askia.10|x[{1,2,3}]:|||askia tests', pd.DataFrame([
        [np.NaN, np.NaN, np.NaN, '[4, 5]', '[4]']]))
    ######### block net
    meta['columns'][col_y]['rules'] = {
        'y': {'sortx': {'fixed': [1, 2]}}}
    confirm('x|t.props.askia.10|x[{1,2}],x[{2,3}],x[{1,3}]:|||askia tests',
            pd.DataFrame([
                ['[5]', np.NaN, np.NaN, '[2, 5]', np.NaN],
                [np.NaN, np.NaN, np.NaN, '[3, 4, 5]', '[4, 5]'],
                [np.NaN, np.NaN, np.NaN, '[4]', np.NaN]]))
    ######### mean
    meta['columns'][col_y]['rules'] = {
        'y': {'sortx': {'fixed': [1, 2]}}}
    confirm('x|t.means.askia.10|x:|||askia tests', pd.DataFrame([
        ['[1]', '[1, 2, 3, 4]', '[1, 2, 3]', np.NaN, '[1]']]))
    ################## dropx
    ######### counts
    meta['columns'][col_y]['rules'] = {
        'y': {'dropx': {'values': [1, 4]}}}
    confirm('x|t.props.askia.01|:|||askia tests', pd.DataFrame([
        [np.NaN, np.NaN, np.NaN],
        [np.NaN, np.NaN, np.NaN],
        [np.NaN, np.NaN, np.NaN],
        [np.NaN, np.NaN, np.NaN],
        [np.NaN, np.NaN, np.NaN],
        [np.NaN, np.NaN, '[2]'],
        [np.NaN, np.NaN, np.NaN]]))
    ######### net
    meta['columns'][col_y]['rules'] = {
        'y': {'dropx': {'values': [1, 3]}}}
    confirm('x|t.props.askia.10|x[{1,2,3}]:|||askia tests', pd.DataFrame([
        ['[4]', np.NaN, np.NaN]]))
    ######### block net
    meta['columns'][col_y]['rules'] = {
        'y': {'dropx': {'values': [2, 4]}}}
    confirm('x|t.props.askia.10|x[{1,2}],x[{2,3}],x[{1,3}]:|||askia tests',
            pd.DataFrame([
                ['[5]', '[5]', np.NaN],
                ['[3, 5]', np.NaN, np.NaN],
                [np.NaN, np.NaN, np.NaN]]))
    ######### mean
    meta['columns'][col_y]['rules'] = {
        'y': {'dropx': {'values': [1, 3]}}}
    confirm('x|t.means.askia.10|x:|||askia tests', pd.DataFrame([
        [np.NaN, '[2]', '[2, 4]']]))
def test_rules_coltests_flag_bases(self):
    """
    Verify y-rules follow the flagged-base markers ('*' for a small
    base, '**' for a below-minimum base) on Dim-mimicked column-test
    views. The repeated check pattern is factored into confirm().
    """
    meta = self.example_data_A_meta
    data = self.example_data_A_data
    col_x = 'q5_1'
    col_y = 'locality'
    xks = [col_x]
    yks = ['@', col_y]
    test_views = [
        'cbase', 'counts', 'mean']
    weights = [None]
    dk = 'test'
    fk = 'no_filter'
    xk = col_x
    yk = col_y
    minimum = 1000
    small = 2000
    stack = get_stack(
        self, meta, data, xks, yks, test_views, weights,
        extras=True, coltests=True, flag_bases=[minimum, small])
    # Every section reads the same flagged test view and its cbase.
    vk = 'x|t.props.Dim.05|:|||askia tests'
    cbase = 'x|f|x:|||cbase'

    def confirm(rules_values_df, expected_minimum, expected_small):
        # Sanity-check which (rules-ordered) columns fall below the
        # minimum / small base thresholds, then compare the flagged
        # test view cell-by-cell (NaNs filled so they compare equal).
        df = get_dataframe(stack, keys=[dk, fk, xk, yk, vk], rules=True)
        df_cbase = get_dataframe(
            stack, keys=[dk, fk, xk, yk, cbase], rules=True)
        is_minimum = [c <= minimum for c in df_cbase.values[0]]
        is_small = [c > minimum and c <= small for c in df_cbase.values[0]]
        self.assertSequenceEqual(is_minimum, expected_minimum)
        self.assertSequenceEqual(is_small, expected_small)
        self.assertSequenceEqual(
            df.fillna(0).values.tolist(),
            rules_values_df.fillna(0).values.tolist())

    ################## slicex
    ######### counts
    meta['columns'][col_y]['rules'] = {
        'y': {'slicex': {'values': [5, 2, 3]}}}
    confirm(pd.DataFrame([
        ['**', np.NaN, '[2]*'],
        ['**', np.NaN, '*'],
        ['**', np.NaN, '*'],
        ['**', np.NaN, '*'],
        ['**', np.NaN, '*'],
        ['**', np.NaN, '*'],
        ['**', np.NaN, '*']]),
        [True, False, False],
        [False, False, True])
    ################## sortx
    ######### counts
    meta['columns'][col_y]['rules'] = {
        'y': {'sortx': {'fixed': [1, 2]}}}
    confirm(pd.DataFrame([
        ['[1, 2]*', '**', '**', np.NaN, np.NaN],
        ['*', '**', '**', '[2, 3]', np.NaN],
        ['*', '**', '**', np.NaN, np.NaN],
        ['[1]*', '**', '**', np.NaN, '[1]'],
        ['*', '**', '**', np.NaN, np.NaN],
        ['*', '**', '**', np.NaN, np.NaN],
        ['*', '**', '**', np.NaN, np.NaN]]),
        [False, True, True, False, False],
        [True, False, False, False, False])
    ################## dropx
    ######### counts
    meta['columns'][col_y]['rules'] = {
        'y': {'dropx': {'values': [1, 4]}}}
    confirm(pd.DataFrame([
        [np.NaN, '[2]*', '**'],
        [np.NaN, '*', '**'],
        [np.NaN, '*', '**'],
        [np.NaN, '*', '**'],
        [np.NaN, '*', '**'],
        [np.NaN, '*', '**'],
        [np.NaN, '*', '**']]),
        [False, False, True],
        [False, True, False])
# ##################### Helper functions #####################
def index_items(col, values, all=False):
    """
    Build the list of (column, value) tuples matching a dataframe index.

    When `all` is True an (col, 'All') total entry is prepended. (The
    parameter name shadows the builtin all() but is kept so existing
    keyword callers keep working.)
    """
    tuples = [(col, value) for value in values]
    return ([(col, 'All')] + tuples) if all else tuples
def confirm_frequencies(self, meta, data,
                        weights,
                        col,
                        rules_values_x,
                        rules_values_y):
    """
    Check every rules variation (True / False / ['x'] / ['y'] /
    ['x', 'y']) for frequency() on both axes, for every weight.
    """
    natural_x = frequency(meta, data, x=col).index.values.tolist()
    natural_y = natural_x
    total_axis = [(col, '@')]
    for weight in weights:
        variant = 'unwtd' if weight is None else 'iswtd'
        rules_x = rules_values_x[variant]
        rules_y = rules_values_y[variant]
        # (rules option, expected x index, expected y columns)
        checks = [
            (True, rules_x, rules_y),
            (False, natural_x, natural_y),
            (['x'], rules_x, natural_y),
            (['y'], natural_x, rules_y),
            (['x', 'y'], rules_x, rules_y)]
        for opt, exp_x, exp_y in checks:
            fx = frequency(meta, data, x=col, weight=weight, rules=opt)
            fy = frequency(meta, data, y=col, weight=weight, rules=opt)
            confirm_index_columns(self, fx, exp_x, total_axis)
            confirm_index_columns(self, fy, total_axis, exp_y)
def confirm_crosstabs(self, meta, data,
                      weights,
                      col_x, col_y,
                      rules_values_x,
                      rules_values_y):
    """
    Check every rules variation (True / False / ['x'] / ['y'] /
    ['x', 'y']) for crosstab(), with and without the xtotal column,
    for every weight.
    """
    natural_x = frequency(meta, data, x=col_x).index.values.tolist()
    natural_y = frequency(meta, data, y=col_y).columns.values.tolist()
    for weight in weights:
        variant = 'unwtd' if weight is None else 'iswtd'
        rules_x = rules_values_x[variant]
        rules_y = rules_values_y[variant]
        # (rules option, expected x index, expected y columns)
        checks = [
            (True, rules_x, rules_y),
            (False, natural_x, natural_y),
            (['x'], rules_x, natural_y),
            (['y'], natural_x, rules_y),
            (['x', 'y'], rules_x, rules_y)]
        for xtotal in (False, True):
            for opt, exp_x, exp_y in checks:
                df = crosstab(meta, data, col_x, col_y, weight=weight,
                              rules=opt, xtotal=xtotal)
                confirm_index_columns(self, df, exp_x, exp_y)
def confirm_get_dataframe(self, stack, col_x, col_y,
                          rules_values_x, rules_values_y):
    """
    Check every rules variation of get_dataframe() for each view in
    the stack, against the expected (rules-applied) and natural index
    and column tuples computed by get_xy_values().
    """
    dk = 'test'
    fk = 'no_filter'
    meta = stack[dk].meta
    data = stack[dk].data
    vks = stack.describe()['view'].values.tolist()
    for xk in [col_x]:
        for yk in ['@', col_y]:
            # A totals-on-totals link does not exist.
            if xk == '@' and yk == '@':
                continue
            for vk in vks:
                keys = [dk, fk, xk, yk, vk]
                rules_x, natural_x, rules_y, natural_y = get_xy_values(
                    meta, data,
                    col_x, col_y,
                    xk, yk, vk,
                    rules_values_x, rules_values_y)
                # (rules option, expected index, expected columns)
                checks = [
                    (True, rules_x, rules_y),
                    (False, natural_x, natural_y),
                    (['x'], rules_x, natural_y),
                    (['y'], natural_x, rules_y),
                    (['x', 'y'], rules_x, rules_y)]
                for opt, exp_x, exp_y in checks:
                    df = get_dataframe(stack, keys=keys, rules=opt)
                    confirm_index_columns(self, df, exp_x, exp_y)
def confirm_xy_chains(self, meta, data, col_x, col_y, others, views, weights,
                      rules_values_x, rules_values_y):
    """
    Build an x-oriented stack (one x, many y) and a y-oriented stack
    (many x, one y) and confirm rules on chains pulled from each.
    """
    x_stack = get_stack(
        self, meta, data,
        [col_x], ['@', col_y] + others,
        views, weights,
        extras=True)
    confirm_get_xchain(
        self, x_stack, col_x, col_y, others,
        rules_values_x, rules_values_y)
    y_stack = get_stack(
        self, meta, data,
        [col_x] + others, [col_y],
        views, weights,
        extras=True)
    confirm_get_ychain(
        self, y_stack, col_x, col_y, others,
        rules_values_x, rules_values_y)
def confirm_get_xchain(self, stack, col_x, col_y, others,
                       rules_values_x, rules_values_y):
    """
    Run the chain confirmation for an x-oriented stack
    (single x key, '@' + col_y + others on the y axis).
    """
    dk = 'test'
    fk = 'no_filter'
    # keys[2:] are placeholders that confirm_get_chain overwrites.
    keys = [dk, fk, col_x, col_y, 'vk']
    confirm_get_chain(
        self,
        stack[dk].meta, stack[dk].data,
        stack, keys,
        col_x, col_y,
        [col_x], ['@', col_y] + others,
        rules_values_x, rules_values_y,
        others)
def confirm_get_ychain(self, stack, col_x, col_y, others,
                       rules_values_x, rules_values_y):
    """
    Run the chain confirmation for a y-oriented stack
    (col_x + others on the x axis, single y key).
    """
    dk = 'test'
    fk = 'no_filter'
    # keys[2:] are placeholders that confirm_get_chain overwrites.
    keys = [dk, fk, col_x, col_y, 'vk']
    confirm_get_chain(
        self,
        stack[dk].meta, stack[dk].data,
        stack, keys,
        col_x, col_y,
        [col_x] + others, [col_y],
        rules_values_x, rules_values_y,
        others)
def confirm_get_chain(self,
                      meta, data,
                      stack, keys,
                      col_x, col_y,
                      xks, yks,
                      rules_values_x, rules_values_y,
                      others=None):
    """
    Confirm every rules variation for chains, weighted and unweighted.

    One chain is pre-built per (weight, rules-option) combination and
    each view's dataframe is compared against the expected
    rules-applied / natural index and column tuples from
    get_xy_values(). (Fixed: mutable default argument others=[]; the
    duplicated unwtd/wtd branches are collapsed into one table.)
    """
    if others is None:
        others = []
    vks = stack.describe()['view'].values.tolist()
    rule_opts = [True, False, ['x'], ['y'], ['x', 'y']]
    # Pre-build the ten chains (5 rules options x 2 weights); the loops
    # below only read dataframes out of them.
    chains = {
        w: [stack.get_chain(x=xks, y=yks, views=vks, rules=opt,
                            rules_weight=w)
            for opt in rule_opts]
        for w in (None, 'weight_a')}
    for xk in xks:
        keys[2] = xk
        for yk in yks:
            if xk == '@' and yk == '@':
                continue
            keys[3] = yk
            for vk in vks:
                keys[4] = vk
                for weight in (None, 'weight_a'):
                    rules_x, natural_x, rules_y, natural_y = get_xy_values(
                        meta, data,
                        col_x, col_y,
                        xk, yk, vk,
                        rules_values_x, rules_values_y,
                        others,
                        rules_weight=weight)
                    # Expected (index, columns) per rules option, in
                    # the same order as rule_opts above.
                    expected = [
                        (rules_x, rules_y),        # rules=True
                        (natural_x, natural_y),    # rules=False
                        (rules_x, natural_y),      # rules=['x']
                        (natural_x, rules_y),      # rules=['y']
                        (rules_x, rules_y)]        # rules=['x', 'y']
                    for chain, (exp_x, exp_y) in zip(chains[weight],
                                                     expected):
                        df = get_dataframe(chain, keys=keys, rules=False)
                        confirm_index_columns(self, df, exp_x, exp_y)
def get_xy_values(meta, data,
                  col_x, col_y,
                  xk, yk, vk,
                  rules_values_x, rules_values_y,
                  others=None, rules_weight='auto'):
    """
    Work out the expected rules-applied and natural x/y index tuples
    for the view keyed by vk.

    Returns (rules_x, natural_x, rules_y, natural_y) where each item is
    a list of (column, value) tuples. (Fixed: mutable default argument
    others=[]; removed the unused `relative` local.)
    """
    if others is None:
        others = []
    # View-key layout: 'x|method|relation|rel_to|weight|shortname'.
    v_method = vk.split('|')[1]
    relation = vk.split('|')[2]
    weight = vk.split('|')[4]
    shortnam = vk.split('|')[5]
    # A 'condensed' axis collapses to one (or a few named) rows/columns
    # — nets, bases and descriptives — instead of one row per value.
    condensed_x = relation.split(":")[0].startswith('x') or v_method.startswith('d.')
    condensed_y = relation.split(":")[1].startswith('y')
    if rules_weight == 'auto':
        # Default to the weight encoded in the view key itself.
        rules_weight = None if weight == '' else weight
    if rules_weight is None:
        rules_x = rules_values_x['unwtd']
        rules_y = rules_values_y['unwtd']
    else:
        rules_x = rules_values_x['iswtd']
        rules_y = rules_values_y['iswtd']
    if xk in others:
        # 'Other' x keys have no rules; their natural order is expected.
        fx = frequency(meta, data, x=xk)
        natural_x = fx.index.values.tolist()
        natural_x.remove((xk, 'All'))
        rules_x = natural_x
        if condensed_x:
            if shortnam == 'Block net':
                rules_x = natural_x = [
                    (xk, 'bn1'),
                    (xk, 'bn2'),
                    (xk, 'bn3')]
            elif shortnam in ['cbase', 'ebase']:
                rules_x = natural_x = [(xk, 'All')]
            else:
                rules_x = natural_x = [(xk, shortnam)]
    elif xk == '@':
        # A totals x axis is labelled with the y column's name.
        if condensed_x:
            if shortnam == 'Block net':
                rules_x = natural_x = [
                    (col_x, 'bn1'),
                    (col_x, 'bn2'),
                    (col_x, 'bn3')]
            elif shortnam in ['cbase', 'ebase']:
                rules_x = natural_x = [(col_y, 'All')]
            else:
                rules_x = natural_x = [(col_y, shortnam)]
        else:
            rules_x = natural_x = [(col_y, '@')]
    elif condensed_x:
        if shortnam == 'Block net':
            rules_x = natural_x = [
                (col_x, 'bn1'),
                (col_x, 'bn2'),
                (col_x, 'bn3')]
        elif shortnam in ['cbase', 'ebase']:
            rules_x = natural_x = [(xk, 'All')]
        else:
            rules_x = natural_x = [(xk, shortnam)]
    else:
        fx = frequency(meta, data, x=col_x)
        natural_x = fx.index.values.tolist()
        natural_x.remove((col_x, 'All'))
    if yk in others:
        # 'Other' y keys have no rules; their natural order is expected.
        fy = frequency(meta, data, y=yk)
        natural_y = fy.columns.values.tolist()
        natural_y.remove((yk, 'All'))
        rules_y = natural_y
        if condensed_y:
            if shortnam == 'Block net':
                rules_y = natural_y = [
                    (yk, 'bn1'),
                    (yk, 'bn2'),
                    (yk, 'bn3')]
            elif shortnam in ['rbase']:
                rules_y = natural_y = [(yk, 'All')]
            else:
                rules_y = natural_y = [(yk, shortnam)]
    elif yk == '@':
        # A totals y axis is labelled with the x column's name.
        if condensed_y:
            if shortnam == 'Block net':
                rules_y = natural_y = [
                    (col_y, 'bn1'),
                    (col_y, 'bn2'),
                    (col_y, 'bn3')]
            elif shortnam in ['rbase']:
                rules_y = natural_y = [(col_x, 'All')]
            else:
                rules_y = natural_y = [(col_x, shortnam)]
        else:
            rules_y = natural_y = [(col_x, '@')]
    elif condensed_y:
        if shortnam == 'Block net':
            rules_y = natural_y = [
                (col_y, 'bn1'),
                (col_y, 'bn2'),
                (col_y, 'bn3')]
        elif shortnam in ['rbase']:
            rules_y = natural_y = [(col_y, 'All')]
        else:
            rules_y = natural_y = [(col_y, shortnam)]
    else:
        fy = frequency(meta, data, y=col_y)
        natural_y = fy.columns.values.tolist()
        natural_y.remove((col_y, 'All'))
    return rules_x, natural_x, rules_y, natural_y
def str_index_values(index):
    """
    Return the index's tuples with the level-1 labels coerced to str.

    Fixed: the original subscripted zip(...) results, which raises
    TypeError on Python 3 where zip() returns an iterator.
    """
    return [(level0, str(level1)) for level0, level1 in index.values.tolist()]
def confirm_index_columns(self, df, expected_x, expected_y):
    """
    Assert that df's index and columns equal the expected tuple lists.

    When the columns carry more than one level-0 label, the leading
    column is an xtotal column and is excluded from the comparison.
    """
    idx_actual = df.index.values.tolist()
    col_actual = df.columns.values.tolist()
    has_xtotal = len(df.columns.levels[0]) > 1
    if has_xtotal:
        col_actual = col_actual[1:]
    self.assertEqual(idx_actual, expected_x)
    self.assertEqual(col_actual, expected_y)
def get_stack(self, meta, data, xks, yks, views, weights,
              extras=False, coltests=False, flag_bases=None):
    """
    Build a Stack named 'test' over (meta, data) and add the links
    the rules tests need.

    Parameters
    ----------
    xks, yks : lists of x/y keys for the links.
    views : the base view names to add.
    weights : list of weight names (None for unweighted).
    extras : also add net, block-net and stddev views.
    coltests : also add significance-test views ('askia tests').
    flag_bases : [minimum, small] thresholds; when given the tests
        mimic 'Dim' with base flagging instead of 'askia'.
    """
    stack = Stack('test')
    stack.add_data('test', data, meta)
    stack.add_link(x=xks, y=yks, views=views, weights=weights)
    if extras or coltests:
        # Add a basic net (net_views is mutated and reused below, so
        # the statement order here matters).
        net_views = ViewMapper(
            template={
                'method': QuantipyViews().frequency,
                'kwargs': {'iterators': {'rel_to': [None, 'y']}}})
        net_views.add_method(
            name='Net 1-3',
            kwargs={'logic': [1, 2, 3], 'axis': 'x',
                    'text': {'en-GB': '1-3'}})
        stack.add_link(x=xks, y=yks, views=net_views, weights=weights)
        # Add block net (only the new method is linked, via subset()).
        net_views.add_method(
            name='Block net',
            kwargs={
                'logic': [
                    {'bn1': [1, 2]},
                    {'bn2': [2, 3]},
                    {'bn3': [1, 3]}], 'axis': 'x'})
        stack.add_link(x=xks, y=yks, views=net_views.subset(['Block net']), weights=weights)
        # Add NPS
        ## TO DO
        # Add standard deviation
        stddev_views = ViewMapper(
            template = {
                'method': QuantipyViews().descriptives,
                'kwargs': {'stats': 'stddev'}})
        stddev_views.add_method(name='stddev')
        stack.add_link(x=xks, y=yks, views=stddev_views, weights=weights)
    if coltests:
        if flag_bases is None:
            # Plain askia-mimicked significance tests.
            test_views = ViewMapper(
                template={
                    'method': QuantipyViews().coltests,
                    'kwargs': {
                        'mimic': 'askia',
                        'iterators': {
                            'metric': ['props', 'means'],
                            'level': ['low', 'mid', 'high']}}})
        else:
            # Dim-mimicked tests with small/minimum base flagging.
            test_views = ViewMapper(
                template={
                    'method': QuantipyViews().coltests,
                    'kwargs': {
                        'mimic': 'Dim',
                        'flag_bases': flag_bases,
                        'iterators': {
                            'metric': ['props', 'means'],
                            'level': ['low', 'mid', 'high']}}})
        test_views.add_method('askia tests')
        # NOTE(review): no weights are passed here, so the test views
        # appear to be added unweighted only — confirm this is intended.
        stack.add_link(x=xks, y=yks, views=test_views)
    return stack
| 35.539301
| 100
| 0.460404
| 10,127
| 81,385
| 3.521378
| 0.029723
| 0.063543
| 0.050307
| 0.027201
| 0.894226
| 0.879112
| 0.869269
| 0.855977
| 0.846134
| 0.826253
| 0
| 0.061056
| 0.369896
| 81,385
| 2,289
| 101
| 35.554827
| 0.634348
| 0.033581
| 0
| 0.799176
| 0
| 0.005889
| 0.083845
| 0.010701
| 0
| 0
| 0
| 0
| 0.021201
| 1
| 0.017668
| false
| 0
| 0.012367
| 0
| 0.034158
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62bd84d6cca7d8773c7b87f8027470e1d6039cbb
| 23,081
|
py
|
Python
|
sdk/python/pulumi_gcp/diagflow/cx_environment.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/diagflow/cx_environment.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/diagflow/cx_environment.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['CxEnvironmentArgs', 'CxEnvironment']
@pulumi.input_type
class CxEnvironmentArgs:
    def __init__(__self__, *,
                 display_name: pulumi.Input[str],
                 version_configs: pulumi.Input[Sequence[pulumi.Input['CxEnvironmentVersionConfigArgs']]],
                 description: Optional[pulumi.Input[str]] = None,
                 parent: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a CxEnvironment resource.
        :param pulumi.Input[str] display_name: The human-readable name of the environment (unique in an agent). Limit of 64 characters.
        :param pulumi.Input[Sequence[pulumi.Input['CxEnvironmentVersionConfigArgs']]] version_configs: A list of configurations for flow versions. You should include version configs for all flows that are reachable from [Start Flow][Agent.start_flow] in the agent. Otherwise, an error will be returned.
               Structure is documented below.
        :param pulumi.Input[str] description: The human-readable description of the environment. The maximum length is 500 characters. If exceeded, the request is rejected.
        :param pulumi.Input[str] parent: The Agent to create an Environment for.
               Format: projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>.
        """
        pulumi.set(__self__, "display_name", display_name)
        pulumi.set(__self__, "version_configs", version_configs)
        # Optional arguments are only recorded when they were provided.
        for attr, value in (("description", description), ("parent", parent)):
            if value is not None:
                pulumi.set(__self__, attr, value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Input[str]:
        """
        The human-readable name of the environment (unique in an agent). Limit of 64 characters.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter(name="versionConfigs")
    def version_configs(self) -> pulumi.Input[Sequence[pulumi.Input['CxEnvironmentVersionConfigArgs']]]:
        """
        A list of configurations for flow versions. You should include version configs for all flows that are reachable from [Start Flow][Agent.start_flow] in the agent. Otherwise, an error will be returned.
        Structure is documented below.
        """
        return pulumi.get(self, "version_configs")

    @version_configs.setter
    def version_configs(self, value: pulumi.Input[Sequence[pulumi.Input['CxEnvironmentVersionConfigArgs']]]):
        pulumi.set(self, "version_configs", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The human-readable description of the environment. The maximum length is 500 characters. If exceeded, the request is rejected.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def parent(self) -> Optional[pulumi.Input[str]]:
        """
        The Agent to create an Environment for.
        Format: projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>.
        """
        return pulumi.get(self, "parent")

    @parent.setter
    def parent(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "parent", value)
@pulumi.input_type
class _CxEnvironmentState:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen); regeneration will
    # overwrite hand edits. State class used by CxEnvironment.get() lookups;
    # unlike CxEnvironmentArgs, every field here is optional and it also carries
    # the provider-computed outputs (`name`, `update_time`).
    def __init__(__self__, *,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 parent: Optional[pulumi.Input[str]] = None,
                 update_time: Optional[pulumi.Input[str]] = None,
                 version_configs: Optional[pulumi.Input[Sequence[pulumi.Input['CxEnvironmentVersionConfigArgs']]]] = None):
        """
        Input properties used for looking up and filtering CxEnvironment resources.
        :param pulumi.Input[str] description: The human-readable description of the environment. The maximum length is 500 characters. If exceeded, the request is rejected.
        :param pulumi.Input[str] display_name: The human-readable name of the environment (unique in an agent). Limit of 64 characters.
        :param pulumi.Input[str] name: The name of the environment.
        :param pulumi.Input[str] parent: The Agent to create an Environment for.
               Format: projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>.
        :param pulumi.Input[str] update_time: Update time of this environment. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine
               fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
        :param pulumi.Input[Sequence[pulumi.Input['CxEnvironmentVersionConfigArgs']]] version_configs: A list of configurations for flow versions. You should include version configs for all flows that are reachable from [Start Flow][Agent.start_flow] in the agent. Otherwise, an error will be returned.
               Structure is documented below.
        """
        # Only set properties the caller actually supplied.
        if description is not None:
            pulumi.set(__self__, "description", description)
        if display_name is not None:
            pulumi.set(__self__, "display_name", display_name)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if parent is not None:
            pulumi.set(__self__, "parent", parent)
        if update_time is not None:
            pulumi.set(__self__, "update_time", update_time)
        if version_configs is not None:
            pulumi.set(__self__, "version_configs", version_configs)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The human-readable description of the environment. The maximum length is 500 characters. If exceeded, the request is rejected.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[pulumi.Input[str]]:
        """
        The human-readable name of the environment (unique in an agent). Limit of 64 characters.
        """
        return pulumi.get(self, "display_name")

    @display_name.setter
    def display_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "display_name", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the environment.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def parent(self) -> Optional[pulumi.Input[str]]:
        """
        The Agent to create an Environment for.
        Format: projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>.
        """
        return pulumi.get(self, "parent")

    @parent.setter
    def parent(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "parent", value)

    @property
    @pulumi.getter(name="updateTime")
    def update_time(self) -> Optional[pulumi.Input[str]]:
        """
        Update time of this environment. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine
        fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
        """
        return pulumi.get(self, "update_time")

    @update_time.setter
    def update_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "update_time", value)

    @property
    @pulumi.getter(name="versionConfigs")
    def version_configs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['CxEnvironmentVersionConfigArgs']]]]:
        """
        A list of configurations for flow versions. You should include version configs for all flows that are reachable from [Start Flow][Agent.start_flow] in the agent. Otherwise, an error will be returned.
        Structure is documented below.
        """
        return pulumi.get(self, "version_configs")

    @version_configs.setter
    def version_configs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['CxEnvironmentVersionConfigArgs']]]]):
        pulumi.set(self, "version_configs", value)
class CxEnvironment(pulumi.CustomResource):
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen); regeneration will
    # overwrite hand edits. Pulumi custom resource wrapping the Dialogflow CX
    # `Environment` REST resource (type token gcp:diagflow/cxEnvironment:CxEnvironment).
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 display_name: Optional[pulumi.Input[str]] = None,
                 parent: Optional[pulumi.Input[str]] = None,
                 version_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['CxEnvironmentVersionConfigArgs']]]]] = None,
                 __props__=None):
        """
        Represents an environment for an agent. You can create multiple versions of your agent and publish them to separate environments.
        When you edit an agent, you are editing the draft agent. At any point, you can save the draft agent as an agent version, which is an immutable snapshot of your agent.
        When you save the draft agent, it is published to the default environment. When you create agent versions, you can publish them to custom environments. You can create a variety of custom environments for testing, development, production, etc.

        To get more information about Environment, see:

        * [API documentation](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.environments)
        * How-to Guides
            * [Official Documentation](https://cloud.google.com/dialogflow/cx/docs)

        ## Example Usage
        ### Dialogflowcx Environment Full

        ```python
        import pulumi
        import pulumi_gcp as gcp

        agent = gcp.diagflow.CxAgent("agent",
            display_name="dialogflowcx-agent",
            location="global",
            default_language_code="en",
            supported_language_codes=[
                "fr",
                "de",
                "es",
            ],
            time_zone="America/New_York",
            description="Example description.",
            avatar_uri="https://cloud.google.com/_static/images/cloud/icons/favicons/onecloud/super_cloud.png",
            enable_stackdriver_logging=True,
            enable_spell_correction=True,
            speech_to_text_settings=gcp.diagflow.CxAgentSpeechToTextSettingsArgs(
                enable_speech_adaptation=True,
            ))
        version1 = gcp.diagflow.CxVersion("version1",
            parent=agent.start_flow,
            display_name="1.0.0",
            description="version 1.0.0")
        development = gcp.diagflow.CxEnvironment("development",
            parent=agent.id,
            display_name="Development",
            description="Development Environment",
            version_configs=[gcp.diagflow.CxEnvironmentVersionConfigArgs(
                version=version1.id,
            )])
        ```

        ## Import

        Environment can be imported using any of these accepted formats

        ```sh
         $ pulumi import gcp:diagflow/cxEnvironment:CxEnvironment default {{parent}}/environments/{{name}}
        ```

        ```sh
         $ pulumi import gcp:diagflow/cxEnvironment:CxEnvironment default {{parent}}/{{name}}
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] description: The human-readable description of the environment. The maximum length is 500 characters. If exceeded, the request is rejected.
        :param pulumi.Input[str] display_name: The human-readable name of the environment (unique in an agent). Limit of 64 characters.
        :param pulumi.Input[str] parent: The Agent to create an Environment for.
               Format: projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['CxEnvironmentVersionConfigArgs']]]] version_configs: A list of configurations for flow versions. You should include version configs for all flows that are reachable from [Start Flow][Agent.start_flow] in the agent. Otherwise, an error will be returned.
               Structure is documented below.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: CxEnvironmentArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Represents an environment for an agent. You can create multiple versions of your agent and publish them to separate environments.
        When you edit an agent, you are editing the draft agent. At any point, you can save the draft agent as an agent version, which is an immutable snapshot of your agent.
        When you save the draft agent, it is published to the default environment. When you create agent versions, you can publish them to custom environments. You can create a variety of custom environments for testing, development, production, etc.

        To get more information about Environment, see:

        * [API documentation](https://cloud.google.com/dialogflow/cx/docs/reference/rest/v3/projects.locations.agents.environments)
        * How-to Guides
            * [Official Documentation](https://cloud.google.com/dialogflow/cx/docs)

        ## Example Usage
        ### Dialogflowcx Environment Full

        ```python
        import pulumi
        import pulumi_gcp as gcp

        agent = gcp.diagflow.CxAgent("agent",
            display_name="dialogflowcx-agent",
            location="global",
            default_language_code="en",
            supported_language_codes=[
                "fr",
                "de",
                "es",
            ],
            time_zone="America/New_York",
            description="Example description.",
            avatar_uri="https://cloud.google.com/_static/images/cloud/icons/favicons/onecloud/super_cloud.png",
            enable_stackdriver_logging=True,
            enable_spell_correction=True,
            speech_to_text_settings=gcp.diagflow.CxAgentSpeechToTextSettingsArgs(
                enable_speech_adaptation=True,
            ))
        version1 = gcp.diagflow.CxVersion("version1",
            parent=agent.start_flow,
            display_name="1.0.0",
            description="version 1.0.0")
        development = gcp.diagflow.CxEnvironment("development",
            parent=agent.id,
            display_name="Development",
            description="Development Environment",
            version_configs=[gcp.diagflow.CxEnvironmentVersionConfigArgs(
                version=version1.id,
            )])
        ```

        ## Import

        Environment can be imported using any of these accepted formats

        ```sh
         $ pulumi import gcp:diagflow/cxEnvironment:CxEnvironment default {{parent}}/environments/{{name}}
        ```

        ```sh
         $ pulumi import gcp:diagflow/cxEnvironment:CxEnvironment default {{parent}}/{{name}}
        ```

        :param str resource_name: The name of the resource.
        :param CxEnvironmentArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatcher between the two overloads above: resolves whether the caller
        # passed a CxEnvironmentArgs object or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(CxEnvironmentArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       description: Optional[pulumi.Input[str]] = None,
                       display_name: Optional[pulumi.Input[str]] = None,
                       parent: Optional[pulumi.Input[str]] = None,
                       version_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['CxEnvironmentVersionConfigArgs']]]]] = None,
                       __props__=None):
        # Shared implementation behind both __init__ overloads: validates options,
        # builds the property bag, and registers the resource with the engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: __props__ may only be supplied together
            # with opts.id (the "get existing resource" path).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = CxEnvironmentArgs.__new__(CxEnvironmentArgs)

            __props__.__dict__["description"] = description
            # Required inputs are only enforced when not rehydrating from a URN.
            if display_name is None and not opts.urn:
                raise TypeError("Missing required property 'display_name'")
            __props__.__dict__["display_name"] = display_name
            __props__.__dict__["parent"] = parent
            if version_configs is None and not opts.urn:
                raise TypeError("Missing required property 'version_configs'")
            __props__.__dict__["version_configs"] = version_configs
            # Output-only properties start as None and are filled in by the provider.
            __props__.__dict__["name"] = None
            __props__.__dict__["update_time"] = None
        super(CxEnvironment, __self__).__init__(
            'gcp:diagflow/cxEnvironment:CxEnvironment',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            description: Optional[pulumi.Input[str]] = None,
            display_name: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            parent: Optional[pulumi.Input[str]] = None,
            update_time: Optional[pulumi.Input[str]] = None,
            version_configs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['CxEnvironmentVersionConfigArgs']]]]] = None) -> 'CxEnvironment':
        """
        Get an existing CxEnvironment resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] description: The human-readable description of the environment. The maximum length is 500 characters. If exceeded, the request is rejected.
        :param pulumi.Input[str] display_name: The human-readable name of the environment (unique in an agent). Limit of 64 characters.
        :param pulumi.Input[str] name: The name of the environment.
        :param pulumi.Input[str] parent: The Agent to create an Environment for.
               Format: projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>.
        :param pulumi.Input[str] update_time: Update time of this environment. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine
               fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['CxEnvironmentVersionConfigArgs']]]] version_configs: A list of configurations for flow versions. You should include version configs for all flows that are reachable from [Start Flow][Agent.start_flow] in the agent. Otherwise, an error will be returned.
               Structure is documented below.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _CxEnvironmentState.__new__(_CxEnvironmentState)

        __props__.__dict__["description"] = description
        __props__.__dict__["display_name"] = display_name
        __props__.__dict__["name"] = name
        __props__.__dict__["parent"] = parent
        __props__.__dict__["update_time"] = update_time
        __props__.__dict__["version_configs"] = version_configs
        return CxEnvironment(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        The human-readable description of the environment. The maximum length is 500 characters. If exceeded, the request is rejected.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> pulumi.Output[str]:
        """
        The human-readable name of the environment (unique in an agent). Limit of 64 characters.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the environment.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def parent(self) -> pulumi.Output[Optional[str]]:
        """
        The Agent to create an Environment for.
        Format: projects/<Project ID>/locations/<Location ID>/agents/<Agent ID>.
        """
        return pulumi.get(self, "parent")

    @property
    @pulumi.getter(name="updateTime")
    def update_time(self) -> pulumi.Output[str]:
        """
        Update time of this environment. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine
        fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".
        """
        return pulumi.get(self, "update_time")

    @property
    @pulumi.getter(name="versionConfigs")
    def version_configs(self) -> pulumi.Output[Sequence['outputs.CxEnvironmentVersionConfig']]:
        """
        A list of configurations for flow versions. You should include version configs for all flows that are reachable from [Start Flow][Agent.start_flow] in the agent. Otherwise, an error will be returned.
        Structure is documented below.
        """
        return pulumi.get(self, "version_configs")
| 47.985447
| 320
| 0.658464
| 2,656
| 23,081
| 5.564759
| 0.106928
| 0.060284
| 0.050203
| 0.047632
| 0.867388
| 0.850947
| 0.824899
| 0.801691
| 0.792287
| 0.780176
| 0
| 0.012605
| 0.243794
| 23,081
| 480
| 321
| 48.085417
| 0.834193
| 0.487024
| 0
| 0.611374
| 1
| 0
| 0.118327
| 0.033696
| 0
| 0
| 0
| 0
| 0
| 1
| 0.156398
| false
| 0.004739
| 0.033175
| 0
| 0.28436
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c52946ef091d047c49f7e77011da75fcf906448a
| 172
|
py
|
Python
|
apps/graph/purpleserver/graph/tests/__init__.py
|
rcknr/purplship-server
|
f8ec35af3da870fada0e989c20a8349c958c637c
|
[
"ECL-2.0",
"Apache-2.0"
] | 12
|
2020-02-03T08:11:21.000Z
|
2021-04-13T02:00:38.000Z
|
apps/graph/purpleserver/graph/tests/__init__.py
|
rcknr/purplship-server
|
f8ec35af3da870fada0e989c20a8349c958c637c
|
[
"ECL-2.0",
"Apache-2.0"
] | 9
|
2020-02-12T00:25:08.000Z
|
2021-04-20T10:31:59.000Z
|
apps/graph/purpleserver/graph/tests/__init__.py
|
rcknr/purplship-server
|
f8ec35af3da870fada0e989c20a8349c958c637c
|
[
"ECL-2.0",
"Apache-2.0"
] | 7
|
2020-02-03T08:10:50.000Z
|
2021-04-13T15:17:12.000Z
|
from purpleserver.graph.tests.test_templates import *
from purpleserver.graph.tests.test_carrier_connections import *
from purpleserver.graph.tests.test_user_info import *
| 43
| 63
| 0.860465
| 23
| 172
| 6.217391
| 0.478261
| 0.335664
| 0.440559
| 0.545455
| 0.713287
| 0.503497
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 172
| 3
| 64
| 57.333333
| 0.89375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
3d95d0e01f7c836e75e64d1880639cca77a1d80f
| 2,691
|
py
|
Python
|
ProdTracker/accounts/forms.py
|
rishthas/ProdTracker
|
c62927b95cabb6b81752239eaee0f2f722fb1523
|
[
"MIT"
] | null | null | null |
ProdTracker/accounts/forms.py
|
rishthas/ProdTracker
|
c62927b95cabb6b81752239eaee0f2f722fb1523
|
[
"MIT"
] | null | null | null |
ProdTracker/accounts/forms.py
|
rishthas/ProdTracker
|
c62927b95cabb6b81752239eaee0f2f722fb1523
|
[
"MIT"
] | null | null | null |
from django import forms
from django.contrib.auth.forms import UserCreationForm,UserChangeForm,SetPasswordForm,PasswordChangeForm
from django.contrib.auth.models import User
from .models import Role
class SignUpForm(UserCreationForm):
    """Registration form extending the stock ``UserCreationForm`` with name,
    email and role fields, all styled with Bootstrap ``form-control`` classes.

    Saving the form creates a :class:`django.contrib.auth.models.User`; the
    extra ``role_id`` choice is backed by the local ``Role`` model.
    """

    username = forms.CharField(
        widget=forms.TextInput(attrs={'class': 'form-control form-control-sm ', 'placeholder': 'User Name'}),
        help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.")
    password1 = forms.CharField(
        widget=forms.PasswordInput(attrs={'class': 'form-control form-control-sm ', 'placeholder': 'Password'}),
        label='Password',
        help_text="Please enter the required password")
    password2 = forms.CharField(
        widget=forms.PasswordInput(attrs={'class': 'form-control form-control-sm ', 'placeholder': 'Password'}),
        label='Confirm Password',
        help_text="Please re-enter the password")
    first_name = forms.CharField(
        widget=forms.TextInput(attrs={'class': 'form-control form-control-sm ', 'placeholder': 'First Name'}),
        max_length=30, required=True)
    # Fix: placeholder previously read 'First Name' (copy-paste error).
    last_name = forms.CharField(
        widget=forms.TextInput(attrs={'class': 'form-control form-control-sm ', 'placeholder': 'Last Name'}),
        max_length=30, required=False)
    email = forms.EmailField(
        widget=forms.EmailInput(attrs={'class': 'form-control form-control-sm '}),
        max_length=254,
        help_text='Required. Inform a valid email address.')
    # Role.objects.all() replaces the redundant Role.objects.filter().all()
    # (identical queryset; consistent with EditUserForm).
    role_id = forms.ModelChoiceField(
        widget=forms.Select(attrs={'class': 'form-control form-control-sm '}),
        required=True,
        queryset=Role.objects.all())

    class Meta:
        model = User
        fields = ('username', 'first_name', 'last_name', 'email', 'password1', 'password2', 'role_id')
class EditUserForm(UserChangeForm):
    """Profile-edit form based on ``UserChangeForm``: the username is shown
    read-only while name, email and role remain editable, styled with
    Bootstrap ``form-control`` classes."""

    username = forms.CharField(
        widget=forms.TextInput(attrs={'class': 'form-control form-control-sm ', 'placeholder': 'User Name', 'readonly': 'true'}),
        help_text="Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.")
    first_name = forms.CharField(
        widget=forms.TextInput(attrs={'class': 'form-control form-control-sm ', 'placeholder': 'First Name'}),
        max_length=30, required=True)
    # Fix: placeholder previously read 'First Name' (copy-paste error).
    last_name = forms.CharField(
        widget=forms.TextInput(attrs={'class': 'form-control form-control-sm ', 'placeholder': 'Last Name'}),
        max_length=30, required=False)
    email = forms.EmailField(
        widget=forms.EmailInput(attrs={'class': 'form-control form-control-sm '}),
        max_length=254,
        help_text='Required. Inform a valid email address.')
    role_id = forms.ModelChoiceField(
        widget=forms.Select(attrs={'class': 'form-control form-control-sm '}),
        required=True,
        queryset=Role.objects.all())

    class Meta:
        model = User
        fields = ('username', 'first_name', 'last_name', 'email', 'role_id')
| 103.5
| 232
| 0.735786
| 341
| 2,691
| 5.730205
| 0.214076
| 0.135107
| 0.085977
| 0.128966
| 0.798362
| 0.798362
| 0.798362
| 0.798362
| 0.798362
| 0.798362
| 0
| 0.009885
| 0.097733
| 2,691
| 26
| 233
| 103.5
| 0.794893
| 0
| 0
| 0.416667
| 0
| 0
| 0.369242
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.166667
| 0.166667
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
3dafecb41923c93bbd614308150ac1af74de9944
| 206
|
py
|
Python
|
catalyst/contrib/nn/schedulers/__init__.py
|
gr33n-made/catalyst
|
bd413abc908ef7cbdeab42b0e805277a791e3ddb
|
[
"Apache-2.0"
] | 4
|
2019-12-14T07:27:09.000Z
|
2021-03-23T14:34:37.000Z
|
catalyst/contrib/nn/schedulers/__init__.py
|
gr33n-made/catalyst
|
bd413abc908ef7cbdeab42b0e805277a791e3ddb
|
[
"Apache-2.0"
] | 1
|
2021-01-07T16:13:45.000Z
|
2021-01-21T09:27:54.000Z
|
catalyst/contrib/nn/schedulers/__init__.py
|
gr33n-made/catalyst
|
bd413abc908ef7cbdeab42b0e805277a791e3ddb
|
[
"Apache-2.0"
] | 1
|
2020-12-02T18:42:31.000Z
|
2020-12-02T18:42:31.000Z
|
# flake8: noqa
from torch.optim.lr_scheduler import *
from catalyst.contrib.nn.schedulers.base import BaseScheduler, BatchScheduler
from catalyst.contrib.nn.schedulers.onecycle import OneCycleLRWithWarmup
| 34.333333
| 77
| 0.849515
| 25
| 206
| 6.96
| 0.68
| 0.137931
| 0.218391
| 0.241379
| 0.356322
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005291
| 0.082524
| 206
| 5
| 78
| 41.2
| 0.915344
| 0.058252
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3dba538fa43b27e4f6103171f7c08be0f9e79342
| 32,156
|
py
|
Python
|
src/analysis/chunk_fix_channel_dropping.py
|
Darilbii/Songbird_LFP_Paper
|
20131134353ffc4702eed490fcc3fefec9b08e32
|
[
"MIT"
] | null | null | null |
src/analysis/chunk_fix_channel_dropping.py
|
Darilbii/Songbird_LFP_Paper
|
20131134353ffc4702eed490fcc3fefec9b08e32
|
[
"MIT"
] | null | null | null |
src/analysis/chunk_fix_channel_dropping.py
|
Darilbii/Songbird_LFP_Paper
|
20131134353ffc4702eed490fcc3fefec9b08e32
|
[
"MIT"
] | null | null | null |
import BirdSongToolbox.chunk_analysis_tools as cat
import numpy as np
import scipy
from src.analysis.ml_pipeline_utilities import all_bad_channels, all_drop_temps, all_label_instructions
# Functions added for the Report
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
import BirdSongToolbox.free_epoch_tools as fet
from BirdSongToolbox.import_data import ImportData
from BirdSongToolbox.file_utility_functions import _save_numpy_data, _load_numpy_data
import src.analysis.ml_pipeline_utilities as mlpu
from src.analysis.chunk_parameter_sweep_bin_offset import get_priors
import src.analysis.hilbert_based_pipeline as hbp
from src.analysis.chunk_feature_dropping_pearson import best_bin_width, best_offset
import warnings
channel_drop_path = '/home/debrown/channel_dropping_results'
def get_feature_dropping_corrections_repeats(bird_id='z007', session='day-2016-09-09', feat_type: str = 'pow', verbose=True):
    """Load the saved repeat-run channel-dropping correction results from disk.

    Parameters
    ----------
    bird_id : str
        Identifier of the subject bird.
    session : str
        Identifier of the recording session.
    feat_type : str
        Feature type; must be one of 'pow', 'phase' or 'both'.
    verbose : bool
        Forwarded to the loader to control progress printing.

    Returns
    -------
    mean_correction : ndarray
        The saved "accuracy_repeat_<feat_type>" array.
    std_correction : ndarray
        The saved "std_all_repeat_<feat_type>" array.
    """
    assert feat_type in ['pow', 'phase', 'both'], "invalid feat_type"
    # Both arrays live under channel_drop_path and share the same naming scheme,
    # so load them with a single parameterized loop.
    arrays = [
        _load_numpy_data(data_name=prefix + feat_type, bird_id=bird_id,
                         session=session, source=channel_drop_path, verbose=verbose)
        for prefix in ("accuracy_repeat_", "std_all_repeat_")
    ]
    mean_correction, std_correction = arrays
    return mean_correction, std_correction
def get_feature_dropping_corrections(bird_id='z007', session='day-2016-09-09', feat_type: str = 'pow', verbose=True):
    """Load the saved channel-dropping correction results from disk.

    Parameters
    ----------
    bird_id : str
        Identifier of the subject bird.
    session : str
        Identifier of the recording session.
    feat_type : str
        Feature type; must be one of 'pow', 'phase' or 'both'.
    verbose : bool
        Forwarded to the loader to control progress printing.

    Returns
    -------
    mean_correction : ndarray
        The saved "accuracy_<feat_type>" array.
    std_correction : ndarray
        The saved "std_all_<feat_type>" array.
    """
    assert feat_type in ['pow', 'phase', 'both'], "invalid feat_type"
    # Both arrays live under channel_drop_path and share the same naming scheme,
    # so load them with a single parameterized loop.
    arrays = [
        _load_numpy_data(data_name=prefix + feat_type, bird_id=bird_id,
                         session=session, source=channel_drop_path, verbose=verbose)
        for prefix in ("accuracy_", "std_all_")
    ]
    mean_correction, std_correction = arrays
    return mean_correction, std_correction
def single_frequency_cross_valid_accuracy_chunk(event_data, ClassObj, drop_temps, sel_freq, k_folds=5, seed=None,
                                                verbose=False):
    """ K-Fold Validated Accuracy for each Frequency Band Separately

    Parameters
    ----------
    event_data : ndarray | (classes, instances, frequencies, channels, samples)
        Randomly Rebalanced Neural Data (output of balance_classes)
    ClassObj : class
        classifier object from the scikit-learn package
    drop_temps : list
        list of the indexes of templates to not use as features
    sel_freq : int
        the index of the frequency to be used for the narrow channel dropping
    k_folds : int
        Number of Folds to Split between Template | Train/Test sets, defaults to 5,
    seed : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is
        the random number generator; If None, the random number generator is the RandomState instance used by np.random.
    verbose : bool
        If True the function will print out useful information for user as it runs, defaults to False.

    Returns
    -------
    all_crossvalid_acc : ndarray, (num_folds, frequencies)
        ndarray of the k-fold accuracies
    all_kfold_confusions : ndarray, (num_folds, frequencies, num_labels, num_labels)
        ndarray of the k-fold confusion matrices
    """
    # 1.) Make Array for Holding all of the feature dropping curves
    nested_crossvalid_acc = []  # np.zeros([])
    nested_kfold_confusions = []
    # 2.) Create INDEX of all instances of interests : create_discrete_index()
    label_identities, label_index = cat.create_discrete_index(event_data=event_data)
    identity_index = np.arange(len(label_index))
    sss = cat.StratifiedShuffleSplit(n_splits=k_folds, random_state=seed)
    sss.get_n_splits(identity_index, label_index)
    if verbose:
        print(sss)
    # --------- For Loop over possible Training Sets---------
    for train_index, test_index in sss.split(identity_index, label_index):
        if verbose:
            print("TRAIN:", train_index, "TEST:", test_index)
        X_train, X_test = identity_index[train_index], identity_index[test_index]
        # NOTE(review): y_train/y_test are assigned but never used below — labels
        # are re-derived by cat.ml_order(); kept for parity with the split idiom.
        y_train, y_test = label_index[train_index], label_index[test_index]
        # 4.) Use INDEX to Break into corresponding [template/training set| test set] : ml_selector()
        # 4.1) Get template set/training : ml_selector(event_data, identity_index, label_index, sel_instances)
        sel_train = cat.ml_selector(event_data=event_data, identity_index=label_identities, label_index=label_index,
                                    sel_instances=X_train, )
        # 4.1) Get test set : ml_selector()
        sel_test = cat.ml_selector(event_data=event_data, identity_index=label_identities, label_index=label_index,
                                   sel_instances=X_test)
        ## 5.) Use template/training set to make template : make_templates(event_data)
        templates = cat.make_templates(event_data=sel_train)
        ### 5.2) Remove Template that aren't needed from train
        templates = np.delete(templates, drop_temps, axis=0)
        ## 6.1) Use template/training INDEX and template to create Training Pearson Features : pearson_extraction()
        train_pearson_features = cat.pearson_extraction(event_data=sel_train, templates=templates)
        ## 6.2) Use test INDEX and template to create Test Pearson Features : pearson_extraction()
        test_pearson_features = cat.pearson_extraction(event_data=sel_test, templates=templates)
        # 7.1) Reorganize Test Set into Machine Learning Format : ml_order_pearson()
        ml_trials_train, ml_labels_train = cat.ml_order(extracted_features_array=train_pearson_features)
        # 7.2) Get Ledger of the Features
        num_freqs, num_chans, num_temps = np.shape(train_pearson_features[0][0])  # Get the shape of the Feature data
        ordered_index = cat.make_feature_id_ledger(num_freqs=num_freqs, num_chans=num_chans, num_temps=num_temps)
        # 7.3) Reorganize Training Set into Machine Learning Format : ml_order_pearson()
        ml_trials_test, ml_labels_test = cat.ml_order(extracted_features_array=test_pearson_features)
        fold_frequency_accuracies = []
        fold_frequency_confusions = []
        # Only the single selected frequency is evaluated; the enumerate-over-a-
        # one-element-list shape mirrors the all-frequency variant of this function.
        for _, freq in enumerate([sel_freq]):
            if verbose:
                print("On Frequency Band:", freq, " of:", num_freqs)
            ml_trials_train_cp = ml_trials_train.copy()  # make a copy of the feature extracted Train data
            ml_trials_test_cp = ml_trials_test.copy()  # make a copy of the feature extracted Test data
            ordered_index_cp = ordered_index.copy()  # make a copy of the ordered_index
            all_other_freqs = list(np.delete(np.arange(num_freqs), [freq]))  # Make a index of the other frequencies
            temp_feature_dict = cat.make_feature_dict(ordered_index=ordered_index_cp,
                                                      drop_type='frequency')  # Feature Dict
            # reduce to selected frequency from the COPY of the training data
            ml_trials_train_freq, full_drop = cat.drop_features(features=ml_trials_train_cp, keys=temp_feature_dict,
                                                                desig_drop_list=all_other_freqs)
            # reduce to the selected frequency from the COPY of test data
            ml_trials_test_freq, _ = cat.drop_features(features=ml_trials_test_cp, keys=temp_feature_dict,
                                                       desig_drop_list=all_other_freqs)
            # 8.) Perform Nested Feature Dropping with K-Fold Cross Validation
            acc, _, confusion = cat.clip_classification(ClassObj=ClassObj, train_set=ml_trials_train_freq,
                                                        train_labels=ml_labels_train, test_set=ml_trials_test_freq,
                                                        test_labels=ml_labels_test)
            fold_frequency_accuracies.append(acc)
            fold_frequency_confusions.append(confusion)
        nested_crossvalid_acc.append(fold_frequency_accuracies)
        nested_kfold_confusions.append(fold_frequency_confusions)
    # 9.) Combine all curve arrays to one array
    all_crossvalid_acc = np.array(nested_crossvalid_acc)  # (n_folds, n_freqs)
    all_kfold_confusions = np.array(nested_kfold_confusions)  # (n_folds, n_freqs, n_classes, n_classes)
    return all_crossvalid_acc, all_kfold_confusions
def single_frequency_cross_valid_accuracy_chunk_both(power_data, phase_data, ClassObj, drop_temps, sel_freq, k_folds=5,
                                                     seed=None, verbose=False):
    """K-fold cross-validated classification using BOTH power and phase Pearson features at one frequency band.

    For each stratified fold: templates are built from the training split for
    power and phase separately, Pearson-correlation features are extracted
    against those templates, the power and phase feature vectors are
    concatenated, every frequency band except ``sel_freq`` is dropped, and the
    classifier is fit on the training split and evaluated on the test split.

    Parameters
    ----------
    power_data : ndarray | (classes, instances, frequencies, channels, samples)
        Randomly Rebalanced Neural Data (output of balance_classes)
    phase_data : ndarray | (classes, instances, frequencies, channels, samples)
        Randomly Rebalanced Neural Data (output of balance_classes)
    ClassObj : class
        classifier object from the scikit-learn package
    drop_temps : list
        list of the indexes of templates to not use as features
    sel_freq : int
        the index of the frequency band to evaluate
    k_folds : int
        Number of Folds to Split between Template | Train/Test sets, defaults to 5
    seed : int, RandomState instance or None, optional (default=None)
        If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is
        the random number generator; If None, the random number generator is the RandomState instance used by np.random.
    verbose : bool
        If True the function will print out useful information for user as it runs, defaults to False.

    Returns
    -------
    all_crossvalid_acc : ndarray | (n_folds, n_freqs)
        accuracy per fold; n_freqs is 1 here since only ``sel_freq`` is evaluated
    all_kfold_confusions : ndarray | (n_folds, n_freqs, n_classes, n_classes)
        confusion matrix per fold
    """
    # 1.) Accumulators for the per-fold results
    nested_crossvalid_acc = []  # np.zeros([])
    nested_kfold_confusions = []
    # 2.) Create INDEX of all instances of interest : create_discrete_index()
    label_identities, label_index = cat.create_discrete_index(event_data=power_data)
    identity_index = np.arange(len(label_index))
    sss = cat.StratifiedShuffleSplit(n_splits=k_folds, random_state=seed)
    sss.get_n_splits(identity_index, label_index)
    if verbose:
        print(sss)
    fold_number = 0
    # --------- For Loop over possible Training Sets ---------
    for train_index, test_index in sss.split(identity_index, label_index):
        if verbose:
            print("TRAIN:", train_index, "TEST:", test_index)
        fold_number += 1
        print("On Fold #" + str(fold_number) + ' of ' + str(k_folds))
        X_train, X_test = identity_index[train_index], identity_index[test_index]
        # NOTE(review): y_train/y_test are computed but never used below.
        y_train, y_test = label_index[train_index], label_index[test_index]
        # 4.) Use INDEX to Break into corresponding [template/training set | test set] : ml_selector()
        # 4.1) Get template/training set : ml_selector()
        sel_train_pow = cat.ml_selector(event_data=power_data, identity_index=label_identities, label_index=label_index,
                                        sel_instances=X_train, )
        sel_train_phas = cat.ml_selector(event_data=phase_data, identity_index=label_identities,
                                         label_index=label_index,
                                         sel_instances=X_train, )
        # 4.2) Get test set : ml_selector()
        sel_test_pow = cat.ml_selector(event_data=power_data, identity_index=label_identities, label_index=label_index,
                                       sel_instances=X_test)
        sel_test_phas = cat.ml_selector(event_data=phase_data, identity_index=label_identities, label_index=label_index,
                                        sel_instances=X_test)
        # 5.) Use the training split to make templates, separately for power and phase : make_templates()
        templates_pow = cat.make_templates(event_data=sel_train_pow)
        templates_phas = cat.make_templates(event_data=sel_train_phas)
        # 5.2) Remove templates that aren't wanted as features (drop_temps)
        templates_pow = np.delete(templates_pow, drop_temps, axis=0)
        templates_phas = np.delete(templates_phas, drop_temps, axis=0)
        # 6.1) Training Pearson-correlation features against the templates : pearson_extraction()
        train_pearson_features_pow = cat.pearson_extraction(event_data=sel_train_pow, templates=templates_pow)
        train_pearson_features_phas = cat.pearson_extraction(event_data=sel_train_phas, templates=templates_phas)
        # 6.2) Test Pearson-correlation features against the SAME (training-derived) templates
        test_pearson_features_pow = cat.pearson_extraction(event_data=sel_test_pow, templates=templates_pow)
        test_pearson_features_phas = cat.pearson_extraction(event_data=sel_test_phas, templates=templates_phas)
        # 7.1) Reorganize Training Set into Machine Learning Format and concatenate power & phase feature vectors
        ml_trials_train_pow, ml_labels_train = cat.ml_order(extracted_features_array=train_pearson_features_pow)
        ml_trials_train_phas, _ = cat.ml_order(extracted_features_array=train_pearson_features_phas)
        ml_trials_train = np.concatenate([ml_trials_train_pow, ml_trials_train_phas], axis=-1)
        # 7.2) Get Ledger of the Features; duplicated because the power block and phase block share one layout
        num_freqs, num_chans, num_temps = np.shape(
            train_pearson_features_pow[0][0])  # Get the shape of the Feature data
        ordered_index = cat.make_feature_id_ledger(num_freqs=num_freqs, num_chans=num_chans, num_temps=num_temps)
        ordered_index = np.concatenate([ordered_index, ordered_index], axis=0)
        # 7.3) Reorganize Test Set into Machine Learning Format and concatenate power & phase feature vectors
        ml_trials_test_pow, ml_labels_test = cat.ml_order(extracted_features_array=test_pearson_features_pow)
        ml_trials_test_phas, _ = cat.ml_order(extracted_features_array=test_pearson_features_phas)
        ml_trials_test = np.concatenate([ml_trials_test_pow, ml_trials_test_phas], axis=-1)
        fold_frequency_accuracies = []
        fold_frequency_confusions = []
        # Only the selected frequency band is evaluated; the single-element loop keeps
        # the output shape compatible with the multi-frequency variants of this routine.
        for _, freq in enumerate([sel_freq]):
            # if verbose:
            #     print("On Frequency Band:", freq, " of:", num_freqs)
            ml_trials_train_cp = ml_trials_train.copy()  # make a copy of the feature extracted Train data
            ml_trials_test_cp = ml_trials_test.copy()  # make a copy of the feature extracted Test data
            ordered_index_cp = ordered_index.copy()  # make a copy of the ordered_index
            all_other_freqs = list(np.delete(np.arange(num_freqs), [freq]))  # index of all OTHER frequencies
            temp_feature_dict = cat.make_feature_dict(ordered_index=ordered_index_cp,
                                                      drop_type='frequency')  # Feature Dict
            # Reduce to the selected frequency from the COPY of the training data.
            # NOTE(review): full_drop is unused.
            ml_trials_train_freq, full_drop = cat.drop_features(features=ml_trials_train_cp, keys=temp_feature_dict,
                                                                desig_drop_list=all_other_freqs)
            # Reduce to the selected frequency from the COPY of the test data
            ml_trials_test_freq, _ = cat.drop_features(features=ml_trials_test_cp, keys=temp_feature_dict,
                                                       desig_drop_list=all_other_freqs)
            # 8.) Fit on the training split and evaluate on the test split : clip_classification()
            acc, _, confusion = cat.clip_classification(ClassObj=ClassObj, train_set=ml_trials_train_freq,
                                                        train_labels=ml_labels_train, test_set=ml_trials_test_freq,
                                                        test_labels=ml_labels_test)
            fold_frequency_accuracies.append(acc)
            fold_frequency_confusions.append(confusion)
        nested_crossvalid_acc.append(fold_frequency_accuracies)
        nested_kfold_confusions.append(fold_frequency_confusions)
    # 9.) Combine all curve arrays to one array
    all_crossvalid_acc = np.array(nested_crossvalid_acc)  # (n_folds, n_freqs)
    all_kfold_confusions = np.array(nested_kfold_confusions)  # (n_folds, n_freqs, n_classes, n_classes)
    return all_crossvalid_acc, all_kfold_confusions
def fix_best_feature_dropping_report(bird_id='z007', session='day-2016-09-09'):
    """Run the single-frequency cross-validation report (power, phase, and both) and save the results.

    For each frequency band, using that band's best bin-width/offset, this
    clips events, balances classes, trains an LDA classifier with equal
    priors, and saves the mean accuracy and sample standard deviation
    (across the 5 folds) for power-only, phase-only, and power+phase
    feature sets via ``_save_numpy_data``.

    Parameters
    ----------
    bird_id : str
        identifier of the subject bird, defaults to 'z007'
    session : str
        identifier of the recording session, defaults to 'day-2016-09-09'
    """
    warnings.filterwarnings("ignore", category=UserWarning)  # So that it doesn't print warnings until oblivion
    zdata = ImportData(bird_id=bird_id, session=session)
    # Get the Bird Specific Machine Learning Meta Data
    bad_channels = all_bad_channels[bird_id]
    drop_temps = all_drop_temps[bird_id]
    # Get the Best Parameters for Bin-width and Offset (per frequency band, keyed by session)
    bin_widths = best_bin_width[session]
    offsets = best_offset[session]
    # Reshape Handlabels into Useful Format
    chunk_labels_list, chunk_onsets_list = fet.get_chunk_handlabels(handlabels_list=zdata.song_handlabels)
    # Set the Frequency Bands to Be Used for Feature Extraction (Hz, low/high cutoffs pairwise)
    fc_lo = [4, 8, 25, 30, 50]
    fc_hi = [8, 12, 35, 50, 70]
    # Pre-Process the Data (Power: Hilbert amplitude envelope)
    pred_data_pow = hbp.feature_extraction_chunk(neural_chunks=zdata.song_neural,
                                                 fs=1000,
                                                 l_freqs=fc_lo,
                                                 h_freqs=fc_hi,
                                                 hilbert='amplitude',
                                                 bad_channels=bad_channels,
                                                 drop_bad=True,
                                                 verbose=True)
    # Pre-Process the Data (Phase: Hilbert instantaneous phase)
    pred_data_phase = hbp.feature_extraction_chunk(neural_chunks=zdata.song_neural,
                                                   fs=1000,
                                                   l_freqs=fc_lo, h_freqs=fc_hi,
                                                   hilbert='phase',
                                                   bad_channels=bad_channels,
                                                   drop_bad=True,
                                                   verbose=True)
    # Get the Bird Specific label Instructions
    label_instructions = all_label_instructions[bird_id]  # get this bird's default label instructions
    times_of_interest = fet.label_extractor(all_labels=chunk_labels_list,
                                            starts=chunk_onsets_list[0],
                                            label_instructions=label_instructions)
    # Get Silence Periods
    silent_periods = fet.long_silence_finder(silence=8,
                                             all_labels=chunk_labels_list,
                                             all_starts=chunk_onsets_list[0],
                                             all_ends=chunk_onsets_list[1],
                                             window=(-500, 500))
    # Append the Selected Silence to the end of the Events array (silence becomes the last class)
    times_of_interest.append(silent_periods)
    pow_accuracy_holder = []
    phase_accuracy_holder = []
    both_accuracy_holder = []
    for freq_num, (offset, bin_width) in enumerate(zip(offsets, bin_widths)):
        # Grab the Neural Activity Centered on Each event
        set_window = (offset - bin_width, offset)
        chunk_events_power = fet.event_clipper_nd(data=pred_data_pow, label_events=times_of_interest,
                                                  fs=1000, window=set_window)
        chunk_events_phase = fet.event_clipper_nd(data=pred_data_phase, label_events=times_of_interest,
                                                  fs=1000, window=set_window)
        # Balance the sets
        chunk_events_balanced_pow = mlpu.balance_classes(chunk_events_power)
        chunk_events_balanced_phase = mlpu.balance_classes(chunk_events_phase)
        priors = get_priors(num_labels=len(times_of_interest))  # Set the priors to be equal
        print(priors)
        rand_obj = LinearDiscriminantAnalysis(n_components=None, priors=priors, shrinkage=None,
                                              solver='svd', store_covariance=False, tol=0.0001)
        # Run Analysis on Only Power
        nested_accuracy_pow, _ = single_frequency_cross_valid_accuracy_chunk(event_data=chunk_events_balanced_pow,
                                                                             ClassObj=rand_obj, drop_temps=drop_temps,
                                                                             sel_freq=freq_num,
                                                                             k_folds=5, seed=None, verbose=True)
        pow_accuracy_holder.append(nested_accuracy_pow)
        # Run Analysis on Only Phase
        nested_accuracy_phase, _ = single_frequency_cross_valid_accuracy_chunk(event_data=chunk_events_balanced_phase,
                                                                               ClassObj=rand_obj,
                                                                               drop_temps=drop_temps,
                                                                               sel_freq=freq_num,
                                                                               k_folds=5, seed=None, verbose=True)
        phase_accuracy_holder.append(nested_accuracy_phase)
        # Run Analysis on Both Features Independently (power+phase concatenated)
        nested_accuracy_both, _ = single_frequency_cross_valid_accuracy_chunk_both(
            power_data=chunk_events_balanced_pow, phase_data=chunk_events_balanced_phase, ClassObj=rand_obj,
            drop_temps=drop_temps, sel_freq=freq_num, k_folds=5, seed=None, verbose=True)
        both_accuracy_holder.append(nested_accuracy_both)
    # Save the power Values (mean/sample std across the 5 folds: axis=1, ddof=1)
    accuracy_pow = np.mean(pow_accuracy_holder, axis=1)
    std_pow = np.std(pow_accuracy_holder, axis=1, ddof=1)
    _save_numpy_data(data=accuracy_pow, data_name="accuracy_pow", bird_id=bird_id,
                     session=session, destination=channel_drop_path, make_parents=True, verbose=True)
    _save_numpy_data(data=std_pow, data_name="std_all_pow", bird_id=bird_id,
                     session=session, destination=channel_drop_path, make_parents=True, verbose=True)
    # Save the phase Values
    accuracy_phase = np.mean(phase_accuracy_holder, axis=1)
    std_phase = np.std(phase_accuracy_holder, axis=1, ddof=1)
    _save_numpy_data(data=accuracy_phase, data_name="accuracy_phase", bird_id=bird_id,
                     session=session,
                     destination=channel_drop_path, make_parents=True, verbose=True)
    _save_numpy_data(data=std_phase, data_name="std_all_phase", bird_id=bird_id,
                     session=session, destination=channel_drop_path, make_parents=True, verbose=True)
    # Save the both Values
    accuracy_both = np.mean(both_accuracy_holder, axis=1)
    std_both = np.std(both_accuracy_holder, axis=1, ddof=1)
    _save_numpy_data(data=accuracy_both, data_name="accuracy_both", bird_id=bird_id,
                     session=session, destination=channel_drop_path, make_parents=True, verbose=True)
    _save_numpy_data(data=std_both, data_name="std_all_both", bird_id=bird_id,
                     session=session, destination=channel_drop_path, make_parents=True, verbose=True)
def fix_best_feature_dropping_report_repeats(bird_id='z007', session='day-2016-09-09'):
    """Repeated (1000x) single-frequency cross-validation report for power, phase, and both feature sets.

    Same pipeline as ``fix_best_feature_dropping_report`` but each 5-fold
    cross-validation is repeated 1000 times with fresh random splits, and the
    mean/std are taken over the pooled fold results before saving under the
    ``*_repeat_*`` data names.

    Parameters
    ----------
    bird_id : str
        identifier of the subject bird, defaults to 'z007'
    session : str
        identifier of the recording session, defaults to 'day-2016-09-09'
    """
    warnings.filterwarnings("ignore", category=UserWarning)  # So that it doesn't print warnings until oblivion
    zdata = ImportData(bird_id=bird_id, session=session)
    # Get the Bird Specific Machine Learning Meta Data
    bad_channels = all_bad_channels[bird_id]
    drop_temps = all_drop_temps[bird_id]
    # Get the Best Parameters for Bin-width and Offset (per frequency band, keyed by session)
    bin_widths = best_bin_width[session]
    offsets = best_offset[session]
    # Reshape Handlabels into Useful Format
    chunk_labels_list, chunk_onsets_list = fet.get_chunk_handlabels(handlabels_list=zdata.song_handlabels)
    # Set the Frequency Bands to Be Used for Feature Extraction (Hz, low/high cutoffs pairwise)
    fc_lo = [4, 8, 25, 30, 50]
    fc_hi = [8, 12, 35, 50, 70]
    # Pre-Process the Data (Power: Hilbert amplitude envelope)
    pred_data_pow = hbp.feature_extraction_chunk(neural_chunks=zdata.song_neural,
                                                 fs=1000,
                                                 l_freqs=fc_lo,
                                                 h_freqs=fc_hi,
                                                 hilbert='amplitude',
                                                 bad_channels=bad_channels,
                                                 drop_bad=True,
                                                 verbose=True)
    # Pre-Process the Data (Phase: Hilbert instantaneous phase)
    pred_data_phase = hbp.feature_extraction_chunk(neural_chunks=zdata.song_neural,
                                                   fs=1000,
                                                   l_freqs=fc_lo, h_freqs=fc_hi,
                                                   hilbert='phase',
                                                   bad_channels=bad_channels,
                                                   drop_bad=True,
                                                   verbose=True)
    # Get the Bird Specific label Instructions
    label_instructions = all_label_instructions[bird_id]  # get this bird's default label instructions
    times_of_interest = fet.label_extractor(all_labels=chunk_labels_list,
                                            starts=chunk_onsets_list[0],
                                            label_instructions=label_instructions)
    # Get Silence Periods
    silent_periods = fet.long_silence_finder(silence=8,
                                             all_labels=chunk_labels_list,
                                             all_starts=chunk_onsets_list[0],
                                             all_ends=chunk_onsets_list[1],
                                             window=(-500, 500))
    # Append the Selected Silence to the end of the Events array (silence becomes the last class)
    times_of_interest.append(silent_periods)
    pow_accuracy_holder = []
    phase_accuracy_holder = []
    both_accuracy_holder = []
    for freq_num, (offset, bin_width) in enumerate(zip(offsets, bin_widths)):
        # Grab the Neural Activity Centered on Each event
        set_window = (offset - bin_width, offset)
        chunk_events_power = fet.event_clipper_nd(data=pred_data_pow, label_events=times_of_interest,
                                                  fs=1000, window=set_window)
        chunk_events_phase = fet.event_clipper_nd(data=pred_data_phase, label_events=times_of_interest,
                                                  fs=1000, window=set_window)
        # Balance the sets
        chunk_events_balanced_pow = mlpu.balance_classes(chunk_events_power)
        chunk_events_balanced_phase = mlpu.balance_classes(chunk_events_phase)
        priors = get_priors(num_labels=len(times_of_interest))  # Set the priors to be equal
        print(priors)
        rand_obj = LinearDiscriminantAnalysis(n_components=None, priors=priors, shrinkage=None,
                                              solver='svd', store_covariance=False, tol=0.0001)
        nested_accuracy_phase = []
        nested_accuracy_pow = []
        nested_accuracy_both = []
        # Repeat the 5-fold CV 1000 times; each call draws fresh random splits (seed=None).
        # NOTE(review): the loop variable `index` is unused, and `accuracy_pow`/`accuracy_phase`/
        # `accuracy_both` below are later shadowed by the saving section — intentional but fragile.
        for index in range(1000):
            # Run Analysis on Only Power
            accuracy_pow, _ = single_frequency_cross_valid_accuracy_chunk(event_data=chunk_events_balanced_pow,
                                                                          ClassObj=rand_obj, drop_temps=drop_temps,
                                                                          sel_freq=freq_num,
                                                                          k_folds=5, seed=None, verbose=True)
            nested_accuracy_pow.extend(accuracy_pow)
            # Run Analysis on Only Phase
            accuracy_phase, _ = single_frequency_cross_valid_accuracy_chunk(event_data=chunk_events_balanced_phase,
                                                                            ClassObj=rand_obj,
                                                                            drop_temps=drop_temps,
                                                                            sel_freq=freq_num,
                                                                            k_folds=5, seed=None, verbose=True)
            nested_accuracy_phase.extend(accuracy_phase)
            # Run Analysis on Both Features Independently (power+phase concatenated)
            accuracy_both, _ = single_frequency_cross_valid_accuracy_chunk_both(
                power_data=chunk_events_balanced_pow, phase_data=chunk_events_balanced_phase, ClassObj=rand_obj,
                drop_temps=drop_temps, sel_freq=freq_num, k_folds=5, seed=None, verbose=True)
            nested_accuracy_both.extend(accuracy_both)
        pow_accuracy_holder.append(nested_accuracy_pow)
        phase_accuracy_holder.append(nested_accuracy_phase)
        both_accuracy_holder.append(nested_accuracy_both)
    # Save the power Values (mean/sample std across the pooled fold results: axis=1, ddof=1)
    accuracy_pow = np.mean(pow_accuracy_holder, axis=1)
    std_pow = np.std(pow_accuracy_holder, axis=1, ddof=1)
    _save_numpy_data(data=accuracy_pow, data_name="accuracy_repeat_pow", bird_id=bird_id,
                     session=session, destination=channel_drop_path, make_parents=True, verbose=True)
    _save_numpy_data(data=std_pow, data_name="std_all_repeat_pow", bird_id=bird_id,
                     session=session, destination=channel_drop_path, make_parents=True, verbose=True)
    # Save the phase Values
    accuracy_phase = np.mean(phase_accuracy_holder, axis=1)
    std_phase = np.std(phase_accuracy_holder, axis=1, ddof=1)
    _save_numpy_data(data=accuracy_phase, data_name="accuracy_repeat_phase", bird_id=bird_id,
                     session=session,
                     destination=channel_drop_path, make_parents=True, verbose=True)
    _save_numpy_data(data=std_phase, data_name="std_all_repeat_phase", bird_id=bird_id,
                     session=session, destination=channel_drop_path, make_parents=True, verbose=True)
    # Save the both Values
    accuracy_both = np.mean(both_accuracy_holder, axis=1)
    std_both = np.std(both_accuracy_holder, axis=1, ddof=1)
    _save_numpy_data(data=accuracy_both, data_name="accuracy_repeat_both", bird_id=bird_id,
                     session=session, destination=channel_drop_path, make_parents=True, verbose=True)
    _save_numpy_data(data=std_both, data_name="std_all_repeat_both", bird_id=bird_id,
                     session=session, destination=channel_drop_path, make_parents=True, verbose=True)
| 53.95302
| 125
| 0.641715
| 3,919
| 32,156
| 4.922174
| 0.088798
| 0.01493
| 0.009331
| 0.011198
| 0.923173
| 0.907206
| 0.902385
| 0.882789
| 0.871954
| 0.847071
| 0
| 0.009989
| 0.290179
| 32,156
| 595
| 126
| 54.043697
| 0.835137
| 0.229351
| 0
| 0.730887
| 0
| 0
| 0.021933
| 0.002419
| 0
| 0
| 0
| 0
| 0.006116
| 1
| 0.018349
| false
| 0
| 0.045872
| 0
| 0.076453
| 0.024465
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9a871f72a16faf66be4c5510b835882ad92a45c6
| 13,825
|
py
|
Python
|
comments/tests/custom_user.py
|
RichardHirtle/c4all
|
a09c4b098cf1a58ed5e3ab6116a749a17ec035e0
|
[
"MIT"
] | 4
|
2016-09-03T12:43:13.000Z
|
2020-04-22T14:49:28.000Z
|
comments/tests/custom_user.py
|
RichardHirtle/c4all
|
a09c4b098cf1a58ed5e3ab6116a749a17ec035e0
|
[
"MIT"
] | 1
|
2019-09-25T12:49:01.000Z
|
2020-08-04T11:33:09.000Z
|
comments/tests/custom_user.py
|
RichardHirtle/c4all
|
a09c4b098cf1a58ed5e3ab6116a749a17ec035e0
|
[
"MIT"
] | 3
|
2015-03-17T13:38:42.000Z
|
2016-05-06T15:06:31.000Z
|
from django.db import IntegrityError
from django.test import Client
from django.core.urlresolvers import reverse
from base import BaseTestCase
from comments.models import CustomUser, Site, Thread, Comment
class CustomUserManagerTestCase(BaseTestCase):
    """Tests for the custom manager on CustomUser: creation, domain data, bulk deletes."""

    def test_customusermanager_create_user_succeeds(self):
        """create_user stores exactly one user with the given email."""
        address = 'a@b.com'
        CustomUser.objects.create_user(address, 'pass')
        self.assertEqual(CustomUser.objects.count(), 1)
        created = CustomUser.objects.all()[0]
        self.assertEqual(created.email, address)

    def test_customusermanager_user_creation_with_existing_email_fails(self):
        """A second create_user with a duplicate email raises IntegrityError."""
        address = 'a@b.com'
        CustomUser.objects.create_user(address, 'pass')
        with self.assertRaises(IntegrityError):
            CustomUser.objects.create_user(address, 'pass')

    def test_get_user_domain_data(self):
        """get_user_domain_data reports the user's comments, likes, and dislikes."""
        member = CustomUser.objects.create_user('a@b.com', 'pass')
        site = Site.objects.create(domain='www.google.com')
        thread = Thread.objects.create(site=site, url='url')
        thread.disliked_by.add(member)
        comment = Comment.objects.create(thread=thread, user=member, text='aaa')
        comment.liked_by.add(member)
        domain_data = member.get_user_domain_data()
        self.assertEqual(list(domain_data['posted_comments']), [comment.id])
        self.assertEqual(list(domain_data['liked_threads']), [])
        self.assertEqual(list(domain_data['disliked_threads']), [thread.id])
        self.assertEqual(list(domain_data['liked_comments']), [comment.id])
        self.assertEqual(list(domain_data['disliked_comments']), [])

    def test_bulk_delete_successfully_deletes_users(self):
        """bulk_delete removes every user in the queryset."""
        CustomUser.objects.bulk_create([
            CustomUser(email='a@b.com', password='pass'),
            CustomUser(email='b@b.com', password='pass'),
        ])
        everyone = CustomUser.objects.all()
        CustomUser.objects.bulk_delete(everyone)
        self.assertEqual(CustomUser.objects.count(), 0)

    def test_bulk_delete_successfully_deletes_non_staff_users(self):
        """bulk_delete with is_staff=False leaves staff accounts in place."""
        CustomUser.objects.bulk_create([
            CustomUser(email='a@b.com', password='pass'),
            CustomUser(email='c@b.com', password='pass', is_staff=True),
        ])
        everyone = CustomUser.objects.all()
        CustomUser.objects.bulk_delete(everyone, is_staff=False)
        self.assertEqual(CustomUser.objects.count(), 1)
class CustomUserTestCase(BaseTestCase):
    """Tests for CustomUser hide/unhide behavior and the custom delete method."""

    def test_hide_user_is_success(self):
        """A regular user is not hidden for a site until hide() is called."""
        member = CustomUser.objects.create_user(
            email="donald@duck.com",
            password="pass",
        )
        site = Site.objects.create(domain='www.google.com')
        self.assertFalse(member.hidden.filter(id=site.id))
        member.hide(site)
        self.assertTrue(member.hidden.filter(id=site.id))

    def test_hide_admin_doesnt_change_hidden_state(self):
        """Staff users cannot be hidden: hide() leaves their hidden state untouched."""
        admin = CustomUser.objects.create_superuser(
            email="donald@duck.com",
            password="pass",
        )
        site = Site.objects.create(domain='www.google.com')
        self.assertFalse(admin.hidden.filter(id=site.id))
        admin.hide(site)
        self.assertFalse(admin.hidden.filter(id=site.id))

    def test_unhide_user_is_success(self):
        """unhide() clears a previously set hidden state for a regular user."""
        member = CustomUser.objects.create_user(
            email="donald@duck.com",
            password="pass",
        )
        site = Site.objects.create(domain='www.google.com')
        member.hidden.add(site)
        member.save()
        self.assertTrue(member.hidden.filter(id=site.id))
        member.unhide(site)
        self.assertFalse(member.hidden.filter(id=site.id))

    def test_unhide_admin_is_success(self):
        """unhide() works for admins too: although hide() is a no-op for staff,
        the hidden flag could still be set in the DB by other means."""
        admin = CustomUser.objects.create_superuser(
            email="donald@duck.com",
            password="pass",
        )
        site = Site.objects.create(domain='www.google.com')
        admin.hide(site)
        admin.save()
        admin.unhide(site)
        self.assertFalse(admin.hidden.filter(id=site.id))

    def test_delete_method_deletes_user_comments(self):
        """Deleting a non-staff user removes the user together with their comments."""
        doomed = CustomUser.objects.create_user(
            email="donald@duck.com",
            password="pass"
        )
        CustomUser.objects.create_user(
            email="daffy@duck.com",
            password="pass"
        )
        site = Site.objects.create(domain='www.google.com')
        thread = Thread.objects.create(site=site, url='url')
        Comment.objects.create(thread=thread, user=doomed, text='aaa')
        everyone = CustomUser.objects.all()
        self.assertEqual(everyone.count(), 2)
        self.assertEqual(Comment.objects.get(user=doomed).user, doomed)
        doomed.delete()
        self.assertEqual(everyone.count(), 1)
        self.assertFalse(Comment.objects.exists())

    def test_delete_method_doesnt_delete_admin_user(self):
        """delete() must not remove a staff member."""
        admin = CustomUser.objects.create_superuser(
            email="donald@duck.com",
            password="pass"
        )
        site = Site.objects.create(domain='www.google.com')
        thread = Thread.objects.create(site=site, url='url')
        Comment.objects.create(thread=thread, user=admin, text='aaa')
        everyone = CustomUser.objects.all()
        self.assertEqual(everyone.count(), 1)
        admin.delete()
        self.assertEqual(everyone.count(), 1)

    def test_delete_method_deletes_users_comments(self):
        """All comments authored by a user are erased along with the user."""
        site = Site.objects.create(domain='www.donald.duck')
        thread = Thread.objects.create(site=site, url='url')
        author = CustomUser.objects.create_user(
            email="donald@duck.com",
            password="pass"
        )
        Comment.objects.create(user=author, thread=thread)
        Comment.objects.create(user=author, thread=thread)
        self.assertEqual(Comment.objects.count(), 2)
        author.delete()
        self.assertEqual(Comment.objects.count(), 0)
class RegisterEndpointTestCase(BaseTestCase):
    """Tests for the comments:register_user endpoint."""

    def setUp(self):
        self.client = Client()
        self.endpoint_url = reverse('comments:register_user')

    def _form_data(self, omit=(), **overrides):
        """Build a complete registration payload, minus ``omit`` fields, with ``overrides`` applied."""
        payload = {
            'email': 'a@b.com',
            'password': 'pass',
            'password2': 'pass',
            'avatar_num': 1,
            'full_name': 'donald duck',
        }
        payload.update(overrides)
        for field in omit:
            payload.pop(field, None)
        return payload

    def test_register_user_succeeds(self):
        """A complete, valid form creates exactly one user and returns 200."""
        response = self.client.post(self.endpoint_url, data=self._form_data())
        payload = self.get_data_from_response(response.content)
        self.assertEqual(payload['status_code'], 200)
        self.assertEqual(CustomUser.objects.count(), 1)
        self.assertEqual(CustomUser.objects.all()[0].email, 'a@b.com')

    def test_register_user_twice_with_same_email_fails(self):
        """Registering the same email twice yields a 400 on the second attempt."""
        self.client.post(self.endpoint_url, data=self._form_data())
        response = self.client.post(self.endpoint_url, data=self._form_data())
        payload = self.get_data_from_response(response.content)
        self.assertEqual(payload['status_code'], 400)

    def test_register_user_with_invalid_email_fails(self):
        """A malformed email address is rejected with 400."""
        response = self.client.post(self.endpoint_url,
                                    data=self._form_data(email='this is invalid email'))
        payload = self.get_data_from_response(response.content)
        self.assertEqual(payload['status_code'], 400)

    def test_try_get_request_on_register_endpoint_fails(self):
        """GET is not an allowed method on the register endpoint (405)."""
        response = self.client.get(self.endpoint_url,
                                   data=self._form_data(email='this is invalid email'))
        self.assertEqual(response.status_code, 405)

    def test_register_user_with_nonexistent_email_fails(self):
        """Omitting the email field is rejected with 400."""
        response = self.client.post(self.endpoint_url, data=self._form_data(omit=('email',)))
        payload = self.get_data_from_response(response.content)
        self.assertEqual(payload['status_code'], 400)

    def test_register_user_with_nonexistent_password_fails(self):
        """Omitting both password fields is rejected with 400."""
        response = self.client.post(self.endpoint_url,
                                    data=self._form_data(omit=('password', 'password2')))
        payload = self.get_data_from_response(response.content)
        self.assertEqual(payload['status_code'], 400)

    def test_register_user_returns_iframeId_if_provided(self):
        """A supplied iframeId is echoed back in the successful response."""
        frame_token = 'some_donald_duck_id_123'
        response = self.client.post(self.endpoint_url,
                                    data=self._form_data(iframeId=frame_token))
        payload = self.get_data_from_response(response.content)
        self.assertEqual(payload['status_code'], 200)
        self.assertTrue('iframeId' in payload)
        self.assertEqual(payload['iframeId'], frame_token)
class LoginEndpointTestCase(BaseTestCase):
    """Tests for the comments:login_user endpoint."""

    def setUp(self):
        self.client = Client()
        self.endpoint_url = reverse('comments:login_user')

    def test_login_user_succeeds(self):
        """Correct credentials with a site id return 200."""
        CustomUser.objects.create_user('a@b.com', 'pass')
        site = Site.objects.create(domain='www.google.com')
        response = self.client.post(self.endpoint_url, data={
            'site_id': site.id,
            'email': 'a@b.com',
            'password': 'pass',
        })
        payload = self.get_data_from_response(response.content)
        self.assertEqual(payload['status_code'], 200)

    def test_login_user_fails_no_data(self):
        """An empty form is rejected with 400."""
        response = self.client.post(self.endpoint_url, data={})
        payload = self.get_data_from_response(response.content)
        self.assertEqual(payload['status_code'], 400)

    def test_login_user_no_password_fails(self):
        """Missing password is rejected with 400."""
        response = self.client.post(self.endpoint_url, data={
            'email': 'a@b.com',
        })
        payload = self.get_data_from_response(response.content)
        self.assertEqual(payload['status_code'], 400)

    def test_login_user_no_email_fails(self):
        """Missing email is rejected with 400."""
        response = self.client.post(self.endpoint_url, data={
            'password': 'pass',
        })
        payload = self.get_data_from_response(response.content)
        self.assertEqual(payload['status_code'], 400)

    def test_login_user_wrong_password_fails(self):
        """A wrong password for an existing account is rejected with 400."""
        CustomUser.objects.create_user('a@b.com', 'pass')
        response = self.client.post(self.endpoint_url, data={
            'email': 'a@b.com',
            'password': 'wrong_password',
        })
        payload = self.get_data_from_response(response.content)
        self.assertEqual(payload['status_code'], 400)

    def test_login_user_wrong_email_fails(self):
        """An unknown email is rejected with 400."""
        response = self.client.post(self.endpoint_url, data={
            'email': 'wrong@wrongity.wrong',
            'password': 'pass',
        })
        payload = self.get_data_from_response(response.content)
        self.assertEqual(payload['status_code'], 400)

    def test_login_user_wrong_http_method(self):
        """GET is not an allowed method on the login endpoint (405)."""
        response = self.client.get(self.endpoint_url, data={
            'email': 'a@b.com',
            'password': 'pass',
        })
        self.assertEqual(response.status_code, 405)

    def test_login_user_returns_iframeId_if_provided(self):
        """A supplied iframeId is echoed back in the successful response."""
        CustomUser.objects.create_user('a@b.com', 'pass')
        frame_token = 'some_donald_duck_id_123'
        site = Site.objects.create(domain='www.google.com')
        response = self.client.post(self.endpoint_url, data={
            'site_id': site.id,
            'email': 'a@b.com',
            'password': 'pass',
            'iframeId': frame_token,
        })
        payload = self.get_data_from_response(response.content)
        self.assertEqual(payload['status_code'], 200)
        self.assertTrue('iframeId' in payload)
        self.assertEqual(payload['iframeId'], frame_token)
| 31.854839
| 78
| 0.611356
| 1,632
| 13,825
| 4.995711
| 0.099265
| 0.050288
| 0.033117
| 0.037287
| 0.831841
| 0.79345
| 0.746596
| 0.703177
| 0.669324
| 0.632896
| 0
| 0.008507
| 0.268788
| 13,825
| 433
| 79
| 31.928406
| 0.798002
| 0.064738
| 0
| 0.742574
| 0
| 0
| 0.114295
| 0.005353
| 0
| 0
| 0
| 0
| 0.151815
| 1
| 0.09571
| false
| 0.148515
| 0.016502
| 0
| 0.125413
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
9ab7afc88085b8de1b113dacc0dde8df8b141ed4
| 4,955
|
py
|
Python
|
stocks/models.py
|
Gomax-07/stockInvent
|
614beaa2fad59857b568452ebcda2c3f9a0c6d7f
|
[
"MIT"
] | null | null | null |
stocks/models.py
|
Gomax-07/stockInvent
|
614beaa2fad59857b568452ebcda2c3f9a0c6d7f
|
[
"MIT"
] | null | null | null |
stocks/models.py
|
Gomax-07/stockInvent
|
614beaa2fad59857b568452ebcda2c3f9a0c6d7f
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.utils import timezone
# from django.contrib.auth.models import User
# from users.models import User
from django.template.defaultfilters import slugify
from django.urls import reverse
class Supplier(models.Model):
    """A goods supplier with a unique name; its detail URL is addressed by slug."""

    id = models.IntegerField(primary_key=True)
    # user = models.OneToOneField(User, on_delete=models.CASCADE, null=True, blank=True)
    name = models.CharField(max_length=120, unique=True)
    address = models.CharField(max_length=220)
    created_date = models.DateField(auto_now_add=True)
    slug = models.SlugField(max_length=250, null=False, unique=True)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        """Return the canonical detail URL for this supplier, resolved by slug."""
        return reverse('supplier_detail', kwargs={'slug': self.slug})

    def save(self, *args, **kwargs):
        # Regenerate the slug from the (unique) name on every save.
        self.slug = slugify(self.name)
        super().save(*args, **kwargs)
class Buyer(models.Model):
    """A buyer with a unique name; its detail URL is addressed by slug."""

    id = models.IntegerField(primary_key=True)
    # user = models.OneToOneField(User, on_delete=models.CASCADE, null=True, blank=True)
    name = models.CharField(max_length=120, unique=True)
    address = models.CharField(max_length=220)
    created_date = models.DateField(auto_now_add=True)
    slug = models.SlugField(max_length=250, null=False, unique=True)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        """Return the canonical detail URL for this buyer, resolved by slug."""
        return reverse('buyer_detail', kwargs={'slug': self.slug})

    def save(self, *args, **kwargs):
        # Regenerate the slug from the (unique) name on every save.
        self.slug = slugify(self.name)
        super().save(*args, **kwargs)
class Season(models.Model):
    """A named season with a short description; its detail URL is addressed by slug."""

    name = models.CharField(max_length=120, unique=True)
    description = models.CharField(max_length=220)
    created_date = models.DateField(auto_now_add=True)
    slug = models.SlugField(max_length=250, null=False, unique=True)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        """Return the canonical detail URL for this season, resolved by slug."""
        return reverse('season_detail', kwargs={'slug': self.slug})

    def save(self, *args, **kwargs):
        # Regenerate the slug from the (unique) name on every save.
        self.slug = slugify(self.name)
        super().save(*args, **kwargs)
class Drop(models.Model):
    # A named "drop" — presumably a delivery/collection batch referenced as free
    # text by Order.drop (TODO confirm). Only a unique name and creation date are stored.
    name = models.CharField(max_length=120, unique=True)
    created_date = models.DateField(auto_now_add=True)

    def __str__(self):
        # Display the drop by its unique name.
        return self.name
class Product(models.Model):
    """A product with an explicit sort position and a slug URL."""
    id = models.AutoField(primary_key=True)
    name = models.CharField(max_length=120, unique=True)
    slug = models.SlugField(max_length=250, null=False, unique=True)
    sortno = models.PositiveIntegerField()
    created_date = models.DateField(auto_now_add=True)

    def __str__(self):
        """Represent the product by its name."""
        return self.name

    def get_absolute_url(self):
        """Return the canonical detail URL, resolved by slug."""
        return reverse('product_detail', kwargs={'slug': self.slug})

    def save(self, *args, **kwargs):
        """Regenerate the slug from the name, then persist normally."""
        self.slug = slugify(self.name)
        super().save(*args, **kwargs)
class Order(models.Model):
    """An order linking (by plain-text name) a supplier, product, buyer,
    season and drop, with a workflow status and a slug detail URL.

    NOTE(review): supplier/product/buyer/season/drop are CharFields, not
    foreign keys, so referential integrity is not enforced here.
    """
    STATUS_CHOICE = (
        ('pending', 'Pending'),
        ('decline', 'Decline'),
        ('approved', 'Approved'),
        ('processing', 'Processing'),
        ('complete', 'Complete'),
        ('bulk', 'Bulk'),
    )
    id = models.IntegerField(primary_key=True)
    # `name` is NOT unique, but `slug` (derived from it) IS — two orders
    # with the same name will raise an IntegrityError on the second save.
    name = models.CharField(max_length=120, unique=False)
    supplier = models.CharField(max_length=50)
    product = models.CharField(max_length=50)
    design = models.CharField(max_length=50)
    color = models.CharField(max_length=50)
    buyer = models.CharField(max_length=50)
    season = models.CharField(max_length=50)
    drop = models.CharField(max_length=50)
    status = models.CharField(max_length=10, choices=STATUS_CHOICE)
    created_date = models.DateField(auto_now_add=True)
    slug = models.SlugField(max_length=250, null=False, unique=True)

    def __str__(self):
        # BUG FIX: the original returned self.product.name, but `product`
        # is a CharField (a str), so str(order) always raised AttributeError.
        return self.product

    def get_absolute_url(self):
        """Return the canonical detail URL, resolved by slug."""
        return reverse('order_detail', kwargs={'slug': self.slug})

    def save(self, *args, **kwargs):
        """Regenerate the slug from the (non-unique) name, then persist."""
        self.slug = slugify(self.name)
        super().save(*args, **kwargs)
class Delivery(models.Model):
    """A delivery of an order (referenced by plain text) via a courier."""
    id = models.IntegerField(primary_key=True)
    order = models.CharField(max_length=50)
    courier_name = models.CharField(max_length=120)
    created_date = models.DateField(auto_now_add=True)
    # Unique slug derived from the courier name; two deliveries by the
    # same courier will collide on save.
    slug = models.SlugField(max_length=250, null=False, unique=True)

    def __str__(self):
        """Represent the delivery by its courier's name."""
        return self.courier_name

    def get_absolute_url(self):
        """Return the canonical detail URL, resolved by slug."""
        return reverse('delivery_detail', kwargs={'slug': self.slug})

    def save(self, *args, **kwargs):
        # BUG FIX: the original slugified self.name, a field Delivery does
        # not have, so every save raised AttributeError. Use courier_name,
        # the field this model actually defines and displays.
        self.slug = slugify(self.courier_name)
        super().save(*args, **kwargs)
| 30.96875
| 88
| 0.651665
| 608
| 4,955
| 5.141447
| 0.139803
| 0.071977
| 0.109405
| 0.145873
| 0.81318
| 0.746641
| 0.730326
| 0.730326
| 0.715931
| 0.715931
| 0
| 0.017192
| 0.225227
| 4,955
| 159
| 89
| 31.163522
| 0.797083
| 0.048234
| 0
| 0.624
| 0
| 0
| 0.040977
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.152
| false
| 0
| 0.032
| 0.056
| 0.656
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
9adba128926809c01da50feba32385c311c8b13b
| 34,177
|
py
|
Python
|
rooms/tests/test_views.py
|
xNovax/RoomScout
|
287240a9d13f2b8f6ce9abdc95cf611671970fc3
|
[
"MIT"
] | 24
|
2020-02-01T17:22:47.000Z
|
2020-10-24T19:49:36.000Z
|
rooms/tests/test_views.py
|
xNovax/RoomScout
|
287240a9d13f2b8f6ce9abdc95cf611671970fc3
|
[
"MIT"
] | 16
|
2020-02-01T14:30:15.000Z
|
2020-08-13T20:49:56.000Z
|
rooms/tests/test_views.py
|
aaronspindler/RoomScout
|
287240a9d13f2b8f6ce9abdc95cf611671970fc3
|
[
"MIT"
] | 6
|
2020-02-01T22:07:46.000Z
|
2021-03-05T14:05:27.000Z
|
from decimal import Decimal
from django.contrib.auth import get_user_model
from django.test import TestCase, Client
from django.urls import reverse
from houses.models import House
from rooms.models import RoomLike, Room
class RoomsViewTests(TestCase):
def setUp(self):
self.client = Client()
User = get_user_model()
self.user = User.objects.create_user(username='FredFlintstone', email='aaron@xnovax.net', password='babadoo')
self.user2 = User.objects.create_user(username='JackyFlintstone', email='jacky@flintstone.com', password='lovefred')
house = House.objects.create(user=self.user)
house.place_id = 'EiwyNTI5IFN0YWxsaW9uIERyLCBPc2hhd2EsIE9OIEwxSCA3SzQsIENhbmFkYSIxEi8KFAoSCY_JD3vDG9WJEe3JFhlBvwOKEOETKhQKEgnrS9FlwxvViRHYx20MM9m-8g'
house.lat = '43.95858010000001'
house.lon = '-78.91587470000002'
house.street_number = 2529
house.street_name = 'Stallion Drive'
house.city = 'Oshawa'
house.prov_state = 'ON'
house.postal_code = 'L1H 0M4'
house.country = 'Canada'
house.save()
self.house = house
def test_rooms_views_room_saved_get(self):
print('Testing rooms.views.room_saved() GET')
self.client.force_login(self.user)
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
roomlike = RoomLike.objects.create(room=room, user=self.user)
response = self.client.get(reverse('room_saved'), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'rooms/room_saved.html')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertContains(response, 'Saved Rooms')
self.assertNotContains(response, "Looks like you haven't saved any rooms yet!")
def test_rooms_views_room_saved_get_empty(self):
print('Testing rooms.views.room_saved() GET empty')
self.client.force_login(self.user)
response = self.client.get(reverse('room_saved'), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'rooms/room_saved.html')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertContains(response, 'Saved Rooms')
self.assertContains(response, "Looks like you haven't saved any rooms yet!")
def test_rooms_views_room_saved_get_not_logged_in(self):
print('Testing rooms.views.room_saved() GET not logged in')
self.client.logout()
response = self.client.get(reverse('room_saved'), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
self.assertNotContains(response, 'Saved Rooms')
self.assertNotContains(response, "Looks like you haven't saved any rooms yet!")
def test_rooms_views_room_like_get(self):
print('Testing rooms.views.room_like() GET')
self.client.force_login(self.user)
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
pre_count = RoomLike.objects.count()
response = self.client.get(reverse('room_like', kwargs={'pk': room.pk}), follow=True)
post_count = RoomLike.objects.count()
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertNotContains(response, 'Saved Rooms')
self.assertNotContains(response, "Looks like you haven't saved any rooms yet!")
self.assertJSONEqual(str(response.content, encoding='utf8'), {'status': 'failure'})
self.assertEqual(pre_count, post_count)
def test_rooms_views_room_like_get_not_logged_in(self):
print('Testing rooms.views.room_like() GET not logged in')
self.client.logout()
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
pre_count = RoomLike.objects.count()
response = self.client.get(reverse('room_like', kwargs={'pk': room.pk}), follow=True)
post_count = RoomLike.objects.count()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
self.assertNotContains(response, 'Saved Rooms')
self.assertNotContains(response, "Looks like you haven't saved any rooms yet!")
self.assertEqual(pre_count, post_count)
def test_rooms_views_room_like_post(self):
print('Testing rooms.views.room_like() POST')
self.client.force_login(self.user)
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
pre_count = RoomLike.objects.count()
response = self.client.post(reverse('room_like', kwargs={'pk': room.pk}), follow=True)
post_count = RoomLike.objects.count()
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertNotContains(response, 'Saved Rooms')
self.assertNotContains(response, "Looks like you haven't saved any rooms yet!")
self.assertJSONEqual(str(response.content, encoding='utf8'), {'status': 'success'})
self.assertGreater(post_count, pre_count)
def test_rooms_views_room_like_post_not_logged_in(self):
print('Testing rooms.views.room_like() POST not logged in')
self.client.logout()
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
pre_count = RoomLike.objects.count()
response = self.client.post(reverse('room_like', kwargs={'pk': room.pk}), follow=True)
post_count = RoomLike.objects.count()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
self.assertNotContains(response, 'Saved Rooms')
self.assertNotContains(response, "Looks like you haven't saved any rooms yet!")
self.assertEqual(pre_count, post_count)
def test_rooms_views_room_unlike_get(self):
print('Testing rooms.views.room_unlike() GET')
self.client.force_login(self.user)
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
roomlike = RoomLike.objects.create(room=room, user=self.user)
pre_count = RoomLike.objects.count()
response = self.client.get(reverse('room_unlike', kwargs={'pk': room.pk}), follow=True)
post_count = RoomLike.objects.count()
self.assertEqual(response.status_code, 200)
self.assertJSONEqual(str(response.content, encoding='utf8'), {'status': 'failure'})
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertNotContains(response, 'Saved Rooms')
self.assertNotContains(response, "Looks like you haven't saved any rooms yet!")
self.assertEqual(pre_count, post_count)
def test_rooms_views_room_unlike_get_not_logged_in(self):
print('Testing rooms.views.room_unlike() GET not logged in')
self.client.logout()
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
roomlike = RoomLike.objects.create(room=room, user=self.user)
pre_count = RoomLike.objects.count()
response = self.client.get(reverse('room_unlike', kwargs={'pk': room.pk}), follow=True)
post_count = RoomLike.objects.count()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
self.assertNotContains(response, 'Saved Rooms')
self.assertNotContains(response, "Looks like you haven't saved any rooms yet!")
self.assertEqual(pre_count, post_count)
def test_rooms_views_room_unlike_get_wrong_user_or_empty(self):
print('Testing rooms.views.room_unlike() GET wrong user or empty')
self.client.force_login(self.user2)
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
roomlike = RoomLike.objects.create(room=room, user=self.user)
pre_count = RoomLike.objects.count()
response = self.client.get(reverse('room_unlike', kwargs={'pk': room.pk}), follow=True)
post_count = RoomLike.objects.count()
self.assertEqual(response.status_code, 200)
self.assertJSONEqual(str(response.content, encoding='utf8'), {'status': 'failure'})
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertNotContains(response, 'Saved Rooms')
self.assertNotContains(response, "Looks like you haven't saved any rooms yet!")
self.assertEqual(pre_count, post_count)
def test_rooms_views_room_unlike_post(self):
print('Testing rooms.views.room_unlike() POST')
self.client.force_login(self.user)
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
roomlike = RoomLike.objects.create(room=room, user=self.user)
pre_count = RoomLike.objects.count()
response = self.client.post(reverse('room_unlike', kwargs={'pk': room.pk}), follow=True)
post_count = RoomLike.objects.count()
self.assertEqual(response.status_code, 200)
self.assertJSONEqual(str(response.content, encoding='utf8'), {'status': 'success'})
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertNotContains(response, 'Saved Rooms')
self.assertNotContains(response, "Looks like you haven't saved any rooms yet!")
self.assertLess(post_count, pre_count)
def test_rooms_views_room_unlike_post_not_logged_in(self):
print('Testing rooms.views.room_unlike() POST not logged in')
self.client.logout()
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
roomlike = RoomLike.objects.create(room=room, user=self.user)
pre_count = RoomLike.objects.count()
response = self.client.post(reverse('room_unlike', kwargs={'pk': room.pk}), follow=True)
post_count = RoomLike.objects.count()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
self.assertNotContains(response, 'Saved Rooms')
self.assertNotContains(response, "Looks like you haven't saved any rooms yet!")
self.assertEqual(pre_count, post_count)
def test_rooms_views_room_unlike_post_wrong_user_or_empty(self):
print('Testing rooms.views.room_unlike() POST wrong user or empty')
self.client.force_login(self.user2)
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
roomlike = RoomLike.objects.create(room=room, user=self.user)
pre_count = RoomLike.objects.count()
response = self.client.post(reverse('room_unlike', kwargs={'pk': room.pk}), follow=True)
post_count = RoomLike.objects.count()
self.assertEqual(response.status_code, 200)
self.assertJSONEqual(str(response.content, encoding='utf8'), {'status': 'failure'})
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertNotContains(response, 'Saved Rooms')
self.assertNotContains(response, "Looks like you haven't saved any rooms yet!")
self.assertEqual(pre_count, post_count)
def test_rooms_views_room_create_get(self):
print('Testing rooms.views.room_create() GET')
self.client.force_login(self.user)
response = self.client.get(reverse('room_create'), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'rooms/room_create.html')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertContains(response, 'Post a Room')
self.assertNotContains(response, "Looks like you haven't told us about any houses you have!")
def test_rooms_views_room_create_get_no_houses(self):
print('Testing rooms.views.room_create() GET no existing houses')
self.client.force_login(self.user)
self.house.delete()
response = self.client.get(reverse('room_create'), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'rooms/room_create.html')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertContains(response, 'Post a Room')
self.assertContains(response, "Looks like you haven't told us about any houses you have!")
def test_rooms_views_room_create_get_not_logged_in(self):
print('Testing rooms.views.room_create() GET not logged in')
self.client.logout()
response = self.client.get(reverse('room_create'), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
self.assertNotContains(response, 'Post a Room')
self.assertNotContains(response, "Looks like you haven't told us about any houses you have!")
def test_rooms_views_room_create_post(self):
print('Testing rooms.views.room_create() POST')
self.client.force_login(self.user)
req_data = {
'house': self.house.id,
'name': 'Master Bedroom',
'price': 799.99,
'description': 'Looking for a student to occupy beginning in December 2019',
}
pre_count = Room.objects.count()
response = self.client.post(reverse('room_create'), data=req_data, follow=True)
post_count = Room.objects.count()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'rooms/room_detail.html')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertNotContains(response, 'Post a Room')
self.assertNotContains(response, "Looks like you haven't told us about any houses you have!")
self.assertContains(response, 'Master Bedroom')
self.assertGreater(post_count, pre_count)
def test_rooms_views_room_create_post_invalid(self):
print('Testing rooms.views.room_create() POST invalid')
self.client.force_login(self.user)
req_data = {
'house': self.house.id,
'price': 799.99,
'description': 'Looking for a student to occupy beginning in December 2019',
}
pre_count = Room.objects.count()
response = self.client.post(reverse('room_create'), data=req_data, follow=True)
post_count = Room.objects.count()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'rooms/room_create.html')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertContains(response, 'Post a Room')
self.assertContains(response, 'Please make sure to fill in all required details')
self.assertNotContains(response, "Looks like you haven't told us about any houses you have!")
self.assertNotContains(response, 'Master Bedroom')
self.assertEqual(post_count, pre_count)
def test_rooms_views_room_create_post_invalid1(self):
print('Testing rooms.views.room_create() POST invalid 1')
self.client.force_login(self.user)
req_data = {
'house': self.house.id,
'name': 'Master Bedroom',
'description': 'Looking for a student to occupy beginning in December 2019',
}
pre_count = Room.objects.count()
response = self.client.post(reverse('room_create'), data=req_data, follow=True)
post_count = Room.objects.count()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'rooms/room_create.html')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertContains(response, 'Post a Room')
self.assertContains(response, 'Please make sure to fill in all required details')
self.assertNotContains(response, "Looks like you haven't told us about any houses you have!")
self.assertNotContains(response, 'Master Bedroom')
self.assertEqual(post_count, pre_count)
def test_rooms_views_room_create_post_invalid2(self):
print('Testing rooms.views.room_create() POST invalid 2')
self.client.force_login(self.user)
req_data = {
'house': self.house.id,
'name': 'Master Bedroom',
'price': 799.99,
}
pre_count = Room.objects.count()
response = self.client.post(reverse('room_create'), data=req_data, follow=True)
post_count = Room.objects.count()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'rooms/room_create.html')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertContains(response, 'Post a Room')
self.assertContains(response, 'Please make sure to fill in all required details')
self.assertNotContains(response, "Looks like you haven't told us about any houses you have!")
self.assertNotContains(response, 'Master Bedroom')
self.assertEqual(post_count, pre_count)
def test_rooms_views_room_create_post_not_logged_in(self):
print('Testing rooms.views.room_create() POST not logged in')
self.client.logout()
req_data = {
'house': self.house.id,
'name': 'Master Bedroom',
'price': 799.99,
'description': 'Looking for a student to occupy beginning in December 2019',
}
pre_count = Room.objects.count()
response = self.client.post(reverse('room_create'), data=req_data, follow=True)
post_count = Room.objects.count()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
self.assertNotContains(response, 'Post a Room')
self.assertNotContains(response, "Looks like you haven't told us about any houses you have!")
self.assertNotContains(response, 'Master Bedroom')
self.assertEqual(post_count, pre_count)
def test_rooms_views_room_detail_get(self):
print('Testing rooms.views.room_detail() GET')
self.client.force_login(self.user)
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
response = self.client.get(reverse('room_detail', kwargs={'pk': room.pk}), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'rooms/room_detail.html')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertContains(response, 'Edit')
self.assertContains(response, 'Delete')
self.assertContains(response, 'Master Bedroom')
def test_rooms_views_room_detail_get_not_logged_in(self):
print('Testing rooms.views.room_detail() GET not logged in')
self.client.logout()
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
response = self.client.get(reverse('room_detail', kwargs={'pk': room.pk}), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'rooms/room_detail.html')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertNotContains(response, 'Edit')
self.assertNotContains(response, 'Delete')
self.assertContains(response, 'Master Bedroom')
def test_rooms_views_room_edit_get(self):
print('Testing rooms.views.room_edit() GET')
self.client.force_login(self.user)
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
response = self.client.get(reverse('room_edit', kwargs={'pk': room.pk}), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'rooms/room_edit.html')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertContains(response, 'Edit')
self.assertContains(response, 'Master Bedroom')
def test_rooms_views_room_edit_get_not_logged_in(self):
print('Testing rooms.views.room_edit() GET not logged in')
self.client.logout()
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
response = self.client.get(reverse('room_edit', kwargs={'pk': room.pk}), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
self.assertNotContains(response, 'Edit')
self.assertNotContains(response, 'Master Bedroom')
def test_rooms_views_room_edit_get_wrong_user(self):
print('Testing rooms.views.room_edit() GET wrong user')
self.client.force_login(self.user2)
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
response = self.client.get(reverse('room_edit', kwargs={'pk': room.pk}), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'main/404.html')
self.assertContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertNotContains(response, 'Edit')
self.assertNotContains(response, 'Master Bedroom')
def test_rooms_views_room_edit_post(self):
print('Testing rooms.views.room_edit() POST')
self.client.force_login(self.user)
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
req_data = {
'name': 'Master Suite',
'price': 799.99,
'description': 'Looking for a mature student!',
'is_available': False,
'furnished': False,
'is_accessible': True,
'open_to_students': True,
'female_only': True,
'pet_friendly': True,
'utilities_included': True,
'parking': True
}
self.assertEqual(room.name, 'Master Bedroom')
self.assertEqual(room.price, 0.00)
self.assertEqual(room.description, '')
self.assertTrue(room.is_available)
self.assertFalse(room.furnished)
self.assertFalse(room.is_accessible)
self.assertTrue(room.open_to_students)
self.assertFalse(room.female_only)
self.assertFalse(room.pet_friendly)
self.assertFalse(room.utilities_included)
self.assertFalse(room.parking)
response = self.client.post(reverse('room_edit', kwargs={'pk': room.pk}), data=req_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'rooms/room_detail.html')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertNotContains(response, 'Master Bedroom')
self.assertContains(response, 'Master Suite')
post_room = Room.objects.get(pk=room.pk)
self.assertEqual(room.id, post_room.id)
self.assertEqual(post_room.name, 'Master Suite')
self.assertEqual(post_room.price, Decimal('799.99'))
self.assertEqual(post_room.description, 'Looking for a mature student!')
self.assertFalse(post_room.is_available)
self.assertFalse(post_room.furnished)
self.assertTrue(post_room.is_accessible)
self.assertTrue(post_room.open_to_students)
self.assertTrue(post_room.female_only)
self.assertTrue(post_room.pet_friendly)
self.assertTrue(post_room.utilities_included)
self.assertTrue(post_room.parking)
def test_rooms_views_room_edit_post_not_logged_in(self):
print('Testing rooms.views.room_edit() POST not logged in')
self.client.logout()
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
req_data = {
'name': 'Master Suite',
'price': 799.99,
'description': 'Looking for a mature student!',
'is_available': False,
'furnished': False,
'is_accessible': True,
'open_to_students': True,
'female_only': True,
'pet_friendly': True,
'utilities_included': True,
'parking': True
}
self.assertEqual(room.name, 'Master Bedroom')
self.assertEqual(room.price, 0.00)
self.assertEqual(room.description, '')
self.assertTrue(room.is_available)
self.assertFalse(room.furnished)
self.assertFalse(room.is_accessible)
self.assertTrue(room.open_to_students)
self.assertFalse(room.female_only)
self.assertFalse(room.pet_friendly)
self.assertFalse(room.utilities_included)
self.assertFalse(room.parking)
response = self.client.post(reverse('room_edit', kwargs={'pk': room.pk}), data=req_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
self.assertNotContains(response, 'Master Bedroom')
self.assertNotContains(response, 'Master Suite')
post_room = Room.objects.get(pk=room.pk)
self.assertEqual(room.id, post_room.id)
self.assertEqual(post_room.name, 'Master Bedroom')
self.assertEqual(post_room.price, 0.00)
self.assertEqual(post_room.description, '')
self.assertTrue(post_room.is_available)
self.assertFalse(post_room.furnished)
self.assertFalse(post_room.is_accessible)
self.assertTrue(post_room.open_to_students)
self.assertFalse(post_room.female_only)
self.assertFalse(post_room.pet_friendly)
self.assertFalse(post_room.utilities_included)
self.assertFalse(post_room.parking)
def test_rooms_views_room_edit_post_wrong_user(self):
print('Testing rooms.views.room_edit() POST wrong user')
self.client.force_login(self.user2)
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
req_data = {
'name': 'Master Suite',
'price': 799.99,
'description': 'Looking for a mature student!',
'is_available': False,
'furnished': False,
'is_accessible': True,
'open_to_students': True,
'female_only': True,
'pet_friendly': True,
'utilities_included': True,
'parking': True
}
self.assertEqual(room.name, 'Master Bedroom')
self.assertEqual(room.price, 0.00)
self.assertEqual(room.description, '')
self.assertTrue(room.is_available)
self.assertFalse(room.furnished)
self.assertFalse(room.is_accessible)
self.assertTrue(room.open_to_students)
self.assertFalse(room.female_only)
self.assertFalse(room.pet_friendly)
self.assertFalse(room.utilities_included)
self.assertFalse(room.parking)
response = self.client.post(reverse('room_edit', kwargs={'pk': room.pk}), data=req_data, follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'main/404.html')
self.assertContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertNotContains(response, 'Master Bedroom')
self.assertNotContains(response, 'Master Suite')
post_room = Room.objects.get(pk=room.pk)
self.assertEqual(room.id, post_room.id)
self.assertEqual(post_room.name, 'Master Bedroom')
self.assertEqual(post_room.price, 0.00)
self.assertEqual(post_room.description, '')
self.assertTrue(post_room.is_available)
self.assertFalse(post_room.furnished)
self.assertFalse(post_room.is_accessible)
self.assertTrue(post_room.open_to_students)
self.assertFalse(post_room.female_only)
self.assertFalse(post_room.pet_friendly)
self.assertFalse(post_room.utilities_included)
self.assertFalse(post_room.parking)
def test_rooms_views_room_delete_get(self):
print('Testing rooms.views.room_delete() GET')
self.client.force_login(self.user)
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
pre_count = Room.objects.count()
response = self.client.get(reverse('room_delete', kwargs={'pk': room.pk}), follow=True)
post_count = Room.objects.count()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'rooms/room_delete.html')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertNotContains(response, 'Edit')
self.assertContains(response, 'Master Bedroom')
self.assertEqual(post_count, pre_count)
def test_rooms_views_room_delete_get_not_logged_in(self):
print('Testing rooms.views.room_delete() GET not logged in')
self.client.logout()
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
pre_count = Room.objects.count()
response = self.client.get(reverse('room_delete', kwargs={'pk': room.pk}), follow=True)
post_count = Room.objects.count()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
self.assertNotContains(response, 'Edit')
self.assertNotContains(response, 'Master Bedroom')
self.assertEqual(post_count, pre_count)
def test_rooms_views_room_delete_get_wrong_user(self):
print('Testing rooms.views.room_delete() GET wrong user')
self.client.force_login(self.user2)
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
pre_count = Room.objects.count()
response = self.client.get(reverse('room_delete', kwargs={'pk': room.pk}), follow=True)
post_count = Room.objects.count()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'main/404.html')
self.assertContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertNotContains(response, 'Edit')
self.assertNotContains(response, 'Master Bedroom')
self.assertEqual(post_count, pre_count)
def test_rooms_views_room_delete_post(self):
print('Testing rooms.views.room_delete() POST')
self.client.force_login(self.user)
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
pre_count = Room.objects.count()
response = self.client.post(reverse('room_delete', kwargs={'pk': room.pk}), follow=True)
post_count = Room.objects.count()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'houses/house_detail.html')
self.assertNotContains(response, '404')
self.assertNotContains(response, 'Login')
self.assertNotContains(response, 'Master Bedroom')
self.assertLess(post_count, pre_count)
def test_rooms_views_room_delete_post_not_logged_in(self):
print('Testing rooms.views.room_delete() POST not logged in')
self.client.logout()
room = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
pre_count = Room.objects.count()
response = self.client.post(reverse('room_delete', kwargs={'pk': room.pk}), follow=True)
post_count = Room.objects.count()
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'account/login.html')
self.assertNotContains(response, '404')
self.assertContains(response, 'Login')
self.assertNotContains(response, 'Edit')
self.assertNotContains(response, 'Master Bedroom')
self.assertEqual(post_count, pre_count)
def test_rooms_views_room_delete_post_wrong_user(self):
    """A POST by a user who does not own the room must 404 and delete nothing."""
    print('Testing rooms.views.room_delete() POST wrong user')
    self.client.force_login(self.user2)
    target = Room.objects.create(user=self.user, house=self.house, name='Master Bedroom')
    count_before = Room.objects.count()
    url = reverse('room_delete', kwargs={'pk': target.pk})
    resp = self.client.post(url, follow=True)
    # Room count must be unchanged by the rejected request.
    self.assertEqual(Room.objects.count(), count_before)
    self.assertEqual(resp.status_code, 200)
    self.assertTemplateUsed(resp, 'main/404.html')
    self.assertContains(resp, '404')
    for absent in ('Login', 'Edit', 'Master Bedroom'):
        self.assertNotContains(resp, absent)
| 51.23988
| 157
| 0.680779
| 4,043
| 34,177
| 5.609201
| 0.043532
| 0.104639
| 0.144501
| 0.026237
| 0.962342
| 0.95317
| 0.94735
| 0.942323
| 0.919173
| 0.903166
| 0
| 0.013368
| 0.2011
| 34,177
| 666
| 158
| 51.316817
| 0.817206
| 0
| 0
| 0.805511
| 0
| 0
| 0.184393
| 0.037101
| 0
| 0
| 0
| 0
| 0.50081
| 1
| 0.058347
| false
| 0.003241
| 0.009724
| 0
| 0.069692
| 0.056726
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9afe2e0fe5c7a26c0538228b2e5edc74c3ed50ac
| 117
|
py
|
Python
|
efilter_tests/benchmarks/__init__.py
|
Onager/dotty
|
b145131499be0c4b755fc2e2ac19be11a50bce6a
|
[
"Apache-2.0"
] | 54
|
2015-08-02T14:26:50.000Z
|
2021-10-21T02:44:25.000Z
|
efilter_tests/benchmarks/__init__.py
|
Onager/dotty
|
b145131499be0c4b755fc2e2ac19be11a50bce6a
|
[
"Apache-2.0"
] | 23
|
2015-11-27T10:08:58.000Z
|
2017-09-27T08:54:56.000Z
|
efilter_tests/benchmarks/__init__.py
|
Onager/dotty
|
b145131499be0c4b755fc2e2ac19be11a50bce6a
|
[
"Apache-2.0"
] | 16
|
2015-08-14T10:11:20.000Z
|
2021-10-21T02:44:18.000Z
|
"""EFILTER tests."""
from efilter_tests.benchmarks import hygdata_v3
from efilter_tests.benchmarks import small_csv
| 23.4
| 47
| 0.82906
| 16
| 117
| 5.8125
| 0.5625
| 0.387097
| 0.344086
| 0.55914
| 0.688172
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009434
| 0.094017
| 117
| 4
| 48
| 29.25
| 0.867925
| 0.119658
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
b13f39e9c9cb86019d1801d188cbc9c0f7844b06
| 7,589
|
py
|
Python
|
src/azure-cli/azure/cli/command_modules/cognitiveservices/tests/latest/test_private_endpoint.py
|
xaliciayang/azure-cli
|
38c80c875e8a79d08d06a2f42ec82fd54934343e
|
[
"MIT"
] | 7
|
2020-04-26T09:54:05.000Z
|
2021-07-22T16:54:41.000Z
|
src/azure-cli/azure/cli/command_modules/cognitiveservices/tests/latest/test_private_endpoint.py
|
xaliciayang/azure-cli
|
38c80c875e8a79d08d06a2f42ec82fd54934343e
|
[
"MIT"
] | 120
|
2018-03-27T19:14:40.000Z
|
2020-12-10T23:53:35.000Z
|
src/azure-cli/azure/cli/command_modules/cognitiveservices/tests/latest/test_private_endpoint.py
|
xaliciayang/azure-cli
|
38c80c875e8a79d08d06a2f42ec82fd54934343e
|
[
"MIT"
] | 13
|
2020-06-30T16:23:36.000Z
|
2022-03-29T17:12:05.000Z
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import unittest
from azure.cli.testsdk import ScenarioTest, ResourceGroupPreparer
class CognitiveServicesPrivateEndpointTests(ScenarioTest):
    """Live scenario tests for Cognitive Services private-link support.

    FIX: the original used ``assertTrue(value, 'Approved')`` in several places.
    ``assertTrue``'s second argument is only a failure *message*, so those
    assertions passed for any truthy value and never compared the connection
    state. They are replaced with ``assertEqual`` so the state is verified.
    """

    @ResourceGroupPreparer()
    def test_cognitiveservices_private_endpoint(self, resource_group):
        """Create an account, attach a private endpoint, verify approval, tear down."""
        sname = self.create_random_name(prefix='cs_cli_test_', length=16)
        customdomain = self.create_random_name(prefix='csclitest', length=16)
        self.kwargs.update({
            'sname': sname,
            'kind': 'TextAnalytics',
            'sku': 'S0',
            'vnetname': sname,
            'pename': 'pe' + sname,
            'customdomain': customdomain,
            'location': 'westus'
        })
        # Create the cognitive services account (custom domain is required for
        # private endpoints).
        self.cmd('az cognitiveservices account create -n {sname} -g {rg} --kind {kind} --sku {sku} -l {location} '
                 '--custom-domain {customdomain}',
                 checks=[self.check('name', '{sname}'),
                         self.check('location', '{location}'),
                         self.check('sku.name', '{sku}'),
                         self.check('properties.provisioningState', 'Succeeded')])
        # The account must expose exactly one private-link resource: 'account'.
        plResource = self.cmd('az network private-link-resource list -g {rg} -n {sname} '
                              '--type Microsoft.CognitiveServices/accounts').get_output_in_json()
        self.assertTrue(len(plResource) > 0)
        self.assertEqual(plResource[0]['name'], 'account')
        self.assertEqual(plResource[0]['properties']['groupId'], 'account')
        # Network plumbing: vnet + default subnet with PE network policies disabled.
        self.cmd('network vnet create --resource-group {rg} --name {vnetname} -l {location}')
        self.cmd('network vnet subnet create --resource-group {rg} --name default'
                 ' --vnet-name {vnetname} --address-prefixes 10.0.0.0/24')
        account = self.cmd('az cognitiveservices account show -n {sname} -g {rg}').get_output_in_json()
        self.kwargs.update({
            'accountId': account['id']
        })
        self.cmd('az network vnet subnet update --name default --resource-group {rg} --vnet-name {vnetname} --disable-private-endpoint-network-policies true')
        self.cmd('az network private-endpoint create -g {rg} -n {pename} --vnet-name {vnetname} --subnet default --private-connection-resource-id {accountId} --group-id account --connection-name {pename} -l {location}')
        # The new connection should show up on the account as auto-approved.
        account = self.cmd('az cognitiveservices account show -n {sname} -g {rg}').get_output_in_json()
        self.assertTrue(len(account['properties']['privateEndpointConnections']) > 0)
        self.assertEqual(account['properties']['privateEndpointConnections'][0]['properties']['privateLinkServiceConnectionState']['status'], 'Approved')
        self.kwargs.update({
            'pecId': account['properties']['privateEndpointConnections'][0]['id']
        })
        ret = self.cmd('az network private-endpoint-connection show --id {pecId}').get_output_in_json()
        self.assertEqual(ret['properties']['privateLinkServiceConnectionState']['status'], 'Approved')
        ret = self.cmd('az network private-endpoint delete --name {pename} --resource-group {rg}')
        self.assertEqual(ret.exit_code, 0)
        # Delete the cognitive services account.
        ret = self.cmd('az cognitiveservices account delete -n {sname} -g {rg}')
        self.assertEqual(ret.exit_code, 0)

    @ResourceGroupPreparer()
    def test_cognitiveservices_private_endpoint_connection(self, resource_group):
        """Exercise show/approve/reject/list/delete on a private-endpoint connection."""
        sname = self.create_random_name(prefix='cs_cli_test_', length=16)
        customdomain = self.create_random_name(prefix='csclitest', length=16)
        self.kwargs.update({
            'sname': sname,
            'kind': 'TextAnalytics',
            'sku': 'S1',
            'vnetname': sname,
            'pename': 'pe' + sname,
            'customdomain': customdomain,
            'location': 'centraluseuap'
        })
        # Create the cognitive services account (custom domain is required for
        # private endpoints).
        self.cmd('az cognitiveservices account create -n {sname} -g {rg} --kind {kind} --sku {sku} -l {location} '
                 '--custom-domain {customdomain}',
                 checks=[self.check('name', '{sname}'),
                         self.check('location', '{location}'),
                         self.check('sku.name', '{sku}'),
                         self.check('properties.provisioningState', 'Succeeded')])
        # The account must expose exactly one private-link resource: 'account'.
        plResource = self.cmd('az network private-link-resource list -g {rg} -n {sname} '
                              '--type Microsoft.CognitiveServices/accounts').get_output_in_json()
        self.assertTrue(len(plResource) > 0)
        self.assertEqual(plResource[0]['name'], 'account')
        self.assertEqual(plResource[0]['properties']['groupId'], 'account')
        # Network plumbing: vnet + default subnet with PE network policies disabled.
        self.cmd('network vnet create --resource-group {rg} --name {vnetname} -l {location}')
        self.cmd('network vnet subnet create --resource-group {rg} --name default'
                 ' --vnet-name {vnetname} --address-prefixes 10.0.0.0/24')
        account = self.cmd('az cognitiveservices account show -n {sname} -g {rg}').get_output_in_json()
        self.kwargs.update({
            'accountId': account['id']
        })
        self.cmd('az network vnet subnet update --name default --resource-group {rg} --vnet-name {vnetname} --disable-private-endpoint-network-policies true')
        self.cmd('az network private-endpoint create -g {rg} -n {pename} --vnet-name {vnetname} --subnet default --private-connection-resource-id {accountId} --group-id account --connection-name {pename} -l {location}')
        account = self.cmd('az cognitiveservices account show -n {sname} -g {rg}').get_output_in_json()
        self.kwargs.update({
            'pecId': account['properties']['privateEndpointConnections'][0]['id']
        })
        # Walk the connection through its state machine and verify each step.
        ret = self.cmd('az network private-endpoint-connection show --id {pecId}').get_output_in_json()
        self.assertEqual(ret['properties']['privateLinkServiceConnectionState']['status'], 'Approved')
        ret = self.cmd('az network private-endpoint-connection approve --id {pecId}').get_output_in_json()
        self.assertEqual(ret['properties']['privateLinkServiceConnectionState']['status'], 'Approved')
        ret = self.cmd('az network private-endpoint-connection reject --id {pecId}').get_output_in_json()
        self.assertEqual(ret['properties']['privateLinkServiceConnectionState']['status'], 'Rejected')
        ret = self.cmd('az network private-endpoint-connection list --id ' + account['id']).get_output_in_json()
        self.assertTrue(len(ret) == 1)
        self.cmd('az network private-endpoint-connection delete --id {pecId} --yes')
        ret = self.cmd('az network private-endpoint-connection list --id ' + account['id']).get_output_in_json()
        self.assertTrue(len(ret) == 0)
        ret = self.cmd('az network private-endpoint delete --name {pename} --resource-group {rg}')
        self.assertEqual(ret.exit_code, 0)
        # Delete the cognitive services account.
        ret = self.cmd('az cognitiveservices account delete -n {sname} -g {rg}')
        self.assertEqual(ret.exit_code, 0)


if __name__ == '__main__':
    unittest.main()
| 50.593333
| 219
| 0.616814
| 806
| 7,589
| 5.719603
| 0.150124
| 0.040998
| 0.044902
| 0.052061
| 0.891106
| 0.891106
| 0.865076
| 0.853362
| 0.8282
| 0.8282
| 0
| 0.006726
| 0.216366
| 7,589
| 149
| 220
| 50.932886
| 0.768455
| 0.075372
| 0
| 0.813725
| 0
| 0.058824
| 0.46346
| 0.111048
| 0
| 0
| 0
| 0
| 0.176471
| 1
| 0.019608
| false
| 0
| 0.019608
| 0
| 0.04902
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b16055e72a419e299c2cc38693078d3b8f167d6a
| 9,225
|
py
|
Python
|
scielomanager/articletrack/tests/tests_modelmanagers.py
|
jamilatta/scielo-manager
|
d506c6828ba9b1089faa164bc42ba29a0f228e61
|
[
"BSD-2-Clause"
] | null | null | null |
scielomanager/articletrack/tests/tests_modelmanagers.py
|
jamilatta/scielo-manager
|
d506c6828ba9b1089faa164bc42ba29a0f228e61
|
[
"BSD-2-Clause"
] | null | null | null |
scielomanager/articletrack/tests/tests_modelmanagers.py
|
jamilatta/scielo-manager
|
d506c6828ba9b1089faa164bc42ba29a0f228e61
|
[
"BSD-2-Clause"
] | null | null | null |
# coding: utf-8
from django.test import TestCase
from articletrack import (
models,
modelmanagers,
)
from articletrack.tests import modelfactories
from journalmanager.tests.modelfactories import UserFactory, CollectionFactory, JournalFactory
class ArticleManagerTests(TestCase):
    """Checks the user-scoped manager/queryset behaviour for Article."""

    def _make_user(self, *collections):
        """Create an active user that manages every given collection."""
        managing_user = UserFactory(is_active=True)
        for collection in collections:
            collection.add_user(managing_user, is_manager=True)
        return managing_user

    def test_manager_base_interface(self):
        for required in ('all', 'active'):
            self.assertTrue(hasattr(models.Article.userobjects, required))

    def test_queryset_base_interface(self):
        queryset = modelmanagers.ArticleQuerySet()
        for required in ('all', 'active', 'available', 'unavailable'):
            self.assertTrue(hasattr(queryset, required))

    def test_all_returns_user_objects_no_matter_the_active_context(self):
        first = modelfactories.ArticleFactory.create()
        second = modelfactories.ArticleFactory.create()
        first_coll = first.journals.all()[0].collections.all()[0]
        second_coll = second.journals.all()[0].collections.all()[0]
        user = self._make_user(first_coll, second_coll)
        second_coll.make_default_to_user(user)

        def all_collections():
            return user.user_collection.all()

        # Both articles are visible regardless of which collection is default.
        visible = models.Article.userobjects.all(
            get_all_collections=all_collections)
        self.assertEqual(visible.count(), 2)
        self.assertIn(first, visible)
        self.assertIn(second, visible)

    def test_active_returns_user_objects_bound_to_the_active_context(self):
        first = modelfactories.ArticleFactory.create()
        second = modelfactories.ArticleFactory.create()
        first_coll = first.journals.all()[0].collections.all()[0]
        second_coll = second.journals.all()[0].collections.all()[0]
        user = self._make_user(first_coll, second_coll)
        second_coll.make_default_to_user(user)

        def default_collection():
            return user.user_collection.get(usercollections__is_default=True)

        # Only the article in the default (active) collection is visible.
        visible = models.Article.userobjects.active(
            get_active_collection=default_collection)
        self.assertEqual(visible.count(), 1)
        self.assertIn(second, visible)
class CheckinManagerTests(TestCase):
    """Checks the user-scoped manager/queryset behaviour for Checkin."""

    def _make_user(self, *collections):
        """Create an active user that manages every given collection."""
        managing_user = UserFactory(is_active=True)
        for collection in collections:
            collection.add_user(managing_user, is_manager=True)
        return managing_user

    def test_manager_base_interface(self):
        for required in ('all', 'active'):
            self.assertTrue(hasattr(models.Checkin.userobjects, required))

    def test_queryset_base_interface(self):
        queryset = modelmanagers.CheckinQuerySet()
        for required in ('all', 'active', 'available', 'unavailable'):
            self.assertTrue(hasattr(queryset, required))

    def test_all_returns_user_objects_no_matter_the_active_context(self):
        first = modelfactories.CheckinFactory.create()
        second = modelfactories.CheckinFactory.create()
        first_coll = first.article.journals.all()[0].collections.all()[0]
        second_coll = second.article.journals.all()[0].collections.all()[0]
        user = self._make_user(first_coll, second_coll)
        second_coll.make_default_to_user(user)

        def all_collections():
            return user.user_collection.all()

        # Both checkins are visible regardless of which collection is default.
        visible = models.Checkin.userobjects.all(
            get_all_collections=all_collections)
        self.assertEqual(visible.count(), 2)
        self.assertIn(first, visible)
        self.assertIn(second, visible)

    def test_active_returns_user_objects_bound_to_the_active_context(self):
        first = modelfactories.CheckinFactory.create()
        second = modelfactories.CheckinFactory.create()
        first_coll = first.article.journals.all()[0].collections.all()[0]
        second_coll = second.article.journals.all()[0].collections.all()[0]
        user = self._make_user(first_coll, second_coll)
        second_coll.make_default_to_user(user)

        def default_collection():
            return user.user_collection.get(usercollections__is_default=True)

        # Only the checkin in the default (active) collection is visible.
        visible = models.Checkin.userobjects.active(
            get_active_collection=default_collection)
        self.assertEqual(visible.count(), 1)
        self.assertIn(second, visible)
class TicketManagerTests(TestCase):
    """Checks the user-scoped manager/queryset behaviour for Ticket."""

    def _make_user(self, *collections):
        """Create an active user that manages every given collection."""
        managing_user = UserFactory(is_active=True)
        for collection in collections:
            collection.add_user(managing_user, is_manager=True)
        return managing_user

    def test_manager_base_interface(self):
        for required in ('all', 'active'):
            self.assertTrue(hasattr(models.Ticket.userobjects, required))

    def test_queryset_base_interface(self):
        queryset = modelmanagers.TicketQuerySet()
        for required in ('all', 'active', 'available', 'unavailable'):
            self.assertTrue(hasattr(queryset, required))

    def test_all_returns_user_objects_no_matter_the_active_context(self):
        first = modelfactories.TicketFactory.create()
        second = modelfactories.TicketFactory.create()
        first_coll = first.article.journals.all()[0].collections.all()[0]
        second_coll = second.article.journals.all()[0].collections.all()[0]
        user = self._make_user(first_coll, second_coll)
        second_coll.make_default_to_user(user)

        def all_collections():
            return user.user_collection.all()

        # Both tickets are visible regardless of which collection is default.
        visible = models.Ticket.userobjects.all(
            get_all_collections=all_collections)
        self.assertEqual(visible.count(), 2)
        self.assertIn(first, visible)
        self.assertIn(second, visible)

    def test_active_returns_user_objects_bound_to_the_active_context(self):
        first = modelfactories.TicketFactory.create()
        second = modelfactories.TicketFactory.create()
        first_coll = first.article.journals.all()[0].collections.all()[0]
        second_coll = second.article.journals.all()[0].collections.all()[0]
        user = self._make_user(first_coll, second_coll)
        second_coll.make_default_to_user(user)

        def default_collection():
            return user.user_collection.get(usercollections__is_default=True)

        # Only the ticket in the default (active) collection is visible.
        visible = models.Ticket.userobjects.active(
            get_active_collection=default_collection)
        self.assertEqual(visible.count(), 1)
        self.assertIn(second, visible)
class CommentManagerTests(TestCase):
    """Checks the user-scoped manager/queryset behaviour for Comment."""

    def _make_user(self, *collections):
        """Create an active user that manages every given collection."""
        managing_user = UserFactory(is_active=True)
        for collection in collections:
            collection.add_user(managing_user, is_manager=True)
        return managing_user

    def test_manager_base_interface(self):
        for required in ('all', 'active'):
            self.assertTrue(hasattr(models.Comment.userobjects, required))

    def test_queryset_base_interface(self):
        queryset = modelmanagers.CommentQuerySet()
        for required in ('all', 'active', 'available', 'unavailable'):
            self.assertTrue(hasattr(queryset, required))

    def test_all_returns_user_objects_no_matter_the_active_context(self):
        first = modelfactories.CommentFactory.create()
        second = modelfactories.CommentFactory.create()
        first_coll = first.ticket.article.journals.all()[0].collections.all()[0]
        second_coll = second.ticket.article.journals.all()[0].collections.all()[0]
        user = self._make_user(first_coll, second_coll)
        second_coll.make_default_to_user(user)

        def all_collections():
            return user.user_collection.all()

        # Both comments are visible regardless of which collection is default.
        visible = models.Comment.userobjects.all(
            get_all_collections=all_collections)
        self.assertEqual(visible.count(), 2)
        self.assertIn(first, visible)
        self.assertIn(second, visible)

    def test_active_returns_user_objects_bound_to_the_active_context(self):
        first = modelfactories.CommentFactory.create()
        second = modelfactories.CommentFactory.create()
        first_coll = first.ticket.article.journals.all()[0].collections.all()[0]
        second_coll = second.ticket.article.journals.all()[0].collections.all()[0]
        user = self._make_user(first_coll, second_coll)
        second_coll.make_default_to_user(user)

        def default_collection():
            return user.user_collection.get(usercollections__is_default=True)

        # Only the comment in the default (active) collection is visible.
        visible = models.Comment.userobjects.active(
            get_active_collection=default_collection)
        self.assertEqual(visible.count(), 1)
        self.assertIn(second, visible)
| 34.04059
| 94
| 0.697019
| 1,010
| 9,225
| 6.10099
| 0.094059
| 0.020772
| 0.031159
| 0.059721
| 0.915612
| 0.831548
| 0.831548
| 0.831548
| 0.831548
| 0.831548
| 0
| 0.017079
| 0.206612
| 9,225
| 270
| 95
| 34.166667
| 0.824839
| 0.001409
| 0
| 0.777143
| 0
| 0
| 0.016504
| 0
| 0
| 0
| 0
| 0
| 0.16
| 1
| 0.16
| false
| 0
| 0.022857
| 0.045714
| 0.274286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b18aed78c9c9eec17732ce36e6bb4c326d66a0fb
| 8,372
|
py
|
Python
|
Certificates/freeCodeCamp Certificates/Scientific Computing with Python Projects/time_calculator.py
|
icantfindmyspider/SurveyForm
|
1252037b236ea8bed4dc65ba193dbfe58563a50a
|
[
"MIT"
] | 1
|
2021-11-19T05:30:03.000Z
|
2021-11-19T05:30:03.000Z
|
Certificates/freeCodeCamp Certificates/Scientific Computing with Python Projects/time_calculator.py
|
icantfindmyspider/Playground
|
13d1debc33eb18d848cfa33437a62d9c4fabf210
|
[
"MIT"
] | null | null | null |
Certificates/freeCodeCamp Certificates/Scientific Computing with Python Projects/time_calculator.py
|
icantfindmyspider/Playground
|
13d1debc33eb18d848cfa33437a62d9c4fabf210
|
[
"MIT"
] | null | null | null |
def add_time(start, duration, day=''):
    """Add a duration to a 12-hour clock time.

    Args:
        start: Start time as ``'H:MM AM'`` or ``'H:MM PM'`` (hour 1-12).
        duration: Amount to add as ``'H:MM'``; hours may exceed 24.
        day: Optional starting day of the week, case-insensitive
             (e.g. ``'WeDnEsday'``). Empty string means "no day".

    Returns:
        The new time, e.g. ``'6:10 PM'``, ``'2:02 PM, Monday'``,
        ``'1:40 AM (next day)'`` or ``'12:03 AM, Thursday (2 days later)'``.

    The original implementation duplicated the whole computation in nested
    ``to_pm``/``to_am`` helpers, parsed by fragile string slicing, and had a
    float-division bug (``days / 2``) on the AM path. This version does the
    arithmetic once in minutes.
    """
    days_of_week = ['Sunday', 'Monday', 'Tuesday', 'Wednesday',
                    'Thursday', 'Friday', 'Saturday']

    # Parse the start time into minutes since midnight (24-hour basis).
    clock, meridiem = start.split()
    hour, minute = (int(part) for part in clock.split(':'))
    if meridiem.upper() == 'PM':
        hour += 12
    hour %= 24  # '12:xx AM' -> hour 0

    dur_hours, dur_minutes = (int(part) for part in duration.split(':'))

    total = hour * 60 + minute + dur_hours * 60 + dur_minutes
    days_later, minutes_of_day = divmod(total, 24 * 60)
    new_hour24, new_minute = divmod(minutes_of_day, 60)

    # Convert back to 12-hour display form.
    new_meridiem = 'AM' if new_hour24 < 12 else 'PM'
    display_hour = new_hour24 % 12 or 12
    result = f'{display_hour}:{new_minute:02d} {new_meridiem}'

    if day:
        start_index = days_of_week.index(day.capitalize())
        result += ', ' + days_of_week[(start_index + days_later) % 7]

    if days_later == 1:
        result += ' (next day)'
    elif days_later > 1:
        result += f' ({days_later} days later)'
    return result
if __name__ == '__main__':
    # Guarded so importing this module has no side effects.
    # Smoke test: crosses midnight twice and lands two days later.
    print(add_time("11:59 PM", "24:01", 'WeDnEsday'))
| 39.490566
| 210
| 0.484711
| 933
| 8,372
| 4.083601
| 0.073955
| 0.144882
| 0.051969
| 0.051444
| 0.849344
| 0.84147
| 0.829921
| 0.819948
| 0.784252
| 0.784252
| 0
| 0.028466
| 0.387363
| 8,372
| 212
| 211
| 39.490566
| 0.714369
| 0
| 0
| 0.736842
| 0
| 0
| 0.046269
| 0
| 0.031579
| 0
| 0
| 0
| 0
| 1
| 0.021053
| false
| 0
| 0
| 0
| 0.078947
| 0.005263
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b194a91a3269609456347e403553e1a3541d0dc1
| 2,680
|
py
|
Python
|
api/migrations/0112_auto_20210202_1825.py
|
IFRCGo/ifrcgo-api
|
c1c3e0cf1076ab48d03db6aaf7a00f8485ca9e1a
|
[
"MIT"
] | 11
|
2018-06-11T06:05:12.000Z
|
2022-03-25T09:31:44.000Z
|
api/migrations/0112_auto_20210202_1825.py
|
IFRCGo/ifrcgo-api
|
c1c3e0cf1076ab48d03db6aaf7a00f8485ca9e1a
|
[
"MIT"
] | 498
|
2017-11-07T21:20:13.000Z
|
2022-03-31T14:37:18.000Z
|
api/migrations/0112_auto_20210202_1825.py
|
IFRCGo/ifrcgo-api
|
c1c3e0cf1076ab48d03db6aaf7a00f8485ca9e1a
|
[
"MIT"
] | 6
|
2018-04-11T13:29:50.000Z
|
2020-07-16T16:52:11.000Z
|
# Generated by Django 2.2.13 on 2021-02-02 18:25
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add per-language name columns (ar/en/es/fr) to three partner models."""

    dependencies = [
        ('api', '0111_externalpartner_externalpartnercategory_fieldreportexternalpartner_fieldreportexternalpartnercategor'),
    ]

    # All twelve AddField operations are identical apart from the target model
    # and the language suffix, so they are generated in a comprehension. The
    # iteration order (model-major, then ar/en/es/fr) matches the original
    # hand-written list exactly.
    operations = [
        migrations.AddField(
            model_name=target_model,
            name='name_' + language,
            field=models.CharField(max_length=200, null=True, verbose_name='name'),
        )
        for target_model in ('externalpartner', 'externalpartnercategory', 'supportedactivity')
        for language in ('ar', 'en', 'es', 'fr')
    ]
| 36.216216
| 125
| 0.598507
| 258
| 2,680
| 6.015504
| 0.158915
| 0.123711
| 0.177835
| 0.208763
| 0.853093
| 0.853093
| 0.853093
| 0.826031
| 0.826031
| 0.826031
| 0
| 0.029197
| 0.284328
| 2,680
| 73
| 126
| 36.712329
| 0.779979
| 0.017164
| 0
| 0.895522
| 1
| 0
| 0.174772
| 0.074848
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.014925
| 0
| 0.059701
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
492016b3f42005d40698a4f06153894320b6bac8
| 18,546
|
py
|
Python
|
ui/tests/testInvasions.py
|
ludek77/gow
|
f3609e7a70c953d83d2a56d871101d118b534bf5
|
[
"CC0-1.0"
] | null | null | null |
ui/tests/testInvasions.py
|
ludek77/gow
|
f3609e7a70c953d83d2a56d871101d118b534bf5
|
[
"CC0-1.0"
] | null | null | null |
ui/tests/testInvasions.py
|
ludek77/gow
|
f3609e7a70c953d83d2a56d871101d118b534bf5
|
[
"CC0-1.0"
] | null | null | null |
from ui.tests.TestBase import TestBase
from ui.models import Turn
class TestInvasions(TestBase):
def testInvasion(self):
    """An army invades an empty province via a single transporting ship."""
    turn = Turn.objects.get(pk=1)
    # verify starting units: Spanish army in Germany, Spanish ship in North Sea,
    # and Norway (the invasion target) empty
    self.assertUnit(turn, 'Germany', 'Army', 'Spain')
    self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
    self.assertNoUnit(turn, 'Norway')
    # set commands: army invades Norway through North Sea; ship transports it
    self.setAssertCommand(turn, 'Germany', 'invade', ['North Sea', None, 'Norway'])
    self.setAssertCommand(turn, 'North Sea', 'transport', 'Germany')
    # calculate turn
    turn = self.assertNextTurn(turn, '2000', 'Invasions: Single invasion')
    # verify outcome: army moved Germany -> Norway, ship stays, both commands ok
    self.assertResult(turn.previous, 'Germany', 'ok')
    self.assertNoUnit(turn, 'Germany')
    self.assertUnit(turn, 'Norway', 'Army', 'Spain')
    self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
    self.assertResult(turn.previous, 'North Sea', 'ok')
def testInvasion2(self):
    """An invasion transported by two ships (a two-sea route) succeeds."""
    turn = Turn.objects.get(pk=1)
    # verify starting units; note the second transport ship belongs to Russia
    self.assertUnit(turn, 'Germany', 'Army', 'Spain')
    self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
    self.assertUnit(turn, 'Norwegian Sea', 'Ship', 'Russia')
    self.assertNoUnit(turn, 'Norway')
    # set commands: invade via North Sea then Norwegian Sea; both ships transport
    self.setAssertCommand(turn, 'Germany', 'invade', ['North Sea', 'Norwegian Sea', 'Norway'])
    self.setAssertCommand(turn, 'North Sea', 'transport', 'Germany')
    self.setAssertCommand(turn, 'Norwegian Sea', 'transport', 'Germany')
    # calculate turn
    turn = self.assertNextTurn(turn, '2000', 'Invasions: Double invasion')
    # verify outcome: army reaches Norway; both transports stay put with 'ok'
    self.assertResult(turn.previous, 'Germany', 'ok')
    self.assertNoUnit(turn, 'Germany')
    self.assertUnit(turn, 'Norway', 'Army', 'Spain')
    self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
    self.assertResult(turn.previous, 'North Sea', 'ok')
    self.assertUnit(turn, 'Norwegian Sea', 'Ship', 'Russia')
    self.assertResult(turn.previous, 'Norwegian Sea', 'ok')
def testSelfInvasion(self):
turn = Turn.objects.get(pk=1)
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
# set commands
self.setAssertCommand(turn, 'Germany', 'invade', ['North Sea',None,'Germany'])
self.setAssertCommand(turn, 'North Sea', 'transport', 'Germany')
# calculate turn
turn = self.assertNextTurn(turn, '2000', 'Invasions: Self invasion')
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertResult(turn.previous, 'Germany', 'ok')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertResult(turn.previous, 'North Sea', 'ok')
def testNotStrongestSelfInvasion(self):
turn = Turn.objects.get(pk=1)
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertUnit(turn, 'Baltic Sea', 'Ship', 'Russia')
# set commands
self.setAssertCommand(turn, 'Germany', 'invade', ['North Sea',None,'Germany'])
self.setAssertCommand(turn, 'North Sea', 'transport', 'Germany')
self.setAssertCommand(turn, 'Baltic Sea', 'attack', 'Germany')
# calculate turn
turn = self.assertNextTurn(turn, '2000', 'Invasions: Not Strongest Self invasion')
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertResult(turn.previous, 'Germany', 'fail.not-strongest')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertResult(turn.previous, 'North Sea', 'ok')
self.assertUnit(turn, 'Baltic Sea', 'Ship', 'Russia')
self.assertResult(turn.previous, 'Baltic Sea', 'fail.not-strongest')
def testInvasionFromAttack(self):
turn = Turn.objects.get(pk=1)
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertNoUnit(turn, 'Norway')
self.assertUnit(turn, 'France', 'Army', 'Spain')
# set commands
self.setAssertCommand(turn, 'Germany', 'invade', ['North Sea',None,'Norway'])
self.setAssertCommand(turn, 'North Sea', 'transport', 'Germany')
self.setAssertCommand(turn, 'France', 'attack', 'Germany')
# calculate turn
turn = self.assertNextTurn(turn, '2000', 'Invasions: Invasion from attack')
# verify units
self.assertResult(turn.previous, 'Germany', 'ok')
self.assertResult(turn.previous, 'France', 'ok')
self.assertNoUnit(turn, 'France')
self.assertUnit(turn, 'Norway', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertResult(turn.previous, 'North Sea', 'ok')
def testNoTransport(self):
turn = Turn.objects.get(pk=1)
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertUnit(turn, 'Norwegian Sea', 'Ship', 'Russia')
self.assertNoUnit(turn, 'Norway')
# set commands
self.setAssertCommand(turn, 'Germany', 'invade', ['North Sea','Norwegian Sea','Norway'])
self.setAssertCommand(turn, 'North Sea', 'defend')
self.setAssertCommand(turn, 'Norwegian Sea', 'transport', 'Germany')
# calculate turn
turn = self.assertNextTurn(turn, '2000', 'Invasions: No Transport')
# verify units
self.assertResult(turn.previous, 'Germany', 'fail.transport-missing')
self.assertNoUnit(turn, 'Norway')
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertResult(turn.previous, 'North Sea', 'ok')
self.assertUnit(turn, 'Norwegian Sea', 'Ship', 'Russia')
self.assertResult(turn.previous, 'Norwegian Sea', 'ok')
def testNoTransport2(self):
turn = Turn.objects.get(pk=1)
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertUnit(turn, 'Norwegian Sea', 'Ship', 'Russia')
self.assertNoUnit(turn, 'Norway')
# set commands
self.setAssertCommand(turn, 'Germany', 'invade', ['North Sea','Norwegian Sea','Norway'])
self.setAssertCommand(turn, 'North Sea', 'transport', 'Germany')
self.setAssertCommand(turn, 'Norwegian Sea', 'defend')
# calculate turn
turn = self.assertNextTurn(turn, '2000', 'Invasions: No second Transport')
# verify units
self.assertResult(turn.previous, 'Germany', 'fail.transport-missing')
self.assertNoUnit(turn, 'Norway')
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertResult(turn.previous, 'North Sea', 'ok')
self.assertUnit(turn, 'Norwegian Sea', 'Ship', 'Russia')
self.assertResult(turn.previous, 'Norwegian Sea', 'ok')
def testAttackedTransport(self):
turn = Turn.objects.get(pk=1)
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertUnit(turn, 'Norwegian Sea', 'Ship', 'Russia')
self.assertNoUnit(turn, 'Norway')
# set commands
self.setAssertCommand(turn, 'Germany', 'invade', ['North Sea',None,'Norway'])
self.setAssertCommand(turn, 'North Sea', 'transport', 'Germany')
self.setAssertCommand(turn, 'Norwegian Sea', 'attack', 'North Sea')
# calculate turn
turn = self.assertNextTurn(turn, '2000', 'Invasions: No second Transport')
# verify units
self.assertResult(turn.previous, 'Germany', 'fail.transport-canceled')
self.assertNoUnit(turn, 'Norway')
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertResult(turn.previous, 'North Sea', 'fail.canceled-by-attack')
self.assertUnit(turn, 'Norwegian Sea', 'Ship', 'Russia')
self.assertResult(turn.previous, 'Norwegian Sea', 'fail.defence-stronger')
def testAttackedTransport(self):
turn = Turn.objects.get(pk=1)
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertUnit(turn, 'Sweden', 'Army', 'Russia')
# set commands
self.setAssertCommand(turn, 'Germany', 'invade', ['North Sea',None,'Norway'])
self.setAssertCommand(turn, 'Sweden', 'invade', ['North Sea',None,'Norway'])
self.setAssertCommand(turn, 'North Sea', 'transport', 'Germany')
# calculate turn
turn = self.assertNextTurn(turn, '2000', 'Invasions: No second Transport')
# verify units
self.assertResult(turn.previous, 'Germany', 'ok')
self.assertNoUnit(turn, 'Germany')
self.assertUnit(turn, 'Norway', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertResult(turn.previous, 'North Sea', 'ok')
self.assertUnit(turn, 'Sweden', 'Army', 'Russia')
self.assertResult(turn.previous, 'Sweden', 'fail.transport-missing')
def testMoveNotCancelingTransport(self):
turn = Turn.objects.get(pk=1)
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertUnit(turn, 'Norwegian Sea', 'Ship', 'Russia')
self.assertNoUnit(turn, 'Norway')
# set commands
self.setAssertCommand(turn, 'Germany', 'invade', ['North Sea',None,'Norway'])
self.setAssertCommand(turn, 'North Sea', 'transport', 'Germany')
self.setAssertCommand(turn, 'Norwegian Sea', 'move', 'North Sea')
# calculate turn
turn = self.assertNextTurn(turn, '2000', 'Invasions: Move not canceling Transport')
# verify units
self.assertResult(turn.previous, 'Germany', 'ok')
self.assertNoUnit(turn, 'Germany')
self.assertUnit(turn, 'Norway', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertResult(turn.previous, 'North Sea', 'ok')
self.assertUnit(turn, 'Norwegian Sea', 'Ship', 'Russia')
self.assertResult(turn.previous, 'Norwegian Sea', 'fail.target-not-moving:par_0')
def testNotStrongestInvasion(self):
turn = Turn.objects.get(pk=1)
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertUnit(turn, 'Norwegian Sea', 'Ship', 'Russia')
self.assertNoUnit(turn, 'Norway')
# set commands
self.setAssertCommand(turn, 'Germany', 'invade', ['North Sea',None,'Norway'])
self.setAssertCommand(turn, 'North Sea', 'transport', 'Germany')
self.setAssertCommand(turn, 'Norwegian Sea', 'attack', 'Norway')
# calculate turn
turn = self.assertNextTurn(turn, '2000', 'Invasions: Not strongest Invasion')
# verify units
self.assertResult(turn.previous, 'Germany', 'fail.not-strongest')
self.assertNoUnit(turn, 'Norway')
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertResult(turn.previous, 'North Sea', 'ok')
self.assertUnit(turn, 'Norwegian Sea', 'Ship', 'Russia')
self.assertResult(turn.previous, 'Norwegian Sea', 'fail.not-strongest')
def testSupportedInvasion(self):
turn = Turn.objects.get(pk=1)
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertUnit(turn, 'Norwegian Sea', 'Ship', 'Russia')
self.assertUnit(turn, 'Sweden', 'Army', 'Russia')
self.assertNoUnit(turn, 'Norway')
# set commands
self.setAssertCommand(turn, 'Germany', 'invade', ['North Sea',None,'Norway'])
self.setAssertCommand(turn, 'North Sea', 'transport', 'Germany')
self.setAssertCommand(turn, 'Norwegian Sea', 'attack', 'Norway')
self.setAssertCommand(turn, 'Sweden', 'support_attack', ['Norway','Germany'])
# calculate turn
turn = self.assertNextTurn(turn, '2000', 'Invasions: Supported invasion')
# verify units
self.assertResult(turn.previous, 'Germany', 'ok')
self.assertNoUnit(turn, 'Germany')
self.assertUnit(turn, 'Norway', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertResult(turn.previous, 'North Sea', 'ok')
self.assertUnit(turn, 'Norwegian Sea', 'Ship', 'Russia')
self.assertResult(turn.previous, 'Norwegian Sea', 'fail.not-strongest')
self.assertUnit(turn, 'Sweden', 'Army', 'Russia')
self.assertResult(turn.previous, 'Sweden', 'ok')
def testDefenceStronger(self):
turn = Turn.objects.get(pk=1)
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertUnit(turn, 'Sweden', 'Army', 'Russia')
# set commands
self.setAssertCommand(turn, 'Germany', 'invade', ['North Sea',None,'Sweden'])
self.setAssertCommand(turn, 'North Sea', 'transport', 'Germany')
# calculate turn
turn = self.assertNextTurn(turn, '2000', 'Invasions: Defence Stronger')
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertResult(turn.previous, 'Germany', 'fail.defence-stronger')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertResult(turn.previous, 'North Sea', 'ok')
self.assertUnit(turn, 'Sweden', 'Army', 'Russia')
self.assertResult(turn.previous, 'Sweden', 'ok')
def testSupportedInvasionRetreat(self):
turn = Turn.objects.get(pk=1)
self.setDefaultEscapes(turn)
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertUnit(turn, 'Sweden', 'Army', 'Russia')
self.assertUnit(turn, 'France', 'Army', 'Spain')
# set commands
self.setAssertCommand(turn, 'Germany', 'defend')
self.setAssertCommand(turn, 'North Sea', 'transport', 'Sweden')
self.setAssertCommand(turn, 'Sweden', 'invade', ['North Sea',None,'Germany'])
self.setAssertCommand(turn, 'France', 'support_attack', ['Germany','Sweden'])
# calculate turn
turn = self.assertNextTurn(turn, '2000', 'Invasions: Supported invasion, retreating defender')
# verify units
self.assertResult(turn.previous, 'Germany', 'escaped')
self.assertResult(turn.previous, 'North Sea', 'ok')
self.assertResult(turn.previous, 'Sweden', 'ok')
self.assertResult(turn.previous, 'France', 'ok')
self.assertNoUnit(turn, 'Sweden')
self.assertUnit(turn, 'Germany', 'Army', 'Russia')
self.assertUnit(turn, 'Poland', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertUnit(turn, 'France', 'Army', 'Spain')
def testInvasionSwitch(self):
turn = Turn.objects.get(pk=1)
self.setDefaultEscapes(turn)
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertUnit(turn, 'Sweden', 'Army', 'Russia')
self.assertUnit(turn, 'Baltic Sea', 'Ship', 'Russia')
# set commands
self.setAssertCommand(turn, 'Germany', 'invade', ['Baltic Sea',None,'Sweden'])
self.setAssertCommand(turn, 'Baltic Sea', 'transport', 'Germany')
self.setAssertCommand(turn, 'Sweden', 'invade', ['North Sea',None,'Germany'])
self.setAssertCommand(turn, 'North Sea', 'transport', 'Sweden')
# calculate turn
turn = self.assertNextTurn(turn, '2000', 'Invasions: Invasion switch')
# verify units
self.assertResult(turn.previous, 'Germany', 'ok')
self.assertResult(turn.previous, 'North Sea', 'ok')
self.assertResult(turn.previous, 'Sweden', 'ok')
self.assertResult(turn.previous, 'Baltic Sea', 'ok')
self.assertUnit(turn, 'Germany', 'Army', 'Russia')
self.assertUnit(turn, 'Sweden', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertUnit(turn, 'Baltic Sea', 'Ship', 'Russia')
def testTwoInvasionsToOne(self):
turn = Turn.objects.get(pk=1)
self.setDefaultEscapes(turn)
# verify units
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertUnit(turn, 'Sweden', 'Army', 'Russia')
self.assertUnit(turn, 'Baltic Sea', 'Ship', 'Russia')
# set commands
self.setAssertCommand(turn, 'Germany', 'invade', ['Baltic Sea',None,'Denmark'])
self.setAssertCommand(turn, 'Baltic Sea', 'transport', 'Germany')
self.setAssertCommand(turn, 'Sweden', 'invade', ['North Sea',None,'Denmark'])
self.setAssertCommand(turn, 'North Sea', 'transport', 'Sweden')
# calculate turn
turn = self.assertNextTurn(turn, '2000', 'Invasions: Two Invasions to one')
# verify units
self.assertResult(turn.previous, 'Germany', 'fail.not-strongest')
self.assertResult(turn.previous, 'North Sea', 'ok')
self.assertResult(turn.previous, 'Sweden', 'fail.not-strongest')
self.assertResult(turn.previous, 'Baltic Sea', 'ok')
self.assertUnit(turn, 'Germany', 'Army', 'Spain')
self.assertUnit(turn, 'Sweden', 'Army', 'Russia')
self.assertUnit(turn, 'North Sea', 'Ship', 'Spain')
self.assertUnit(turn, 'Baltic Sea', 'Ship', 'Russia')
| 52.242254
| 103
| 0.606384
| 1,905
| 18,546
| 5.901837
| 0.048294
| 0.123277
| 0.158499
| 0.124522
| 0.942097
| 0.935604
| 0.930268
| 0.91408
| 0.912835
| 0.902339
| 0
| 0.005847
| 0.234552
| 18,546
| 354
| 104
| 52.389831
| 0.786137
| 0.046533
| 0
| 0.790441
| 0
| 0
| 0.264554
| 0.010543
| 0
| 0
| 0
| 0
| 0.860294
| 1
| 0.058824
| false
| 0
| 0.007353
| 0
| 0.069853
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4960f2ce9ed3ed13caf540148ca1e33ad989dd3e
| 4,625
|
py
|
Python
|
tests/test_pm.py
|
ekeyme/bio-pm
|
31fe50cd4d90a05cb709c1c75a663b03d0cde6fe
|
[
"MIT"
] | null | null | null |
tests/test_pm.py
|
ekeyme/bio-pm
|
31fe50cd4d90a05cb709c1c75a663b03d0cde6fe
|
[
"MIT"
] | null | null | null |
tests/test_pm.py
|
ekeyme/bio-pm
|
31fe50cd4d90a05cb709c1c75a663b03d0cde6fe
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Unit test for pm."""
import sys
import unittest
from os.path import dirname, realpath
from pm import analyze
from pm.status import Y, Conserved, PM, NA
from pm.pattern import TranslatedPattern, PlainPattern
class RoutineTest(unittest.TestCase):
    """End-to-end checks of pm.analyze for every status outcome.

    Each test aligns a query sequence against a standard sequence and
    verifies both the status object returned by analyze() and each of
    its public attributes (seq, stdseq, gaps, length, nt_pm, aa_pm,
    pattern).
    """
    def test_analyze_with_Y(self):
        """pm.analyze should give the expectation value"""
        # seq is identical to stdseq: expect status Y with zero mismatches.
        seq = 'ATGTCGTTCTGCAGCTTCTTCGGGGGCGAGGTTTTCCAGAATCACTTTGAACCTGGCGTT'
        stdseq = 'ATGTCGTTCTGCAGCTTCTTCGGGGGCGAGGTTTTCCAGAATCACTTTGAACCTGGCGTT'
        status = analyze(seq, stdseq)
        self.assertEqual(status, Y(stdseq=stdseq, aa_pm=0))
        self.assertEqual(status.seq, seq)
        self.assertEqual(status.stdseq, stdseq)
        self.assertEqual(status.gaps, 0)
        self.assertEqual(status.length, len(seq))
        self.assertEqual(status.nt_pm, 0)
        self.assertEqual(status.aa_pm, 0)
        # translated analysis (the default) should yield a TranslatedPattern
        self.assertIsInstance(status.pattern, TranslatedPattern)
    def test_analyze_with_Conserved(self):
        """pm.analyze should give the expectation value"""
        # last codon differs (GCT vs GCC) but encodes the same amino acid:
        # one nucleotide mismatch, zero amino-acid mismatches -> Conserved.
        seq = 'ATGTCGTTCTGCAGCTTCTTCGGGGGCGAGGTTTTCCAGAATCACTTTGAACCTGGCGCT'
        stdseq = 'ATGTCGTTCTGCAGCTTCTTCGGGGGCGAGGTTTTCCAGAATCACTTTGAACCTGGCGCC'
        status = analyze(seq, stdseq)
        self.assertEqual(status, Conserved(nt_pm=1, stdseq=stdseq, aa_pm=0))
        self.assertEqual(status.seq, seq)
        self.assertEqual(status.stdseq, stdseq)
        self.assertEqual(status.gaps, 0)
        self.assertEqual(status.length, len(seq))
        self.assertEqual(status.nt_pm, 1)
        self.assertEqual(status.aa_pm, 0)
        self.assertIsInstance(status.pattern, TranslatedPattern)
    def test_analyze_with_PM(self):
        """pm.analyze should give the expectation value"""
        # one nucleotide change (CCT -> ACT) that also changes the amino
        # acid: expect status PM with nt_pm=1 and aa_pm=1.
        seq = 'ATGTCGTTCTGCAGCTTCTTCGGGGGCGAGGTTTTCCAGAATCACTTTGAACCT'
        stdseq = 'ATGTCGTTCTGCAGCTTCTTCGGGGGCGAGGTTTTCCAGAATCACTTTGAAACT'
        status = analyze(seq, stdseq)
        self.assertEqual(status, PM(nt_pm=1, aa_pm=1, stdseq=stdseq))
        self.assertEqual(status.seq, seq)
        self.assertEqual(status.stdseq, stdseq)
        self.assertEqual(status.gaps, 0)
        self.assertEqual(status.length, len(seq))
        self.assertEqual(status.nt_pm, 1)
        self.assertEqual(status.aa_pm, 1)
        self.assertIsInstance(status.pattern, TranslatedPattern)
    def test_analyze_with_NA(self):
        """pm.analyze should give the expectation value"""
        # seq carries one gap character ('-'): expect status NA with gaps=1.
        seq = 'ATGTCGTTCTGCAGCTTCTTCGGGGGCGAGGTTTTCCAGAATCACTTTGAACCTGGC-TT'
        stdseq = 'ATGTCGTTCTGCAGCTTCTTCGGGGGCGAGGTTTTCCAGAATCACTTTGAACCTGGCGTT'
        status = analyze(seq, stdseq)
        self.assertEqual(status, NA(gaps=1, stdseq=stdseq, aa_pm=0))
        self.assertEqual(status.seq, seq)
        self.assertEqual(status.stdseq, stdseq)
        self.assertEqual(status.gaps, 1)
        self.assertEqual(status.length, len(seq))
        self.assertEqual(status.nt_pm, 0)
        self.assertEqual(status.aa_pm, 0)
        self.assertIsInstance(status.pattern, TranslatedPattern)
    def test_analyze_with_Y_with_nontranslate(self):
        """pm.analyze should give the expectation value"""
        # translate=False: amino-acid comparison is skipped (aa_pm=None)
        # and the pattern falls back to a PlainPattern.
        seq = 'ATGTCGTTCTGCAGCTTCTTCGGGGGCGAGGTTTTCCAGAATCACTTTGAACCTGGCGTT'
        stdseq = 'ATGTCGTTCTGCAGCTTCTTCGGGGGCGAGGTTTTCCAGAATCACTTTGAACCTGGCGTT'
        status = analyze(seq, stdseq, translate=False)
        self.assertEqual(status, Y(stdseq=stdseq, nt_pm=0, gaps=0, aa_pm=None))
        # status.seq is compared gap-stripped (no-op here: seq has no '-')
        self.assertEqual(status.seq, seq.replace('-', ''))
        self.assertEqual(status.stdseq, stdseq)
        self.assertEqual(status.gaps, 0)
        self.assertEqual(status.length, len(seq))
        self.assertEqual(status.nt_pm, 0)
        self.assertEqual(status.aa_pm, None)
        self.assertIsInstance(status.pattern, PlainPattern)
    def test_analyze_with_NA_with_nontranslate(self):
        """pm.analyze should give the expectation value"""
        # Without translation a nucleotide mismatch cannot be classified as
        # Conserved vs PM, so the expected status is NA with aa_pm=None.
        seq = 'ATGTCGTTCTGCAGCTTCTTCGGGGGCGAGGTTTTCCAGAATCACTTTGAACCTGGCGCT'
        stdseq = 'ATGTCGTTCTGCAGCTTCTTCGGGGGCGAGGTTTTCCAGAATCACTTTGAACCTGGCGCC'
        status = analyze(seq, stdseq, translate=False)
        self.assertEqual(status, NA(nt_pm=1, aa_pm=None, gaps=0, stdseq=stdseq))
        self.assertEqual(status.seq, seq.replace('-', ''))
        self.assertEqual(status.stdseq, stdseq)
        self.assertEqual(status.gaps, 0)
        self.assertEqual(status.length, len(seq))
        self.assertEqual(status.nt_pm, 1)
        self.assertEqual(status.aa_pm, None)
        self.assertIsInstance(status.pattern, PlainPattern)
if __name__ == "__main__":  # allow running this test module directly
    unittest.main()
| 40.217391
| 80
| 0.702486
| 486
| 4,625
| 6.578189
| 0.131687
| 0.19706
| 0.275884
| 0.101345
| 0.84548
| 0.826713
| 0.822333
| 0.794808
| 0.794808
| 0.684079
| 0
| 0.007777
| 0.19373
| 4,625
| 115
| 81
| 40.217391
| 0.849558
| 0.071568
| 0
| 0.679012
| 0
| 0
| 0.168822
| 0.166471
| 0
| 0
| 0
| 0
| 0.592593
| 1
| 0.074074
| false
| 0
| 0.074074
| 0
| 0.160494
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4983cd72a878db80d217b10e1c88d7c29fdbad51
| 2,501
|
py
|
Python
|
nombre turtle/nombre turtle.py
|
NicolleArevalo2/Taller-de-programacion-
|
d1d4a19688b2fcfd92e21506317ec425fb388ae4
|
[
"MIT"
] | null | null | null |
nombre turtle/nombre turtle.py
|
NicolleArevalo2/Taller-de-programacion-
|
d1d4a19688b2fcfd92e21506317ec425fb388ae4
|
[
"MIT"
] | null | null | null |
nombre turtle/nombre turtle.py
|
NicolleArevalo2/Taller-de-programacion-
|
d1d4a19688b2fcfd92e21506317ec425fb388ae4
|
[
"MIT"
] | null | null | null |
import turtle

# Letter 1 (yellow): drawn from the origin with the pen down from the start.
tortuguita=turtle.Turtle()
tortuguita.pensize(15)
tortuguita.pencolor("Yellow")
tortuguita.shape("arrow")
tortuguita.left(90)       # face north
tortuguita.forward(70)    # left vertical stroke
tortuguita.right(150)
tortuguita.forward(80)    # diagonal stroke
tortuguita.left(150)
tortuguita.forward(70)    # right vertical stroke
# Letter 2 (light green): pen lifted, moved to x=40 then 20 further east
# while still up, so the vertical stroke actually starts at x=60.
tortuguita=turtle.Turtle()
tortuguita.shape("arrow")
tortuguita.pensize(15)
tortuguita.pencolor("light green")
tortuguita.penup()
tortuguita.goto((40, 0))
tortuguita.forward(20)    # pen is up: repositioning only, draws nothing
tortuguita.pendown()
tortuguita.left(90)
tortuguita.forward(70)
# Letter 3 (light blue): vertical stroke at x=80 plus two horizontal bars;
# note goto((80, 0)) happens with the pen DOWN, retracing the vertical.
tortuguita=turtle.Turtle()
tortuguita.shape("arrow")
tortuguita.pensize(15)
tortuguita.pencolor("light blue")
tortuguita.penup()
tortuguita.goto((80, 0))
tortuguita.pendown()
tortuguita.left(90)
tortuguita.forward(70)
tortuguita.right(90)
tortuguita.forward(30)
tortuguita.left(180)
tortuguita.forward(30)
tortuguita.goto((80, 0))
tortuguita.left(180)
tortuguita.forward(30)
# Letter 4 (light pink): a 50x70 rectangle starting at x=130.
tortuguita=turtle.Turtle()
tortuguita.shape("arrow")
tortuguita.pensize(15)
tortuguita.pencolor("light pink")
tortuguita.penup()
tortuguita.goto((130, 0))
tortuguita.pendown()
tortuguita.left(90)
tortuguita.forward(70)
tortuguita.right(90)
tortuguita.forward(50)
tortuguita.right(90)
tortuguita.forward(70)
tortuguita.right(90)
tortuguita.forward(50)
# Letter 5 (cyan): vertical stroke at x=200, retraced down, then a foot east.
tortuguita=turtle.Turtle()
tortuguita.shape("arrow")
tortuguita.pensize(15)
tortuguita.pencolor("Cyan")
tortuguita.penup()
tortuguita.goto((200, 0))
tortuguita.pendown()
tortuguita.left(90)
tortuguita.forward(70)
tortuguita.goto((200, 0))
tortuguita.right(90)
tortuguita.forward(30)
# Letter 6 (cyan): identical shape to letter 5, shifted to x=250.
tortuguita=turtle.Turtle()
tortuguita.shape("arrow")
tortuguita.pensize(15)
tortuguita.pencolor("Cyan")
tortuguita.penup()
tortuguita.goto((250, 0))
tortuguita.pendown()
tortuguita.left(90)
tortuguita.forward(70)
tortuguita.goto((250, 0))
tortuguita.right(90)
tortuguita.forward(30)
# Letter 7 (purple): stroke sequence at x=300; right(-140)/left(-320) are
# negative angles, i.e. turns in the opposite direction, and forward(0)
# only updates the heading display without moving.
tortuguita=turtle.Turtle()
tortuguita.shape("arrow")
tortuguita.pensize(15)
tortuguita.pencolor("Purple")
tortuguita.penup()
tortuguita.goto((300, 0))
tortuguita.pendown()
tortuguita.left(90)
tortuguita.forward(70)
tortuguita.right(90)
tortuguita.forward(30)
tortuguita.right(90)
tortuguita.forward(30)
tortuguita.right(90)
tortuguita.forward(30)
tortuguita.right(-140)
tortuguita.forward(0)
tortuguita.goto((300, 0))
tortuguita.left(-320)
tortuguita.forward(30)
# Decorative green doodle drawn at maximum speed with a thin pen.
# NOTE(review): the loop nesting below was reconstructed from an
# indentation-mangled source; the y/z loops are assumed to sit inside
# the x loop (a repeated petal pattern) -- verify against the original.
t=turtle.Turtle()
t.speed(0)
t.pencolor("green")
t.pensize(2)
for x in range (10):
    t.rt(60)
    for y in range (10):
        t.fd(15)   # short segment...
        t.rt(15)   # ...plus a small turn: traces an arc
    t.rt(90)
    for z in range (10):
        t.fd(15)
        t.rt(15)
    t.rt(90)
| 20.333333
| 34
| 0.761295
| 336
| 2,501
| 5.666667
| 0.14881
| 0.205357
| 0.159664
| 0.13708
| 0.817752
| 0.753676
| 0.753676
| 0.72584
| 0.69958
| 0.682248
| 0
| 0.073012
| 0.079968
| 2,501
| 123
| 35
| 20.333333
| 0.754455
| 0
| 0
| 0.790909
| 0
| 0
| 0.036371
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009091
| 0
| 0.009091
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7707cd9bb02b3400daa53f7b81e3b2478c4b7a84
| 9,383
|
py
|
Python
|
tools/c7n_gcp/tests/test_kms.py
|
dnouri/cloud-custodian
|
4e8b3b45f60731df942ffe6b61645416d7a67daa
|
[
"Apache-2.0"
] | 1
|
2020-09-07T21:10:29.000Z
|
2020-09-07T21:10:29.000Z
|
tools/c7n_gcp/tests/test_kms.py
|
dnouri/cloud-custodian
|
4e8b3b45f60731df942ffe6b61645416d7a67daa
|
[
"Apache-2.0"
] | 1
|
2021-02-10T02:20:45.000Z
|
2021-02-10T02:20:45.000Z
|
tools/c7n_gcp/tests/test_kms.py
|
dnouri/cloud-custodian
|
4e8b3b45f60731df942ffe6b61645416d7a67daa
|
[
"Apache-2.0"
] | 1
|
2021-10-15T11:29:54.000Z
|
2021-10-15T11:29:54.000Z
|
# Copyright 2019 Capital One Services, LLC
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from gcp_common import BaseTest, event_data
class KmsKeyRingTest(BaseTest):
    """Flight-data tests for the gcp.kms-keyring resource type."""

    def test_kms_keyring_query_unspecified_location(self):
        # With no 'query' clause the policy should still find the keyring.
        project = 'cloud-custodian'
        location = 'us-central1'
        keyring = 'cloud-custodian'
        expected = 'projects/{}/locations/{}/keyRings/{}'.format(
            project, location, keyring)
        factory = self.replay_flight_data(
            'kms-keyring-query-unspecified_location', project_id=project)
        policy = self.load_policy(
            {'name': 'gcp-kms-keyring-dryrun',
             'resource': 'gcp.kms-keyring'},
            session_factory=factory)
        found = policy.run()
        self.assertEqual(found[0]['name'], expected)

    def test_kms_keyring_query_array(self):
        # A list-valued 'location' query should return keyrings from both
        # locations, in order.
        project = 'cloud-custodian'
        loc_asia = 'asia-east1'
        loc_us = 'us-central1'
        ring_asia = 'cloud-custodian-asia'
        ring_us = 'cloud-custodian'
        template = 'projects/{}/locations/{}/keyRings/{}'
        expected_asia = template.format(project, loc_asia, ring_asia)
        expected_us = template.format(project, loc_us, ring_us)
        factory = self.replay_flight_data('kms-keyring-query-array', project_id=project)
        policy = self.load_policy(
            {'name': 'gcp-kms-keyring-dryrun',
             'resource': 'gcp.kms-keyring',
             'query': [{'location': [loc_asia, loc_us]}]},
            session_factory=factory)
        found = policy.run()
        self.assertEqual(found[0]['name'], expected_asia)
        self.assertEqual(found[1]['name'], expected_us)

    def test_kms_keyring_query(self):
        # A single-location query should return exactly that keyring.
        project = 'cloud-custodian'
        location = 'us-central1'
        keyring = 'cloud-custodian'
        expected = 'projects/{}/locations/{}/keyRings/{}'.format(
            project, location, keyring)
        factory = self.replay_flight_data('kms-keyring-query', project_id=project)
        policy = self.load_policy(
            {'name': 'gcp-kms-keyring-dryrun',
             'resource': 'gcp.kms-keyring',
             'query': [{'location': location}]},
            session_factory=factory)
        found = policy.run()
        self.assertEqual(found[0]['name'], expected)

    def test_kms_keyring_get(self):
        # Event-driven (gcp-audit) mode: a CreateKeyRing audit event should
        # resolve to the newly created keyring resource.
        project = 'cloud-custodian'
        location = 'us-central1'
        keyring = 'cloud-custodian'
        expected = 'projects/{}/locations/{}/keyRings/{}'.format(
            project, location, keyring)
        factory = self.replay_flight_data('kms-keyring-get', project_id=project)
        policy = self.load_policy(
            {'name': 'gcp-kms-keyring-dryrun',
             'resource': 'gcp.kms-keyring',
             'mode': {
                 'type': 'gcp-audit',
                 'methods': ['CreateKeyRing']
             }},
            session_factory=factory)
        runner = policy.get_execution_mode()
        event = event_data('kms-keyring-create.json')
        found = runner.run(event, None)
        self.assertEqual(found[0]['name'], expected)
class KmsCryptoKeyTest(BaseTest):
    """Flight-data tests for the gcp.kms-cryptokey resource type."""

    def test_kms_cryptokey_query(self):
        project = 'cloud-custodian'
        location = 'us-central1'
        keyring = 'cloud-custodian'
        cryptokey = 'cloud-custodian'
        parent = 'projects/{}/locations/{}/keyRings/{}'.format(
            project, location, keyring)
        expected = '{}/cryptoKeys/{}'.format(parent, cryptokey)
        factory = self.replay_flight_data('kms-cryptokey-query', project_id=project)
        annotation = 'c7n:kms-keyring'
        policy = self.load_policy(
            {'name': 'gcp-kms-cryptokey-dryrun',
             'resource': 'gcp.kms-cryptokey',
             'query': [{'location': location}],
             'filters': [{
                 'type': 'value',
                 'key': '"{}".name'.format(annotation),
                 'op': 'regex',
                 'value': parent
             }]},
            session_factory=factory)
        real_key = policy.resource_manager.resource_type.get_parent_annotation_key()
        # If this fails, policies filtering on the parent annotation need to
        # be updated, because the annotation key has changed.
        self.assertEqual(real_key, annotation)
        found = policy.run()
        self.assertEqual(found[0]['name'], expected)
        self.assertEqual(found[0][real_key]['name'], parent)

    def test_kms_cryptokey_get(self):
        # Event-driven mode: a CreateCryptoKey audit event should resolve to
        # the new cryptokey, annotated with its parent keyring.
        project = 'cloud-custodian'
        location = 'us-central1'
        keyring = 'cloud-custodian'
        cryptokey = 'cloud-custodian'
        parent = 'projects/{}/locations/{}/keyRings/{}'.format(
            project, location, keyring)
        expected = '{}/cryptoKeys/{}'.format(parent, cryptokey)
        factory = self.replay_flight_data('kms-cryptokey-get', project_id=project)
        policy = self.load_policy(
            {'name': 'gcp-kms-cryptokey-dryrun',
             'resource': 'gcp.kms-cryptokey',
             'mode': {
                 'type': 'gcp-audit',
                 'methods': ['CreateCryptoKey']
             }},
            session_factory=factory)
        real_key = policy.resource_manager.resource_type.get_parent_annotation_key()
        runner = policy.get_execution_mode()
        event = event_data('kms-cryptokey-create.json')
        found = runner.run(event, None)
        self.assertEqual(found[0]['name'], expected)
        self.assertEqual(found[0][real_key]['name'], parent)
class KmsCryptoKeyVersionTest(BaseTest):
    """Flight-data tests for the gcp.kms-cryptokey-version resource type."""

    def test_kms_cryptokey_version_query(self):
        project = 'cloud-custodian'
        location = 'us-central1'
        keyring = 'cloud-custodian'
        cryptokey = 'cloud-custodian'
        version = '1'
        parent = 'projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}'.format(
            project, location, keyring, cryptokey)
        expected = '{}/cryptoKeyVersions/{}'.format(parent, version)
        factory = self.replay_flight_data(
            'kms-cryptokey-version-query', project_id=project)
        annotation = 'c7n:kms-cryptokey'
        policy = self.load_policy(
            {'name': 'gcp-kms-cryptokey-version-dryrun',
             'resource': 'gcp.kms-cryptokey-version',
             'query': [{'location': location}],
             'filters': [{
                 'type': 'value',
                 'key': '"{}".name'.format(annotation),
                 'op': 'regex',
                 'value': parent
             }]},
            session_factory=factory)
        real_key = policy.resource_manager.resource_type.get_parent_annotation_key()
        # If this fails, policies filtering on the parent annotation need to
        # be updated, because the annotation key has changed.
        self.assertEqual(real_key, annotation)
        found = policy.run()
        self.assertEqual(found[0]['name'], expected)
        self.assertEqual(found[0][real_key]['name'], parent)

    def test_kms_cryptokey_version_get(self):
        # Event-driven mode: a CreateCryptoKeyVersion audit event should
        # resolve to the new version, annotated with its parent cryptokey.
        project = 'cloud-custodian'
        location = 'us-central1'
        keyring = 'cloud-custodian'
        cryptokey = 'cloud-custodian'
        version = '1'
        parent = 'projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}'.format(
            project, location, keyring, cryptokey)
        expected = '{}/cryptoKeyVersions/{}'.format(parent, version)
        factory = self.replay_flight_data(
            'kms-cryptokey-version-get', project_id=project)
        policy = self.load_policy(
            {'name': 'gcp-kms-cryptokey-version-dryrun',
             'resource': 'gcp.kms-cryptokey-version',
             'mode': {
                 'type': 'gcp-audit',
                 'methods': ['CreateCryptoKeyVersion']
             }},
            session_factory=factory)
        real_key = policy.resource_manager.resource_type.get_parent_annotation_key()
        runner = policy.get_execution_mode()
        event = event_data('kms-cryptokey-version-create.json')
        found = runner.run(event, None)
        self.assertEqual(found[0]['name'], expected)
        self.assertEqual(found[0][real_key]['name'], parent)
| 44.051643
| 99
| 0.638389
| 1,000
| 9,383
| 5.687
| 0.1
| 0.052224
| 0.066819
| 0.052752
| 0.90786
| 0.869351
| 0.869351
| 0.862493
| 0.844206
| 0.83559
| 0
| 0.006453
| 0.240328
| 9,383
| 212
| 100
| 44.259434
| 0.791386
| 0.035063
| 0
| 0.764368
| 0
| 0
| 0.209374
| 0.095512
| 0
| 0
| 0
| 0
| 0.086207
| 1
| 0.045977
| false
| 0
| 0.005747
| 0
| 0.068966
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
77563cf457c5f1c6e6b12d206d216ca9a4a34571
| 2,636
|
py
|
Python
|
flask_discord_interactions/tests/test_register.py
|
etrotta/flask-discord-interactions
|
b7e81d8919b26e7b2852c874b4943e45b8d6613a
|
[
"MIT"
] | 24
|
2021-01-03T22:36:51.000Z
|
2022-03-13T23:19:25.000Z
|
flask_discord_interactions/tests/test_register.py
|
etrotta/flask-discord-interactions
|
b7e81d8919b26e7b2852c874b4943e45b8d6613a
|
[
"MIT"
] | 48
|
2021-01-11T00:30:42.000Z
|
2022-03-25T10:25:14.000Z
|
flask_discord_interactions/tests/test_register.py
|
etrotta/flask-discord-interactions
|
b7e81d8919b26e7b2852c874b4943e45b8d6613a
|
[
"MIT"
] | 12
|
2021-04-11T23:31:52.000Z
|
2022-02-10T08:15:44.000Z
|
from flask import Flask
from flask_discord_interactions import DiscordInteractions
from flask_discord_interactions.context import ApplicationCommandType
def test_register_command():
    """Registering a single slash command and syncing it should succeed."""
    app = Flask(__name__)
    # skip signature validation and the Discord API round-trip in tests
    app.config.update(
        DONT_VALIDATE_SIGNATURE=True,
        DONT_REGISTER_WITH_DISCORD=True,
    )
    discord = DiscordInteractions(app)

    @discord.command()
    def ping(ctx):
        return "pong"

    discord.update_commands()
def test_register_user_command():
    """Registering several USER (context-menu) commands should succeed.

    Deliberately registers two commands whose function names differ only by
    case, plus one with an explicit display name containing a space.
    """
    app = Flask(__name__)
    # skip signature validation and the Discord API round-trip in tests
    app.config.update(
        DONT_VALIDATE_SIGNATURE=True,
        DONT_REGISTER_WITH_DISCORD=True,
    )
    discord = DiscordInteractions(app)

    @discord.command(type=ApplicationCommandType.USER)
    def ping(ctx):
        return "pong"

    @discord.command(type=ApplicationCommandType.USER)
    def PING(ctx):
        return "pong"

    @discord.command(name="user test", type=ApplicationCommandType.USER)
    def ping(ctx):
        return "pong"

    discord.update_commands()
def test_register_message_command():
    """Registering several MESSAGE (context-menu) commands should succeed.

    Mirrors the USER-command test: two case-variant function names and one
    command with an explicit multi-word display name.
    """
    app = Flask(__name__)
    # skip signature validation and the Discord API round-trip in tests
    app.config.update(
        DONT_VALIDATE_SIGNATURE=True,
        DONT_REGISTER_WITH_DISCORD=True,
    )
    discord = DiscordInteractions(app)

    @discord.command(type=ApplicationCommandType.MESSAGE)
    def ping(ctx):
        return "pong"

    @discord.command(type=ApplicationCommandType.MESSAGE)
    def PING(ctx):
        return "pong"

    @discord.command(name="user test", type=ApplicationCommandType.MESSAGE)
    def ping(ctx):
        return "pong"

    discord.update_commands()
def test_register_subcommand():
    """Registering a subcommand inside a command group should succeed."""
    app = Flask(__name__)
    # skip signature validation and the Discord API round-trip in tests
    app.config.update(
        DONT_VALIDATE_SIGNATURE=True,
        DONT_REGISTER_WITH_DISCORD=True,
    )
    discord = DiscordInteractions(app)
    group = discord.command_group("group")

    @group.command()
    def subcommand(ctx):
        return "pong"

    discord.update_commands()
def test_register_options():
    """Registering a command with typed options (including a default) should succeed."""
    app = Flask(__name__)
    # skip signature validation and the Discord API round-trip in tests
    app.config["DONT_VALIDATE_SIGNATURE"] = True
    app.config["DONT_REGISTER_WITH_DISCORD"] = True
    discord = DiscordInteractions(app)

    @discord.command()
    def ping(ctx, option1: str, option2: float, option3: str = ""):
        # Fixed: the original returned f"pong" -- an f-string with no
        # placeholders (lint F541); every sibling test returns the plain
        # literal. The produced value is identical.
        return "pong"

    discord.update_commands()
def test_register_subcommand_options():
    """Registering a group subcommand with typed options should succeed."""
    app = Flask(__name__)
    # skip signature validation and the Discord API round-trip in tests
    app.config.update(
        DONT_VALIDATE_SIGNATURE=True,
        DONT_REGISTER_WITH_DISCORD=True,
    )
    discord = DiscordInteractions(app)
    group = discord.command_group("group")

    @group.command()
    def subcommand(ctx, option1: str, option2: float, option3: str = ""):
        return "pong"

    discord.update_commands()
| 24.407407
| 75
| 0.701442
| 293
| 2,636
| 6.03413
| 0.129693
| 0.061086
| 0.088235
| 0.090498
| 0.907805
| 0.893665
| 0.893665
| 0.893665
| 0.816742
| 0.784502
| 0
| 0.002812
| 0.19044
| 2,636
| 107
| 76
| 24.635514
| 0.825679
| 0
| 0
| 0.788732
| 0
| 0
| 0.137329
| 0.111533
| 0
| 0
| 0
| 0
| 0
| 1
| 0.225352
| false
| 0
| 0.042254
| 0.140845
| 0.408451
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
6214a8208074ef873ae608d4f0c92316517edffe
| 18,558
|
py
|
Python
|
src/MBE_Potential.py
|
heindelj/MBE_Toolkit
|
4740f4cf939f720875a746c3cf2e96cf1dbfb3a9
|
[
"MIT"
] | null | null | null |
src/MBE_Potential.py
|
heindelj/MBE_Toolkit
|
4740f4cf939f720875a746c3cf2e96cf1dbfb3a9
|
[
"MIT"
] | null | null | null |
src/MBE_Potential.py
|
heindelj/MBE_Toolkit
|
4740f4cf939f720875a746c3cf2e96cf1dbfb3a9
|
[
"MIT"
] | null | null | null |
from Fragments import Fragments
from Potential import *
import numpy as np
from math import comb
from ase.units import Hartree, Bohr
import sys, os, time
from multiprocessing import Pool
class MBE_Potential:
"""
Base class of the MBE potentials.
"""
def log_mb_terms(self, nbody_energies, nbody_forces):
"""
Logs all of the n-body energies and forces calculated and returns it as a dictionary
"""
mb_terms = {}
for i, nbody_force in enumerate(nbody_forces):
key = str(i+1) + "body_forces"
mb_terms[key] = nbody_force
for i, nbody_energy in enumerate(nbody_energies):
key = str(i+1) + "body_energy"
mb_terms[key] = nbody_energy
return mb_terms
def evaluate_on_geometry(self, geometry):
"""This is a thin wrapper around evaluate_on_fragments() which allows
raw coordinates to be passed in, and then fragments those coordinates
according to the shape of the self.fragments.fragments.
Args:
geometry (ndarray): Nx3 array of cartesian coordinates
"""
self.fragments.fragment_geometry(geometry)
if not self.return_mb_terms:
energy, forces = self.evaluate_on_fragments()
return energy, forces
else:
energy, forces, mb_terms = self.evaluate_on_fragments()
return energy, forces, mb_terms
def evaluate_on_geometry_parallel(self, geometry):
"""This is a thin wrapper around evaluate_on_fragments() which allows
raw coordinates to be passed in, and then fragments those coordinates
according to the shape of the self.fragments.fragments.
Args:
geometry (ndarray): Nx3 array of cartesian coordinates
"""
self.fragments.fragment_geometry(geometry)
if not self.return_mb_terms:
energy, forces = self.evaluate_on_fragments_parallel()
return energy, forces
else:
energy, forces, mb_terms = self.evaluate_on_fragments_parallel()
return energy, forces, mb_terms
class ASE_MBE_Potential(MBE_Potential):
    """
    Computes the MBE using ASE calculators. Takes an order of the MBE,
    Fragments in the form of Atoms objects, and a calculator with which to
    carry out the MBE.
    """
    def __init__(self, highest_order: int, fragments: Fragments, nproc=8, return_order_n=None, return_mb_terms=False):
        """
        Args:
            highest_order: largest n-body term included in the expansion.
            fragments: Fragments object whose members are ASE Atoms objects
                with calculators attached.
            nproc: number of worker processes used by the parallel path.
            return_order_n: an integer which allows the n-body term to be
                returned rather than the total.
            return_mb_terms: when True, also return the per-order terms dict.
        """
        self.highest_order = highest_order
        self.fragments = fragments
        self._pool = Pool(nproc)
        self.return_order_n = return_order_n
        self.return_mb_terms = return_mb_terms

    @staticmethod
    def evaluate_ase(fragment):
        """Return (energy, forces) for one fragment from its attached ASE
        calculator, converted from ASE units into atomic units via the
        Hartree and Bohr conversion factors."""
        forces = fragment.get_forces()
        return fragment.get_potential_energy() / Hartree, forces / Hartree * Bohr

    def evaluate_on_fragments(self):
        """
        Uses the ASE Calculator object attached to the Fragments to calculate the forces
        and energies for every fragment in self.fragments.
        This operates directly on the fragments brought in with self.fragments
        """
        # this will hang on to the n-body forces and energies summed for all
        # n-mers. These will then be combined with proper combinatorial weights
        # to get the total forces and energies.
        forces_sum = []
        energy_sum = []
        for order in range(self.highest_order):
            fragment_combinations = self.fragments.make_nmers(order + 1)
            atom_indices = self.fragments.get_indices_for_fragment_combination(order + 1)
            # each force sum is the size of forces for total system
            forces_sum.append(np.zeros((len(self.fragments.flattened_atom_labels), 3), dtype=np.float64))
            energy_sum.append(0.0)
            # now get components of the MBE by summing energies and forces for each order
            for i_frag, fragment in enumerate(fragment_combinations):
                energy, forces = self.evaluate_ase(fragment)
                # add forces in the appropriate rows of force sum
                energy_sum[order] += energy
                assert len(forces) == len(fragment)
                for i_force, force in enumerate(forces):
                    forces_sum[order][atom_indices[i_frag][i_force]] += force
        ### now that we have the sums for each part, we must weight them by the
        ### combinatorial number of times they show up and accumulate the totals.
        total_forces = np.zeros((len(self.fragments.flattened_atom_labels), 3), dtype=np.float64)
        nbody_energies = [0 for x in range(self.highest_order)]
        nbody_forces = [np.zeros_like(total_forces) for x in range(self.highest_order)]
        # set the appropriate 1-body terms
        nbody_forces[0] = forces_sum[0]
        nbody_energies[0] = energy_sum[0]
        # multiply each many-body sum by the appropriate combinatorial factor
        N = len(self.fragments.fragments)
        try:
            for iMBE in range(1, self.highest_order):
                for i in range(iMBE+1):
                    nbody_energies[iMBE] += (-1)**i * comb(N-(iMBE+1)+i,i) * energy_sum[iMBE-i]
                    nbody_forces[iMBE] += (-1)**i * comb(N-(iMBE+1)+i,i) * forces_sum[iMBE-i]
        except ValueError:
            # math.comb raises ValueError on a negative first argument, which
            # happens when highest_order exceeds the number of fragments.
            print(f"The order of the MBE being evaluated seems to be larger than the number of fragments, {N}. Check that you haven't asked for too high of an MBE by asking for a {self.highest_order}-body expansion.")
            sys.exit(1)
        # accumulate many-body energies and forces
        total_energy = np.sum(nbody_energies)
        total_forces = np.sum(nbody_forces, axis=0)
        if not self.return_mb_terms:
            if self.return_order_n is None:
                return total_energy, total_forces
            else:
                return nbody_energies[self.return_order_n-1], nbody_forces[self.return_order_n-1]
        else:
            return total_energy, total_forces, self.log_mb_terms(nbody_energies, nbody_forces)

    def evaluate_on_fragments_parallel(self):
        """
        Same as evaluate_on_fragments(), but evaluates all n-mers in a single
        multiprocessing pool map before accumulating the n-body terms.
        This operates directly on the fragments brought in with self.fragments
        """
        # this will hang on to the n-body forces and energies summed for all
        # n-mers. These will then be combined with proper combinatorial weights
        # to get the total forces and energies.
        all_fragments = []
        all_indices_into_fragments = []
        for order in range(self.highest_order):
            all_fragments += self.fragments.make_nmers(order + 1)
            all_indices_into_fragments += self.fragments.get_indices_for_fragment_combination(order + 1)
        # evaluate the potential on all fragments.
        # BUG FIX: the original called self.potential.evaluate here, but this
        # class never sets self.potential (that attribute belongs to
        # Classical_MBE_Potential), so this line always raised AttributeError.
        # The ASE path evaluates fragments with the evaluate_ase helper.
        energies, forces = map(list, zip(*self._pool.map(self.evaluate_ase, all_fragments)))
        # each force sum is the size of forces for total system
        # NOTE(review): the serial path sizes these arrays with
        # self.fragments.flattened_atom_labels while this path uses
        # self.fragments.atom_labels -- confirm both name the same data.
        forces_sum = np.zeros((self.highest_order, len(self.fragments.atom_labels), 3), dtype=np.float64)
        energy_sum = np.zeros((self.highest_order,))
        # loop through the appropriate parts of the energies and forces arrays and sum them for each n-body term
        N = len(self.fragments.fragments)
        comb_sum = 0
        global_fragment_index = 0
        for order in range(self.highest_order):
            # mask selects the comb(N, order+1) results belonging to this order
            mask = np.full(len(energies), False)
            mask[np.arange(comb_sum, comb(N, order+1) + comb_sum)] = True
            energy_sum[order] = np.sum(energies, where=mask)
            for nbody_forces in forces[comb_sum:(comb(N, order+1) + comb_sum)]:
                indices = all_indices_into_fragments[global_fragment_index]
                global_fragment_index += 1
                for i_force, force in enumerate(nbody_forces):
                    forces_sum[order][indices[i_force]] += force
            comb_sum += comb(N, order+1)
        ### now that we have the sums for each part, we must weight them by the
        ### combinatorial number of times they show up and accumulate the totals.
        total_forces = np.zeros((len(self.fragments.atom_labels), 3), dtype=np.float64)
        nbody_energies = [0 for x in range(self.highest_order)]
        nbody_forces = [np.zeros_like(total_forces) for x in range(self.highest_order)]
        # set the appropriate 1-body terms
        nbody_forces[0] = forces_sum[0]
        nbody_energies[0] = energy_sum[0]
        # multiply each many-body sum by the appropriate combinatorial factor
        N = len(self.fragments.fragments)
        try:
            for iMBE in range(1, self.highest_order):
                for i in range(iMBE+1):
                    nbody_energies[iMBE] += (-1)**i * comb(N-(iMBE+1)+i,i) * energy_sum[iMBE-i]
                    nbody_forces[iMBE] += (-1)**i * comb(N-(iMBE+1)+i,i) * forces_sum[iMBE-i]
        except ValueError:
            # added for consistency with evaluate_on_fragments(): math.comb
            # raises ValueError when highest_order exceeds the fragment count.
            print(f"The order of the MBE being evaluated seems to be larger than the number of fragments, {N}. Check that you haven't asked for too high of an MBE by asking for a {self.highest_order}-body expansion.")
            sys.exit(1)
        # accumulate many-body energies and forces
        total_energy = np.sum(nbody_energies)
        total_forces = np.sum(nbody_forces, axis=0)
        if not self.return_mb_terms:
            if self.return_order_n is None:
                return total_energy, total_forces
            else:
                return nbody_energies[self.return_order_n-1], nbody_forces[self.return_order_n-1]
        else:
            return total_energy, total_forces, self.log_mb_terms(nbody_energies, nbody_forces)
class Classical_MBE_Potential(MBE_Potential):
    """
    Implements an MBE potential which calls out to a Potential object and
    parses the output energy and forces
    """
    # this is broken. Move the potentials into the fragments object.
    def __init__(self, highest_order: int, fragments: Fragments, potential: Potential, nproc=8, return_order_n=None, return_mb_terms=False):
        # highest_order: largest n-body term included in the expansion
        # fragments: Fragments object providing make_nmers() and atom indexing
        # potential: Potential object with evaluate(fragment) -> (energy, forces)
        # nproc: number of worker processes for the parallel evaluation path
        self.highest_order = highest_order
        self.fragments = fragments
        self.potential = potential
        self._pool = Pool(nproc)
        self.return_order_n = return_order_n # an integer which allows the n-body term to be returned rather than the total.
        self.return_mb_terms = return_mb_terms
    def evaluate_on_fragments(self):
        """
        Uses the Potential object passed in to calculate the forces and energies for every fragment
        in self.fragments.
        This operates directly on the fragments brought in with self.fragments
        """
        # this will hang on to the n-body forces and energies summed for all
        # n-mers. These will then be combined with proper combinatorial weights
        # to get the total forces and energies.
        forces_sum = []
        energy_sum = []
        for order in range(self.highest_order):
            fragment_combinations = self.fragments.make_nmers(order + 1)
            atom_indices = self.fragments.get_indices_for_fragment_combination(order + 1)
            # each force sum is the size of forces for total system
            forces_sum.append(np.zeros( (len(self.fragments.atom_labels), 3), dtype=np.float64))
            energy_sum.append(0.0)
            # now get components of the MBE by summing energies and forces for each order
            for i_frag, fragment in enumerate(fragment_combinations):
                energy, forces = self.potential.evaluate(fragment)
                # add forces in the appropriate rows of force sum
                energy_sum[order] += energy
                assert(len(forces) == len(fragment))
                for i_force, force in enumerate(forces):
                    forces_sum[order][atom_indices[i_frag][i_force]] += force
        ### now that we have the sums for each part, we must weight them by the
        ### combinatorial number of times they show up and accumulate the totals.
        total_forces = np.zeros( (len(self.fragments.atom_labels), 3), dtype=np.float64)
        total_energy = 0.0
        nbody_energies = [0 for x in range(self.highest_order)]
        nbody_forces = [np.zeros_like(total_forces) for x in range(self.highest_order)]
        # set the appropriate 1-body terms
        nbody_forces[0] = forces_sum[0]
        nbody_energies[0] = energy_sum[0]
        # multiply each many-body sum by the appropriate combinatorial factor
        N = len(self.fragments.fragments)
        try:
            # standard inclusion/exclusion weighting of the n-mer sums;
            # math.comb raises ValueError on a negative first argument,
            # which happens when highest_order exceeds the fragment count.
            for iMBE in range(1, self.highest_order):
                for i in range(iMBE+1):
                    nbody_energies[iMBE] += (-1)**i * comb(N-(iMBE+1)+i,i) * energy_sum[iMBE-i]
                    nbody_forces[iMBE] += (-1)**i * comb(N-(iMBE+1)+i,i) * forces_sum[iMBE-i]
        except ValueError:
            print(f"The order of the MBE being evaluated seems to be larger than the number of fragments, {N}. Check that you haven't asked for too high of an MBE by asking for a {self.highest_order}-body expansion.")
            sys.exit(1)
        # accumulate many-body energies and forces
        total_energy = np.sum(nbody_energies)
        total_forces = np.sum(nbody_forces, axis=0)
        #print(total_energy * 627.5)
        #print(total_forces * 627.5 / 1.88973)
        if not self.return_mb_terms:
            if self.return_order_n == None:
                return total_energy, total_forces
            else:
                # return only the requested n-body term (1-indexed)
                return nbody_energies[self.return_order_n-1], nbody_forces[self.return_order_n-1]
        else:
            return total_energy, total_forces, self.log_mb_terms(nbody_energies, nbody_forces)
    def evaluate_on_fragments_parallel(self):
        """
        Uses the Potential object passed in to calculate the forces and energies for every fragment
        in self.fragments.
        This operates directly on the fragments brought in with self.fragments

        Same accumulation as evaluate_on_fragments(), but all n-mers are
        evaluated in one multiprocessing pool map first, then partitioned
        back into per-order sums by combinatorial offsets.
        """
        # this will hang on to the n-body forces and energies summed for all
        # n-mers. These will then be combined with proper combinatorial weights
        # to get the total forces and energies.
        all_fragments = []
        all_indices_into_fragments = []
        for order in range(self.highest_order):
            all_fragments += self.fragments.make_nmers(order + 1)
            all_indices_into_fragments += self.fragments.get_indices_for_fragment_combination(order + 1)
        # evaluate the potential on all fragments
        energies, forces = map(list, zip(*self._pool.map(self.potential.evaluate, all_fragments)))
        # each force sum is the size of forces for total system
        forces_sum = np.zeros( (self.highest_order, len(self.fragments.atom_labels), 3), dtype=np.float64)
        energy_sum = np.zeros( (self.highest_order,) )
        # loop through the appropriate parts of the energies and forces arrays and sum them for each n-body term
        N = len(self.fragments.fragments)
        comb_sum = 0
        global_fragment_index = 0
        for order in range(self.highest_order):
            # mask selects the comb(N, order+1) results belonging to this order
            mask = np.full(len(energies), False)
            mask[np.arange(comb_sum, comb(N, order+1) + comb_sum)] = True
            energy_sum[order] = np.sum(energies, where=mask)
            for nbody_forces in forces[comb_sum:(comb(N, order+1) + comb_sum)]:
                indices = all_indices_into_fragments[global_fragment_index]
                global_fragment_index += 1
                for i_force, force in enumerate(nbody_forces):
                    forces_sum[order][indices[i_force]] += force
            comb_sum += comb(N, order+1)
        ### now that we have the sums for each part, we must weight them by the
        ### combinatorial number of times they show up and accumulate the totals.
        total_forces = np.zeros( (len(self.fragments.atom_labels), 3), dtype=np.float64)
        total_energy = 0.0
        nbody_energies = [0 for x in range(self.highest_order)]
        nbody_forces = [np.zeros_like(total_forces) for x in range(self.highest_order)]
        # set the appropriate 1-body terms
        nbody_forces[0] = forces_sum[0]
        nbody_energies[0] = energy_sum[0]
        # multiply each many-body sum by the appropriate combinatorial factor
        # NOTE(review): unlike evaluate_on_fragments(), this loop has no
        # ValueError guard for a too-high expansion order -- confirm intended.
        N = len(self.fragments.fragments)
        for iMBE in range(1, self.highest_order):
            for i in range(iMBE+1):
                nbody_energies[iMBE] += (-1)**i * comb(N-(iMBE+1)+i,i) * energy_sum[iMBE-i]
                nbody_forces[iMBE] += (-1)**i * comb(N-(iMBE+1)+i,i) * forces_sum[iMBE-i]
        # accumulate many-body energies and forces
        total_energy = np.sum(nbody_energies)
        total_forces = np.sum(nbody_forces, axis=0)
        #print(total_energy * 627.5)
        #print(total_forces * 627.5 / 1.88973)
        if not self.return_mb_terms:
            if self.return_order_n == None:
                return total_energy, total_forces
            else:
                # return only the requested n-body term (1-indexed)
                return nbody_energies[self.return_order_n-1], nbody_forces[self.return_order_n-1]
        else:
            return total_energy, total_forces, self.log_mb_terms(nbody_energies, nbody_forces)
if __name__ == '__main__':
    # usage: python MBE_Potential.py geometry.xyz
    try:
        ifile = sys.argv[1]
    except IndexError:
        # narrowed from a bare except: only a missing argv entry is expected here
        print("Didn't get an xyz file.")
        sys.exit(1)
    fragments = Fragments(ifile)
    ttm21f = TTM("/home/heindelj/dev/python_development/MBE_Toolkit/bin/")
    #mbpol = MBPol("/home/heindelj/dev/python_development/MBE_Toolkit/bin/")
    mbe_order = 6
    # BUG FIX: the original constructed MBE_Potential, the base class, which
    # defines no __init__ and cannot accept (order, fragments, potential, ...)
    # arguments (TypeError at runtime). That signature belongs to
    # Classical_MBE_Potential, which is also the class that owns
    # self.potential used by evaluate_on_fragments_parallel().
    mbe_ff = Classical_MBE_Potential(mbe_order, fragments, ttm21f, return_mb_terms=True)
    start = time.time()
    energy, forces, mb_terms = mbe_ff.evaluate_on_fragments_parallel()
    print(forces)
    # report each n-body energy in kcal/mol with its percentage of the total
    for key, value in mb_terms.items():
        if "energy" in key:
            print(key, ": ", "{:.6f}".format(value * 627.5), " ({:.2f})".format(value / energy * 100))
    print("Total Energy MBE: ", "{:.6f}".format(energy * 627.5), "kcal/mol")
    print("Total Energy Full: ", "{:.6f}".format(ttm21f.evaluate(np.vstack(fragments.fragments))[0] * 627.5), "kcal/mol")
    print(time.time() - start, " seconds")
| 47.341837
| 218
| 0.62857
| 2,476
| 18,558
| 4.538772
| 0.101777
| 0.041644
| 0.039865
| 0.019932
| 0.873554
| 0.865545
| 0.860028
| 0.853533
| 0.834668
| 0.834668
| 0
| 0.013675
| 0.286777
| 18,558
| 391
| 219
| 47.462916
| 0.835373
| 0.250512
| 0
| 0.766234
| 0
| 0.008658
| 0.045191
| 0.007926
| 0
| 0
| 0
| 0
| 0.008658
| 1
| 0.04329
| false
| 0
| 0.030303
| 0
| 0.164502
| 0.034632
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62258cbba57b7ec5c6da2fa48144f17b47d26f8a
| 9,147
|
py
|
Python
|
humann2/tests/functional_tests_humann2.py
|
dytk2134/humann2
|
9b8f212bdd910ee7187f06f1550f0c86bce0473b
|
[
"MIT"
] | null | null | null |
humann2/tests/functional_tests_humann2.py
|
dytk2134/humann2
|
9b8f212bdd910ee7187f06f1550f0c86bce0473b
|
[
"MIT"
] | null | null | null |
humann2/tests/functional_tests_humann2.py
|
dytk2134/humann2
|
9b8f212bdd910ee7187f06f1550f0c86bce0473b
|
[
"MIT"
] | null | null | null |
import unittest
import subprocess
import tempfile
import os
import filecmp
import shutil
import cfg
import utils
class TestFunctionalHumann2EndtoEnd(unittest.TestCase):
    """
    Test humann2 with end to end functional tests
    """

    def _run_humann2_and_check(self, folder, input_file, extra_args=(), expected_files=None):
        """Run one end-to-end humann2 invocation and verify its output files.

        Creates a temp output directory named after *folder*, runs humann2 on
        *input_file* with any *extra_args* appended to the command line,
        asserts that every expected-output check passes, then removes the
        temp directory. *expected_files* defaults to
        cfg.expected_demo_output_files (resolved at call time, and a tuple
        default is used to avoid the mutable-default pitfall).
        """
        if expected_files is None:
            expected_files = cfg.expected_demo_output_files
        # create a temp directory for output
        tempdir = utils.create_temp_folder(folder)
        # run humann2 test
        command = ["humann2", "--input", input_file, "--output", tempdir] + list(extra_args)
        utils.run_humann2(command)
        # check the output files are as expected
        for expression, message in utils.check_output(expected_files, tempdir):
            self.assertTrue(expression, message)
        # remove the temp directory
        utils.remove_temp_folder(tempdir)

    def test_humann2_fastq(self):
        """Test the standard humann2 flow on a fastq input file"""
        self._run_humann2_and_check("fastq", cfg.demo_fastq)

    def test_humann2_fasta(self):
        """Test the standard humann2 flow on a fasta input file"""
        self._run_humann2_and_check("fasta", cfg.demo_fasta)

    def test_humann2_sam(self):
        """Test the standard humann2 flow on a sam input file"""
        self._run_humann2_and_check("sam", cfg.demo_sam)

    def test_humann2_m8(self):
        """Test the standard humann2 flow on a m8 input file"""
        self._run_humann2_and_check("m8", cfg.demo_m8)

    def test_humann2_gene_families(self):
        """Test the standard humann2 flow on a gene families output file as input

        The expected output includes all files except the gene families output
        file, since that file was used as input.
        """
        self._run_humann2_and_check(
            "gene_families", cfg.demo_gene_families,
            expected_files=cfg.expected_demo_output_files_genefamilies_input)

    def test_humann2_fastq_bypass_nucleotide_search(self):
        """Test the fastq flow with bypassing nucleotide search"""
        self._run_humann2_and_check(
            "fastq_bypass_nucleotide_search", cfg.demo_fastq,
            ["--bypass-nucleotide-search"])

    def test_humann2_fasta_bypass_nucleotide_search(self):
        """Test the fasta flow with bypassing nucleotide search"""
        self._run_humann2_and_check(
            "fasta_bypass_nucleotide_search", cfg.demo_fasta,
            ["--bypass-nucleotide-search"])

    def test_humann2_fastq_bypass_translated_search(self):
        """Test the fastq flow with bypassing translated search"""
        self._run_humann2_and_check(
            "fastq_bypass_translated_search", cfg.demo_fastq,
            ["--bypass-translated-search"])

    def test_humann2_fasta_bypass_translated_search(self):
        """Test the fasta flow with bypassing translated search"""
        self._run_humann2_and_check(
            "fasta_bypass_translated_search", cfg.demo_fasta,
            ["--bypass-translated-search"])

    def test_humann2_fastq_bypass_prescreen(self):
        """Test the fastq flow with bypassing prescreen"""
        self._run_humann2_and_check(
            "fastq_bypass_prescreen", cfg.demo_fastq,
            ["--bypass-prescreen"])

    def test_humann2_fasta_bypass_prescreen(self):
        """Test the fasta flow with bypassing prescreen"""
        self._run_humann2_and_check(
            "fasta_bypass_prescreen", cfg.demo_fasta,
            ["--bypass-prescreen"])

    def test_humann2_fastq_custom_taxonomic_profile(self):
        """Test the fastq flow with a custom taxonomic profile"""
        self._run_humann2_and_check(
            "fastq_custom_taxonomic_profile", cfg.demo_fastq,
            ["--taxonomic-profile", cfg.demo_bugs_list])
| 36.297619
| 114
| 0.638461
| 1,049
| 9,147
| 5.387035
| 0.071497
| 0.048664
| 0.05415
| 0.040347
| 0.913467
| 0.913467
| 0.913467
| 0.913467
| 0.874712
| 0.874712
| 0
| 0.010079
| 0.284137
| 9,147
| 252
| 115
| 36.297619
| 0.852932
| 0.268066
| 0
| 0.5
| 0
| 0
| 0.102397
| 0.047309
| 0
| 0
| 0
| 0
| 0.12766
| 1
| 0.12766
| false
| 0.191489
| 0.085106
| 0
| 0.223404
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
6234a5096c9debeec4b401872e9a45d870b08026
| 5,941
|
py
|
Python
|
dfirtrack_main/tests/osimportname/test_osimportname_views.py
|
blackhatethicalhacking/dfirtrack
|
9c2e13015291f2981d14d63c9683e7c447e91f3a
|
[
"MIT"
] | 4
|
2020-03-06T17:37:09.000Z
|
2020-03-17T07:50:55.000Z
|
dfirtrack_main/tests/osimportname/test_osimportname_views.py
|
blackhatethicalhacking/dfirtrack
|
9c2e13015291f2981d14d63c9683e7c447e91f3a
|
[
"MIT"
] | null | null | null |
dfirtrack_main/tests/osimportname/test_osimportname_views.py
|
blackhatethicalhacking/dfirtrack
|
9c2e13015291f2981d14d63c9683e7c447e91f3a
|
[
"MIT"
] | 1
|
2020-03-06T20:54:52.000Z
|
2020-03-06T20:54:52.000Z
|
from django.contrib.auth.models import User
from django.test import TestCase
from dfirtrack_main.models import Os, Osimportname
import urllib.parse
class OsimportnameViewTestCase(TestCase):
""" osimportname view tests """
@classmethod
def setUpTestData(cls):
# create object
os_1 = Os.objects.create(os_name='os_1')
# create object
Osimportname.objects.create(osimportname_name='osimportname_1', osimportname_importer='osimportname_importer_1', os = os_1)
# create user
test_user = User.objects.create_user(username='testuser_osimportname', password='SU7QGdCzPMBJd3l9URoS')
def test_osimportnames_list_not_logged_in(self):
""" test list view """
# create url
destination = '/login/?next=' + urllib.parse.quote('/osimportnames/', safe='')
# get response
response = self.client.get('/osimportnames/', follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_osimportnames_list_logged_in(self):
""" test list view """
# login testuser
login = self.client.login(username='testuser_osimportname', password='SU7QGdCzPMBJd3l9URoS')
# get response
response = self.client.get('/osimportnames/')
# compare
self.assertEqual(response.status_code, 200)
def test_osimportnames_list_template(self):
""" test list view """
# login testuser
login = self.client.login(username='testuser_osimportname', password='SU7QGdCzPMBJd3l9URoS')
# get response
response = self.client.get('/osimportnames/')
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/osimportname/osimportnames_list.html')
def test_osimportnames_list_get_user_context(self):
""" test list view """
# login testuser
login = self.client.login(username='testuser_osimportname', password='SU7QGdCzPMBJd3l9URoS')
# get response
response = self.client.get('/osimportnames/')
# compare
self.assertEqual(str(response.context['user']), 'testuser_osimportname')
def test_osimportnames_add_not_logged_in(self):
""" test add view """
# create url
destination = '/login/?next=' + urllib.parse.quote('/osimportnames/add/', safe='')
# get response
response = self.client.get('/osimportnames/add/', follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_osimportnames_add_logged_in(self):
""" test add view """
# login testuser
login = self.client.login(username='testuser_osimportname', password='SU7QGdCzPMBJd3l9URoS')
# get response
response = self.client.get('/osimportnames/add/')
# compare
self.assertEqual(response.status_code, 200)
def test_osimportnames_add_template(self):
""" test add view """
# login testuser
login = self.client.login(username='testuser_osimportname', password='SU7QGdCzPMBJd3l9URoS')
# get response
response = self.client.get('/osimportnames/add/')
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/osimportname/osimportnames_add.html')
def test_osimportnames_add_get_user_context(self):
""" test add view """
# login testuser
login = self.client.login(username='testuser_osimportname', password='SU7QGdCzPMBJd3l9URoS')
# get response
response = self.client.get('/osimportnames/add/')
# compare
self.assertEqual(str(response.context['user']), 'testuser_osimportname')
def test_osimportnames_edit_not_logged_in(self):
""" test edit view """
# get object
osimportname_1 = Osimportname.objects.get(osimportname_name='osimportname_1')
# create url
destination = '/login/?next=' + urllib.parse.quote('/osimportnames/' + str(osimportname_1.osimportname_id) + '/edit/', safe='')
# get response
response = self.client.get('/osimportnames/' + str(osimportname_1.osimportname_id) + '/edit/', follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_osimportnames_edit_logged_in(self):
""" test edit view """
# get object
osimportname_1 = Osimportname.objects.get(osimportname_name='osimportname_1')
# login testuser
login = self.client.login(username='testuser_osimportname', password='SU7QGdCzPMBJd3l9URoS')
# get response
response = self.client.get('/osimportnames/' + str(osimportname_1.osimportname_id) + '/edit/')
# compare
self.assertEqual(response.status_code, 200)
def test_osimportnames_edit_template(self):
    """ test edit view """

    # fetch the object under test
    osimportname_1 = Osimportname.objects.get(osimportname_name='osimportname_1')
    # authenticate the test user before requesting the view
    self.client.login(username='testuser_osimportname', password='SU7QGdCzPMBJd3l9URoS')
    # request the edit view for this object
    response = self.client.get('/osimportnames/' + str(osimportname_1.osimportname_id) + '/edit/')
    # the edit view must render its dedicated template
    self.assertTemplateUsed(response, 'dfirtrack_main/osimportname/osimportnames_edit.html')
def test_osimportnames_edit_get_user_context(self):
    """ test edit view """

    # fetch the object under test
    osimportname_1 = Osimportname.objects.get(osimportname_name='osimportname_1')
    # authenticate the test user before requesting the view
    self.client.login(username='testuser_osimportname', password='SU7QGdCzPMBJd3l9URoS')
    # request the edit view for this object
    response = self.client.get('/osimportnames/' + str(osimportname_1.osimportname_id) + '/edit/')
    # the template context must expose the authenticated user
    self.assertEqual(str(response.context['user']), 'testuser_osimportname')
| 40.691781
| 135
| 0.673961
| 608
| 5,941
| 6.394737
| 0.108553
| 0.054012
| 0.061728
| 0.070988
| 0.857253
| 0.829733
| 0.815586
| 0.801183
| 0.724794
| 0.699074
| 0
| 0.016036
| 0.212759
| 5,941
| 145
| 136
| 40.972414
| 0.815266
| 0.120687
| 0
| 0.5
| 0
| 0
| 0.205369
| 0.087792
| 0
| 0
| 0
| 0
| 0.193548
| 1
| 0.209677
| false
| 0.16129
| 0.854839
| 0
| 1.080645
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
|
0
| 10
|
6251f2198f7c6c98e3155d5dcd13db316acf70e6
| 231
|
py
|
Python
|
tests/__init__.py
|
karenc/houston
|
4eaaaf11d61394035e34b55bb847ea7eb4099c61
|
[
"Apache-2.0"
] | 6
|
2021-04-06T19:50:52.000Z
|
2022-01-19T17:42:33.000Z
|
tests/__init__.py
|
WildMeOrg/houston
|
8102229421388e44234c07ee6cb73bf705b6fba0
|
[
"Apache-2.0"
] | 491
|
2021-01-20T01:10:00.000Z
|
2022-03-31T19:30:48.000Z
|
tests/__init__.py
|
karenc/houston
|
4eaaaf11d61394035e34b55bb847ea7eb4099c61
|
[
"Apache-2.0"
] | 2
|
2021-03-12T02:33:55.000Z
|
2021-03-16T20:18:43.000Z
|
# -*- coding: utf-8 -*-
"""
The Application tests collection
================================
"""
# Fixed, well-known UUIDs shared across the test suite so tests can reference
# the same asset groups deterministically.
TEST_ASSET_GROUP_UUID = '00000000-0000-0000-0000-000000000003'
# NOTE(review): name suggests this group is expected to contain no assets —
# confirm against the fixtures that create it.
TEST_EMPTY_ASSET_GROUP_UUID = '00000000-0000-0000-0000-000000000001'
| 25.666667
| 68
| 0.65368
| 26
| 231
| 5.538462
| 0.615385
| 0.222222
| 0.194444
| 0.305556
| 0.472222
| 0.472222
| 0.472222
| 0
| 0
| 0
| 0
| 0.306604
| 0.082251
| 231
| 8
| 69
| 28.875
| 0.372642
| 0.380952
| 0
| 0
| 0
| 0
| 0.533333
| 0.533333
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
657453dabdaa578549c37c6bb974d9796fe55093
| 118
|
py
|
Python
|
8kyu/Invert values.py
|
walkgo/codewars_tasks
|
4c0ab6f0e1d2181318fc15b12dd55ef565ecd223
|
[
"MIT"
] | null | null | null |
8kyu/Invert values.py
|
walkgo/codewars_tasks
|
4c0ab6f0e1d2181318fc15b12dd55ef565ecd223
|
[
"MIT"
] | null | null | null |
8kyu/Invert values.py
|
walkgo/codewars_tasks
|
4c0ab6f0e1d2181318fc15b12dd55ef565ecd223
|
[
"MIT"
] | null | null | null |
def invert(lst):
    """Return a new list with every number in *lst* negated.

    The input list is not modified; an empty list yields an empty list.
    Applying the function twice restores the original values.

    :param lst: iterable of numbers
    :return: list of the negated numbers, in the same order
    """
    # The original file defined `invert` twice — a generator-based version
    # immediately shadowed by this "best practices" comprehension. The dead
    # first definition is removed; behavior of the surviving binding is kept.
    return [-x for x in lst]
| 16.857143
| 36
| 0.627119
| 22
| 118
| 3.363636
| 0.545455
| 0.243243
| 0.324324
| 0.486486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011364
| 0.254237
| 118
| 7
| 37
| 16.857143
| 0.829545
| 0.118644
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
029bb35581e3e79449efe1da4445c1d877813f4a
| 178,067
|
py
|
Python
|
gateapi-python/gate_api/api/delivery_api.py
|
jarenmt/IEOPUMP
|
220f7f612d299f7305e82fe6c33661e6871f2d86
|
[
"MIT"
] | null | null | null |
gateapi-python/gate_api/api/delivery_api.py
|
jarenmt/IEOPUMP
|
220f7f612d299f7305e82fe6c33661e6871f2d86
|
[
"MIT"
] | null | null | null |
gateapi-python/gate_api/api/delivery_api.py
|
jarenmt/IEOPUMP
|
220f7f612d299f7305e82fe6c33661e6871f2d86
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
Gate API v4
Welcome to Gate.io API APIv4 provides spot, margin and futures trading operations. There are public APIs to retrieve the real-time market statistics, and private APIs which needs authentication to trade on user's behalf. # noqa: E501
Contact: support@mail.gate.io
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from gate_api.api_client import ApiClient
from gate_api.exceptions import ApiTypeError, ApiValueError # noqa: F401
class DeliveryApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    """Create the API wrapper.

    :param api_client: optional preconfigured ApiClient; a default client
        is constructed when None is given.
    """
    # fall back to a fresh default client so the API object is always usable
    self.api_client = ApiClient() if api_client is None else api_client
def list_delivery_contracts(self, settle, **kwargs):  # noqa: E501
    """List all futures contracts  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous request:

    >>> thread = api.list_delivery_contracts(settle, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request; a single
        number is the total timeout, a (connection, read) tuple sets both.
    :rtype: list[gate_api.DeliveryContract]
    :return: If the method is called asynchronously,
        returns the request thread.
    """
    # request only the deserialized body, then delegate to the full variant
    kwargs.update(_return_http_data_only=True)
    return self.list_delivery_contracts_with_http_info(settle, **kwargs)  # noqa: E501
def list_delivery_contracts_with_http_info(self, settle, **kwargs):  # noqa: E501
    """List all futures contracts  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_delivery_contracts_with_http_info(settle, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :rtype: tuple(list[gate_api.DeliveryContract], status_code(int), headers(HTTPHeaderDict))
    :return: If the method is called asynchronously,
             returns the request thread.
    """
    # capture named args and the raw kwargs dict before other locals exist
    local_var_params = locals()

    all_params = ['settle']
    all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])

    # flatten **kwargs into local_var_params, rejecting unknown keywords
    for k, v in six.iteritems(local_var_params['kwargs']):
        if k not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method list_delivery_contracts" % k)
        local_var_params[k] = v
    del local_var_params['kwargs']
    # verify the required parameter 'settle' is set
    if self.api_client.client_side_validation and (
        'settle' not in local_var_params or local_var_params['settle'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `settle` when calling `list_delivery_contracts`"
        )  # noqa: E501

    collection_formats = {}

    # 'settle' is substituted into the URL template passed to call_api below
    path_params = {}
    if 'settle' in local_var_params:
        path_params['settle'] = local_var_params['settle']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # delegate the actual HTTP call (and optional async handling) to ApiClient
    return self.api_client.call_api(
        '/delivery/{settle}/contracts',
        'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[DeliveryContract]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
    )
def get_delivery_contract(self, settle, contract, **kwargs):  # noqa: E501
    """Get a single contract  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous request:

    >>> thread = api.get_delivery_contract(settle, contract, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request; a single
        number is the total timeout, a (connection, read) tuple sets both.
    :rtype: gate_api.DeliveryContract
    :return: If the method is called asynchronously,
        returns the request thread.
    """
    # request only the deserialized body, then delegate to the full variant
    kwargs.update(_return_http_data_only=True)
    return self.get_delivery_contract_with_http_info(settle, contract, **kwargs)  # noqa: E501
def get_delivery_contract_with_http_info(self, settle, contract, **kwargs):  # noqa: E501
    """Get a single contract  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_delivery_contract_with_http_info(settle, contract, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :rtype: tuple(gate_api.DeliveryContract, status_code(int), headers(HTTPHeaderDict))
    :return: If the method is called asynchronously,
             returns the request thread.
    """
    # capture named args and the raw kwargs dict before other locals exist
    local_var_params = locals()

    all_params = ['settle', 'contract']
    all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])

    # flatten **kwargs into local_var_params, rejecting unknown keywords
    for k, v in six.iteritems(local_var_params['kwargs']):
        if k not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method get_delivery_contract" % k)
        local_var_params[k] = v
    del local_var_params['kwargs']
    # verify the required parameter 'settle' is set
    if self.api_client.client_side_validation and (
        'settle' not in local_var_params or local_var_params['settle'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `settle` when calling `get_delivery_contract`"
        )  # noqa: E501
    # verify the required parameter 'contract' is set
    if self.api_client.client_side_validation and (
        'contract' not in local_var_params or local_var_params['contract'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `contract` when calling `get_delivery_contract`"
        )  # noqa: E501

    collection_formats = {}

    # both values are substituted into the URL template passed to call_api
    path_params = {}
    if 'settle' in local_var_params:
        path_params['settle'] = local_var_params['settle']  # noqa: E501
    if 'contract' in local_var_params:
        path_params['contract'] = local_var_params['contract']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # delegate the actual HTTP call (and optional async handling) to ApiClient
    return self.api_client.call_api(
        '/delivery/{settle}/contracts/{contract}',
        'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DeliveryContract',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
    )
def list_delivery_order_book(self, settle, contract, **kwargs):  # noqa: E501
    """Futures order book  # noqa: E501

    Bids will be sorted by price from high to low, while asks sorted reversely  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous request:

    >>> thread = api.list_delivery_order_book(settle, contract, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract (required)
    :param str interval: Order depth. 0 means no aggregation is applied. default to 0
    :param int limit: Maximum number of order depth data in asks or bids
    :param bool with_id: Whether the order book update ID will be returned. This ID increases by 1 on every order book update
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request; a single
        number is the total timeout, a (connection, read) tuple sets both.
    :rtype: gate_api.FuturesOrderBook
    :return: If the method is called asynchronously,
        returns the request thread.
    """
    # request only the deserialized body, then delegate to the full variant
    kwargs.update(_return_http_data_only=True)
    return self.list_delivery_order_book_with_http_info(settle, contract, **kwargs)  # noqa: E501
def list_delivery_order_book_with_http_info(self, settle, contract, **kwargs):  # noqa: E501
    """Futures order book  # noqa: E501

    Bids will be sorted by price from high to low, while asks sorted reversely  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_delivery_order_book_with_http_info(settle, contract, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract (required)
    :param str interval: Order depth. 0 means no aggregation is applied. default to 0
    :param int limit: Maximum number of order depth data in asks or bids
    :param bool with_id: Whether the order book update ID will be returned. This ID increases by 1 on every order book update
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :rtype: tuple(gate_api.FuturesOrderBook, status_code(int), headers(HTTPHeaderDict))
    :return: If the method is called asynchronously,
             returns the request thread.
    """
    # capture named args and the raw kwargs dict before other locals exist
    local_var_params = locals()

    all_params = ['settle', 'contract', 'interval', 'limit', 'with_id']
    all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])

    # flatten **kwargs into local_var_params, rejecting unknown keywords
    for k, v in six.iteritems(local_var_params['kwargs']):
        if k not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method list_delivery_order_book" % k)
        local_var_params[k] = v
    del local_var_params['kwargs']
    # verify the required parameter 'settle' is set
    if self.api_client.client_side_validation and (
        'settle' not in local_var_params or local_var_params['settle'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `settle` when calling `list_delivery_order_book`"
        )  # noqa: E501
    # verify the required parameter 'contract' is set
    if self.api_client.client_side_validation and (
        'contract' not in local_var_params or local_var_params['contract'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `contract` when calling `list_delivery_order_book`"
        )  # noqa: E501

    # client-side range checks for 'limit' (API accepts 1..50)
    if (
        self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] > 50
    ):  # noqa: E501
        raise ApiValueError(
            "Invalid value for parameter `limit` when calling `list_delivery_order_book`, must be a value less than or equal to `50`"
        )  # noqa: E501
    if (
        self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] < 1
    ):  # noqa: E501
        raise ApiValueError(
            "Invalid value for parameter `limit` when calling `list_delivery_order_book`, must be a value greater than or equal to `1`"
        )  # noqa: E501

    collection_formats = {}

    # only 'settle' goes into the URL path; the rest become query parameters
    path_params = {}
    if 'settle' in local_var_params:
        path_params['settle'] = local_var_params['settle']  # noqa: E501

    query_params = []
    if 'contract' in local_var_params and local_var_params['contract'] is not None:  # noqa: E501
        query_params.append(('contract', local_var_params['contract']))  # noqa: E501
    if 'interval' in local_var_params and local_var_params['interval'] is not None:  # noqa: E501
        query_params.append(('interval', local_var_params['interval']))  # noqa: E501
    if 'limit' in local_var_params and local_var_params['limit'] is not None:  # noqa: E501
        query_params.append(('limit', local_var_params['limit']))  # noqa: E501
    if 'with_id' in local_var_params and local_var_params['with_id'] is not None:  # noqa: E501
        query_params.append(('with_id', local_var_params['with_id']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # delegate the actual HTTP call (and optional async handling) to ApiClient
    return self.api_client.call_api(
        '/delivery/{settle}/order_book',
        'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='FuturesOrderBook',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
    )
def list_delivery_trades(self, settle, contract, **kwargs):  # noqa: E501
    """Futures trading history  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous request:

    >>> thread = api.list_delivery_trades(settle, contract, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract (required)
    :param int limit: Maximum number of records to be returned in a single list
    :param str last_id: Specify the starting point for this list based on a previously retrieved id This parameter is deprecated. Use `from` and `to` instead to limit time range
    :param int _from: Specify starting time in Unix seconds. If not specified, `to` and `limit` will be used to limit response items. If items between `from` and `to` are more than `limit`, only `limit` number will be returned.
    :param int to: Specify end time in Unix seconds, default to current time
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request; a single
        number is the total timeout, a (connection, read) tuple sets both.
    :rtype: list[gate_api.FuturesTrade]
    :return: If the method is called asynchronously,
        returns the request thread.
    """
    # request only the deserialized body, then delegate to the full variant
    kwargs.update(_return_http_data_only=True)
    return self.list_delivery_trades_with_http_info(settle, contract, **kwargs)  # noqa: E501
def list_delivery_trades_with_http_info(self, settle, contract, **kwargs):  # noqa: E501
    """Futures trading history  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_delivery_trades_with_http_info(settle, contract, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract (required)
    :param int limit: Maximum number of records to be returned in a single list
    :param str last_id: Specify the starting point for this list based on a previously retrieved id This parameter is deprecated. Use `from` and `to` instead to limit time range
    :param int _from: Specify starting time in Unix seconds. If not specified, `to` and `limit` will be used to limit response items. If items between `from` and `to` are more than `limit`, only `limit` number will be returned.
    :param int to: Specify end time in Unix seconds, default to current time
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :rtype: tuple(list[gate_api.FuturesTrade], status_code(int), headers(HTTPHeaderDict))
    :return: If the method is called asynchronously,
             returns the request thread.
    """
    # capture named args and the raw kwargs dict before other locals exist
    local_var_params = locals()

    all_params = ['settle', 'contract', 'limit', 'last_id', '_from', 'to']
    all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])

    # flatten **kwargs into local_var_params, rejecting unknown keywords
    for k, v in six.iteritems(local_var_params['kwargs']):
        if k not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method list_delivery_trades" % k)
        local_var_params[k] = v
    del local_var_params['kwargs']
    # verify the required parameter 'settle' is set
    if self.api_client.client_side_validation and (
        'settle' not in local_var_params or local_var_params['settle'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `settle` when calling `list_delivery_trades`"
        )  # noqa: E501
    # verify the required parameter 'contract' is set
    if self.api_client.client_side_validation and (
        'contract' not in local_var_params or local_var_params['contract'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `contract` when calling `list_delivery_trades`"
        )  # noqa: E501

    # client-side range checks for 'limit' (API accepts 1..1000)
    if (
        self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] > 1000
    ):  # noqa: E501
        raise ApiValueError(
            "Invalid value for parameter `limit` when calling `list_delivery_trades`, must be a value less than or equal to `1000`"
        )  # noqa: E501
    if (
        self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] < 1
    ):  # noqa: E501
        raise ApiValueError(
            "Invalid value for parameter `limit` when calling `list_delivery_trades`, must be a value greater than or equal to `1`"
        )  # noqa: E501

    collection_formats = {}

    # only 'settle' goes into the URL path; the rest become query parameters
    path_params = {}
    if 'settle' in local_var_params:
        path_params['settle'] = local_var_params['settle']  # noqa: E501

    query_params = []
    if 'contract' in local_var_params and local_var_params['contract'] is not None:  # noqa: E501
        query_params.append(('contract', local_var_params['contract']))  # noqa: E501
    if 'limit' in local_var_params and local_var_params['limit'] is not None:  # noqa: E501
        query_params.append(('limit', local_var_params['limit']))  # noqa: E501
    if 'last_id' in local_var_params and local_var_params['last_id'] is not None:  # noqa: E501
        query_params.append(('last_id', local_var_params['last_id']))  # noqa: E501
    # '_from' avoids shadowing the Python keyword; the wire name is 'from'
    if '_from' in local_var_params and local_var_params['_from'] is not None:  # noqa: E501
        query_params.append(('from', local_var_params['_from']))  # noqa: E501
    if 'to' in local_var_params and local_var_params['to'] is not None:  # noqa: E501
        query_params.append(('to', local_var_params['to']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # delegate the actual HTTP call (and optional async handling) to ApiClient
    return self.api_client.call_api(
        '/delivery/{settle}/trades',
        'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[FuturesTrade]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
    )
def list_delivery_candlesticks(self, settle, contract, **kwargs):  # noqa: E501
    """Get futures candlesticks  # noqa: E501

    Return specified contract candlesticks. If prefix `contract` with `mark_`, the contract's mark price candlesticks are returned; if prefix with `index_`, index price candlesticks will be returned. Maximum of 2000 points are returned in one query. Be sure not to exceed the limit when specifying `from`, `to` and `interval`  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous request:

    >>> thread = api.list_delivery_candlesticks(settle, contract, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract (required)
    :param int _from: Start time of candlesticks, formatted in Unix timestamp in seconds. Default to`to - 100 * interval` if not specified
    :param int to: End time of candlesticks, formatted in Unix timestamp in seconds. Default to current time
    :param int limit: Maximum recent data points to return. `limit` is conflicted with `from` and `to`. If either `from` or `to` is specified, request will be rejected.
    :param str interval: Interval time between data points
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request; a single
        number is the total timeout, a (connection, read) tuple sets both.
    :rtype: list[gate_api.FuturesCandlestick]
    :return: If the method is called asynchronously,
        returns the request thread.
    """
    # request only the deserialized body, then delegate to the full variant
    kwargs.update(_return_http_data_only=True)
    return self.list_delivery_candlesticks_with_http_info(settle, contract, **kwargs)  # noqa: E501
def list_delivery_candlesticks_with_http_info(self, settle, contract, **kwargs):  # noqa: E501
    """Get futures candlesticks  # noqa: E501

    Return specified contract candlesticks. If prefix `contract` with `mark_`, the contract's mark price candlesticks are returned; if prefix with `index_`, index price candlesticks will be returned. Maximum of 2000 points are returned in one query. Be sure not to exceed the limit when specifying `from`, `to` and `interval`  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.list_delivery_candlesticks_with_http_info(settle, contract, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract (required)
    :param int _from: Start time of candlesticks, formatted in Unix timestamp in seconds. Default to`to - 100 * interval` if not specified
    :param int to: End time of candlesticks, formatted in Unix timestamp in seconds. Default to current time
    :param int limit: Maximum recent data points to return. `limit` is conflicted with `from` and `to`. If either `from` or `to` is specified, request will be rejected.
    :param str interval: Interval time between data points
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :rtype: tuple(list[gate_api.FuturesCandlestick], status_code(int), headers(HTTPHeaderDict))
    :return: If the method is called asynchronously,
             returns the request thread.
    """
    # capture named args and the raw kwargs dict before other locals exist
    local_var_params = locals()

    all_params = ['settle', 'contract', '_from', 'to', 'limit', 'interval']
    all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])

    # flatten **kwargs into local_var_params, rejecting unknown keywords
    for k, v in six.iteritems(local_var_params['kwargs']):
        if k not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'" " to method list_delivery_candlesticks" % k
            )
        local_var_params[k] = v
    del local_var_params['kwargs']
    # verify the required parameter 'settle' is set
    if self.api_client.client_side_validation and (
        'settle' not in local_var_params or local_var_params['settle'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `settle` when calling `list_delivery_candlesticks`"
        )  # noqa: E501
    # verify the required parameter 'contract' is set
    if self.api_client.client_side_validation and (
        'contract' not in local_var_params or local_var_params['contract'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `contract` when calling `list_delivery_candlesticks`"
        )  # noqa: E501

    # client-side upper bound for 'limit' (at most 2000 points per query)
    if (
        self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] > 2000
    ):  # noqa: E501
        raise ApiValueError(
            "Invalid value for parameter `limit` when calling `list_delivery_candlesticks`, must be a value less than or equal to `2000`"
        )  # noqa: E501

    collection_formats = {}

    # only 'settle' goes into the URL path; the rest become query parameters
    path_params = {}
    if 'settle' in local_var_params:
        path_params['settle'] = local_var_params['settle']  # noqa: E501

    query_params = []
    if 'contract' in local_var_params and local_var_params['contract'] is not None:  # noqa: E501
        query_params.append(('contract', local_var_params['contract']))  # noqa: E501
    # '_from' avoids shadowing the Python keyword; the wire name is 'from'
    if '_from' in local_var_params and local_var_params['_from'] is not None:  # noqa: E501
        query_params.append(('from', local_var_params['_from']))  # noqa: E501
    if 'to' in local_var_params and local_var_params['to'] is not None:  # noqa: E501
        query_params.append(('to', local_var_params['to']))  # noqa: E501
    if 'limit' in local_var_params and local_var_params['limit'] is not None:  # noqa: E501
        query_params.append(('limit', local_var_params['limit']))  # noqa: E501
    if 'interval' in local_var_params and local_var_params['interval'] is not None:  # noqa: E501
        query_params.append(('interval', local_var_params['interval']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = []  # noqa: E501

    # delegate the actual HTTP call (and optional async handling) to ApiClient
    return self.api_client.call_api(
        '/delivery/{settle}/candlesticks',
        'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[FuturesCandlestick]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
    )
def list_delivery_tickers(self, settle, **kwargs):  # noqa: E501
    """List futures tickers  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.list_delivery_tickers(settle, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout as a single number, or a
        (connection, read) timeout pair.
    :rtype: list[gate_api.FuturesTicker]
    :return: the request thread when called asynchronously.
    """
    # The plain variant returns only the deserialized payload, not the
    # (data, status, headers) tuple produced by the _with_http_info variant.
    kwargs.update(_return_http_data_only=True)
    return self.list_delivery_tickers_with_http_info(settle, **kwargs)  # noqa: E501
def list_delivery_tickers_with_http_info(self, settle, **kwargs):  # noqa: E501
    """List futures tickers  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.list_delivery_tickers_with_http_info(settle, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout as a single number, or a
        (connection, read) timeout pair.
    :rtype: tuple(list[gate_api.FuturesTicker], status_code(int), headers(HTTPHeaderDict))
    :return: the request thread when called asynchronously.
    """
    local_var_params = locals()

    # Every keyword this endpoint accepts, endpoint params plus client options.
    all_params = ['settle', 'contract', 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout']
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s' to method list_delivery_tickers" % key)
        local_var_params[key] = val
    del local_var_params['kwargs']

    # `settle` is a required path parameter.
    if self.api_client.client_side_validation and local_var_params.get('settle') is None:
        raise ApiValueError("Missing the required parameter `settle` when calling `list_delivery_tickers`")

    path_params = {'settle': local_var_params['settle']}

    query_params = []
    if local_var_params.get('contract') is not None:
        query_params.append(('contract', local_var_params['contract']))

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}

    return self.api_client.call_api(
        '/delivery/{settle}/tickers',
        'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[FuturesTicker]',  # noqa: E501
        auth_settings=[],  # public market-data endpoint: no authentication
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
    )
def list_delivery_insurance_ledger(self, settle, **kwargs):  # noqa: E501
    """Futures insurance balance history  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.list_delivery_insurance_ledger(settle, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param int limit: Maximum number of records to be returned in a single list
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout as a single number, or a
        (connection, read) timeout pair.
    :rtype: list[gate_api.InsuranceRecord]
    :return: the request thread when called asynchronously.
    """
    # The plain variant returns only the deserialized payload, not the
    # (data, status, headers) tuple produced by the _with_http_info variant.
    kwargs.update(_return_http_data_only=True)
    return self.list_delivery_insurance_ledger_with_http_info(settle, **kwargs)  # noqa: E501
def list_delivery_insurance_ledger_with_http_info(self, settle, **kwargs):  # noqa: E501
    """Futures insurance balance history  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.list_delivery_insurance_ledger_with_http_info(settle, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param int limit: Maximum number of records to be returned in a single list
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout as a single number, or a
        (connection, read) timeout pair.
    :rtype: tuple(list[gate_api.InsuranceRecord], status_code(int), headers(HTTPHeaderDict))
    :return: If the method is called asynchronously, returns the request thread.
    :raises ApiTypeError: on an unrecognized keyword argument
    :raises ApiValueError: when `settle` is missing or `limit` is out of [1, 1000]
    """
    local_var_params = locals()

    # Every keyword this endpoint accepts, endpoint params plus client options.
    all_params = ['settle', 'limit']
    all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])
    for k, v in six.iteritems(local_var_params['kwargs']):
        if k not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'" " to method list_delivery_insurance_ledger" % k
            )
        local_var_params[k] = v
    del local_var_params['kwargs']

    # verify the required parameter 'settle' is set
    if self.api_client.client_side_validation and local_var_params.get('settle') is None:
        raise ApiValueError(
            "Missing the required parameter `settle` when calling `list_delivery_insurance_ledger`"
        )  # noqa: E501

    # Range-check `limit` only when it carries a value. Previously a caller
    # passing an explicit limit=None hit `None > 1000` and crashed with a
    # TypeError on Python 3, even though None is later treated as "absent"
    # when the query string is assembled.
    limit = local_var_params.get('limit')
    if self.api_client.client_side_validation and limit is not None:
        if limit > 1000:
            raise ApiValueError(
                "Invalid value for parameter `limit` when calling `list_delivery_insurance_ledger`, must be a value less than or equal to `1000`"
            )  # noqa: E501
        if limit < 1:
            raise ApiValueError(
                "Invalid value for parameter `limit` when calling `list_delivery_insurance_ledger`, must be a value greater than or equal to `1`"
            )  # noqa: E501

    path_params = {'settle': local_var_params['settle']}

    query_params = []
    if limit is not None:
        query_params.append(('limit', limit))

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}

    return self.api_client.call_api(
        '/delivery/{settle}/insurance',
        'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[InsuranceRecord]',  # noqa: E501
        auth_settings=[],  # public endpoint: no authentication
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
    )
def list_delivery_accounts(self, settle, **kwargs):  # noqa: E501
    """Query futures account  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.list_delivery_accounts(settle, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout as a single number, or a
        (connection, read) timeout pair.
    :rtype: gate_api.FuturesAccount
    :return: the request thread when called asynchronously.
    """
    # The plain variant returns only the deserialized payload, not the
    # (data, status, headers) tuple produced by the _with_http_info variant.
    kwargs.update(_return_http_data_only=True)
    return self.list_delivery_accounts_with_http_info(settle, **kwargs)  # noqa: E501
def list_delivery_accounts_with_http_info(self, settle, **kwargs):  # noqa: E501
    """Query futures account  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.list_delivery_accounts_with_http_info(settle, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout as a single number, or a
        (connection, read) timeout pair.
    :rtype: tuple(gate_api.FuturesAccount, status_code(int), headers(HTTPHeaderDict))
    :return: the request thread when called asynchronously.
    """
    local_var_params = locals()

    # Every keyword this endpoint accepts, endpoint params plus client options.
    all_params = ['settle', 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout']
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s' to method list_delivery_accounts" % key)
        local_var_params[key] = val
    del local_var_params['kwargs']

    # `settle` is a required path parameter.
    if self.api_client.client_side_validation and local_var_params.get('settle') is None:
        raise ApiValueError("Missing the required parameter `settle` when calling `list_delivery_accounts`")

    path_params = {'settle': local_var_params['settle']}

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}

    return self.api_client.call_api(
        '/delivery/{settle}/accounts',
        'GET',
        path_params,
        [],  # no query parameters for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='FuturesAccount',  # noqa: E501
        auth_settings=['apiv4'],  # private endpoint: API-v4 key/secret signing
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
    )
def list_delivery_account_book(self, settle, **kwargs):  # noqa: E501
    """Query account book  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.list_delivery_account_book(settle, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param int limit: Maximum number of records to be returned in a single list
    :param int _from: Start timestamp
    :param int to: End timestamp
    :param str type: Changing Type: - dnw: Deposit & Withdraw - pnl: Profit & Loss by reducing position - fee: Trading fee - refr: Referrer rebate - fund: Funding - point_dnw: POINT Deposit & Withdraw - point_fee: POINT Trading fee - point_refr: POINT Referrer rebate
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout as a single number, or a
        (connection, read) timeout pair.
    :rtype: list[gate_api.FuturesAccountBook]
    :return: the request thread when called asynchronously.
    """
    # The plain variant returns only the deserialized payload, not the
    # (data, status, headers) tuple produced by the _with_http_info variant.
    kwargs.update(_return_http_data_only=True)
    return self.list_delivery_account_book_with_http_info(settle, **kwargs)  # noqa: E501
def list_delivery_account_book_with_http_info(self, settle, **kwargs):  # noqa: E501
    """Query account book  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.list_delivery_account_book_with_http_info(settle, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param int limit: Maximum number of records to be returned in a single list
    :param int _from: Start timestamp
    :param int to: End timestamp
    :param str type: Changing Type: - dnw: Deposit & Withdraw - pnl: Profit & Loss by reducing position - fee: Trading fee - refr: Referrer rebate - fund: Funding - point_dnw: POINT Deposit & Withdraw - point_fee: POINT Trading fee - point_refr: POINT Referrer rebate
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout as a single number, or a
        (connection, read) timeout pair.
    :rtype: tuple(list[gate_api.FuturesAccountBook], status_code(int), headers(HTTPHeaderDict))
    :return: If the method is called asynchronously, returns the request thread.
    :raises ApiTypeError: on an unrecognized keyword argument
    :raises ApiValueError: when `settle` is missing or `limit` is out of [1, 1000]
    """
    local_var_params = locals()

    # Every keyword this endpoint accepts, endpoint params plus client options.
    all_params = ['settle', 'limit', '_from', 'to', 'type']
    all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])
    for k, v in six.iteritems(local_var_params['kwargs']):
        if k not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'" " to method list_delivery_account_book" % k
            )
        local_var_params[k] = v
    del local_var_params['kwargs']

    # verify the required parameter 'settle' is set
    if self.api_client.client_side_validation and local_var_params.get('settle') is None:
        raise ApiValueError(
            "Missing the required parameter `settle` when calling `list_delivery_account_book`"
        )  # noqa: E501

    # Range-check `limit` only when it carries a value. Previously a caller
    # passing an explicit limit=None hit `None > 1000` and crashed with a
    # TypeError on Python 3, even though None is later treated as "absent"
    # when the query string is assembled.
    limit = local_var_params.get('limit')
    if self.api_client.client_side_validation and limit is not None:
        if limit > 1000:
            raise ApiValueError(
                "Invalid value for parameter `limit` when calling `list_delivery_account_book`, must be a value less than or equal to `1000`"
            )  # noqa: E501
        if limit < 1:
            raise ApiValueError(
                "Invalid value for parameter `limit` when calling `list_delivery_account_book`, must be a value greater than or equal to `1`"
            )  # noqa: E501

    path_params = {'settle': local_var_params['settle']}

    # `_from` is renamed on the wire: `from` is a Python keyword.
    query_params = []
    if limit is not None:
        query_params.append(('limit', limit))
    if local_var_params.get('_from') is not None:
        query_params.append(('from', local_var_params['_from']))
    if local_var_params.get('to') is not None:
        query_params.append(('to', local_var_params['to']))
    if local_var_params.get('type') is not None:
        query_params.append(('type', local_var_params['type']))

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}

    return self.api_client.call_api(
        '/delivery/{settle}/account_book',
        'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[FuturesAccountBook]',  # noqa: E501
        auth_settings=['apiv4'],  # private endpoint: API-v4 key/secret signing
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
    )
def list_delivery_positions(self, settle, **kwargs):  # noqa: E501
    """List all positions of a user  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.list_delivery_positions(settle, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout as a single number, or a
        (connection, read) timeout pair.
    :rtype: list[gate_api.Position]
    :return: the request thread when called asynchronously.
    """
    # The plain variant returns only the deserialized payload, not the
    # (data, status, headers) tuple produced by the _with_http_info variant.
    kwargs.update(_return_http_data_only=True)
    return self.list_delivery_positions_with_http_info(settle, **kwargs)  # noqa: E501
def list_delivery_positions_with_http_info(self, settle, **kwargs):  # noqa: E501
    """List all positions of a user  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.list_delivery_positions_with_http_info(settle, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout as a single number, or a
        (connection, read) timeout pair.
    :rtype: tuple(list[gate_api.Position], status_code(int), headers(HTTPHeaderDict))
    :return: the request thread when called asynchronously.
    """
    local_var_params = locals()

    # Every keyword this endpoint accepts, endpoint params plus client options.
    all_params = ['settle', 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout']
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s' to method list_delivery_positions" % key)
        local_var_params[key] = val
    del local_var_params['kwargs']

    # `settle` is a required path parameter.
    if self.api_client.client_side_validation and local_var_params.get('settle') is None:
        raise ApiValueError("Missing the required parameter `settle` when calling `list_delivery_positions`")

    path_params = {'settle': local_var_params['settle']}

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}

    return self.api_client.call_api(
        '/delivery/{settle}/positions',
        'GET',
        path_params,
        [],  # no query parameters for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[Position]',  # noqa: E501
        auth_settings=['apiv4'],  # private endpoint: API-v4 key/secret signing
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
    )
def get_delivery_position(self, settle, contract, **kwargs):  # noqa: E501
    """Get single position  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.get_delivery_position(settle, contract, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout as a single number, or a
        (connection, read) timeout pair.
    :rtype: gate_api.Position
    :return: the request thread when called asynchronously.
    """
    # The plain variant returns only the deserialized payload, not the
    # (data, status, headers) tuple produced by the _with_http_info variant.
    kwargs.update(_return_http_data_only=True)
    return self.get_delivery_position_with_http_info(settle, contract, **kwargs)  # noqa: E501
def get_delivery_position_with_http_info(self, settle, contract, **kwargs):  # noqa: E501
    """Get single position  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.get_delivery_position_with_http_info(settle, contract, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract (required)
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout as a single number, or a
        (connection, read) timeout pair.
    :rtype: tuple(gate_api.Position, status_code(int), headers(HTTPHeaderDict))
    :return: the request thread when called asynchronously.
    """
    local_var_params = locals()

    # Every keyword this endpoint accepts, endpoint params plus client options.
    all_params = ['settle', 'contract', 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout']
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError("Got an unexpected keyword argument '%s' to method get_delivery_position" % key)
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Both path parameters are mandatory.
    if self.api_client.client_side_validation and local_var_params.get('settle') is None:
        raise ApiValueError("Missing the required parameter `settle` when calling `get_delivery_position`")
    if self.api_client.client_side_validation and local_var_params.get('contract') is None:
        raise ApiValueError("Missing the required parameter `contract` when calling `get_delivery_position`")

    path_params = {
        'settle': local_var_params['settle'],
        'contract': local_var_params['contract'],
    }

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}

    return self.api_client.call_api(
        '/delivery/{settle}/positions/{contract}',
        'GET',
        path_params,
        [],  # no query parameters for this endpoint
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Position',  # noqa: E501
        auth_settings=['apiv4'],  # private endpoint: API-v4 key/secret signing
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
    )
def update_delivery_position_margin(self, settle, contract, change, **kwargs):  # noqa: E501
    """Update position margin  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.update_delivery_position_margin(settle, contract, change, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract (required)
    :param str change: Margin change. Use positive number to increase margin, negative number otherwise. (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout as a single number, or a
        (connection, read) timeout pair.
    :rtype: gate_api.Position
    :return: the request thread when called asynchronously.
    """
    # The plain variant returns only the deserialized payload, not the
    # (data, status, headers) tuple produced by the _with_http_info variant.
    kwargs.update(_return_http_data_only=True)
    return self.update_delivery_position_margin_with_http_info(settle, contract, change, **kwargs)  # noqa: E501
def update_delivery_position_margin_with_http_info(self, settle, contract, change, **kwargs):  # noqa: E501
    """Update position margin  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.update_delivery_position_margin_with_http_info(settle, contract, change, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract (required)
    :param str change: Margin change. Use positive number to increase margin, negative number otherwise. (required)
    :param _return_http_data_only: response data without head status code and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout as a single number, or a
        (connection, read) timeout pair.
    :rtype: tuple(gate_api.Position, status_code(int), headers(HTTPHeaderDict))
    :return: the request thread when called asynchronously.
    """
    local_var_params = locals()

    # Every keyword this endpoint accepts, endpoint params plus client options.
    all_params = [
        'settle',
        'contract',
        'change',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s' to method update_delivery_position_margin" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # All three endpoint parameters are mandatory.
    if self.api_client.client_side_validation and local_var_params.get('settle') is None:
        raise ApiValueError("Missing the required parameter `settle` when calling `update_delivery_position_margin`")
    if self.api_client.client_side_validation and local_var_params.get('contract') is None:
        raise ApiValueError(
            "Missing the required parameter `contract` when calling `update_delivery_position_margin`"
        )
    if self.api_client.client_side_validation and local_var_params.get('change') is None:
        raise ApiValueError("Missing the required parameter `change` when calling `update_delivery_position_margin`")

    path_params = {
        'settle': local_var_params['settle'],
        'contract': local_var_params['contract'],
    }

    # `change` travels in the query string even though this is a POST.
    query_params = []
    if local_var_params.get('change') is not None:
        query_params.append(('change', local_var_params['change']))

    header_params = {'Accept': self.api_client.select_header_accept(['application/json'])}

    return self.api_client.call_api(
        '/delivery/{settle}/positions/{contract}/margin',
        'POST',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Position',  # noqa: E501
        auth_settings=['apiv4'],  # private endpoint: API-v4 key/secret signing
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={},
    )
def update_delivery_position_leverage(self, settle, contract, leverage, **kwargs):  # noqa: E501
    """Update position leverage  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.update_delivery_position_leverage(settle, contract, leverage, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract (required)
    :param str leverage: New position leverage (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout as a single number, or a
        (connection, read) timeout pair.
    :rtype: gate_api.Position
    :return: the request thread when called asynchronously.
    """
    # The plain variant returns only the deserialized payload, not the
    # (data, status, headers) tuple produced by the _with_http_info variant.
    kwargs.update(_return_http_data_only=True)
    return self.update_delivery_position_leverage_with_http_info(settle, contract, leverage, **kwargs)  # noqa: E501
def update_delivery_position_leverage_with_http_info(self, settle, contract, leverage, **kwargs): # noqa: E501
"""Update position leverage # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_delivery_position_leverage_with_http_info(settle, contract, leverage, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param str contract: Futures contract (required)
:param str leverage: New position leverage (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: tuple(gate_api.Position, status_code(int), headers(HTTPHeaderDict))
:return: If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['settle', 'contract', 'leverage']
all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])
for k, v in six.iteritems(local_var_params['kwargs']):
if k not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'" " to method update_delivery_position_leverage" % k
)
local_var_params[k] = v
del local_var_params['kwargs']
# verify the required parameter 'settle' is set
if self.api_client.client_side_validation and (
'settle' not in local_var_params or local_var_params['settle'] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `settle` when calling `update_delivery_position_leverage`"
) # noqa: E501
# verify the required parameter 'contract' is set
if self.api_client.client_side_validation and (
'contract' not in local_var_params or local_var_params['contract'] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `contract` when calling `update_delivery_position_leverage`"
) # noqa: E501
# verify the required parameter 'leverage' is set
if self.api_client.client_side_validation and (
'leverage' not in local_var_params or local_var_params['leverage'] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `leverage` when calling `update_delivery_position_leverage`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'settle' in local_var_params:
path_params['settle'] = local_var_params['settle'] # noqa: E501
if 'contract' in local_var_params:
path_params['contract'] = local_var_params['contract'] # noqa: E501
query_params = []
if 'leverage' in local_var_params and local_var_params['leverage'] is not None: # noqa: E501
query_params.append(('leverage', local_var_params['leverage'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiv4'] # noqa: E501
return self.api_client.call_api(
'/delivery/{settle}/positions/{contract}/leverage',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Position', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
)
def update_delivery_position_risk_limit(self, settle, contract, risk_limit, **kwargs): # noqa: E501
"""Update position risk limit # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_delivery_position_risk_limit(settle, contract, risk_limit, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param str contract: Futures contract (required)
:param str risk_limit: New position risk limit (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: gate_api.Position
:return: If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_delivery_position_risk_limit_with_http_info(
settle, contract, risk_limit, **kwargs
) # noqa: E501
def update_delivery_position_risk_limit_with_http_info(self, settle, contract, risk_limit, **kwargs): # noqa: E501
"""Update position risk limit # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_delivery_position_risk_limit_with_http_info(settle, contract, risk_limit, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param str contract: Futures contract (required)
:param str risk_limit: New position risk limit (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: tuple(gate_api.Position, status_code(int), headers(HTTPHeaderDict))
:return: If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['settle', 'contract', 'risk_limit']
all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])
for k, v in six.iteritems(local_var_params['kwargs']):
if k not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'" " to method update_delivery_position_risk_limit" % k
)
local_var_params[k] = v
del local_var_params['kwargs']
# verify the required parameter 'settle' is set
if self.api_client.client_side_validation and (
'settle' not in local_var_params or local_var_params['settle'] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `settle` when calling `update_delivery_position_risk_limit`"
) # noqa: E501
# verify the required parameter 'contract' is set
if self.api_client.client_side_validation and (
'contract' not in local_var_params or local_var_params['contract'] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `contract` when calling `update_delivery_position_risk_limit`"
) # noqa: E501
# verify the required parameter 'risk_limit' is set
if self.api_client.client_side_validation and (
'risk_limit' not in local_var_params or local_var_params['risk_limit'] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `risk_limit` when calling `update_delivery_position_risk_limit`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'settle' in local_var_params:
path_params['settle'] = local_var_params['settle'] # noqa: E501
if 'contract' in local_var_params:
path_params['contract'] = local_var_params['contract'] # noqa: E501
query_params = []
if 'risk_limit' in local_var_params and local_var_params['risk_limit'] is not None: # noqa: E501
query_params.append(('risk_limit', local_var_params['risk_limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiv4'] # noqa: E501
return self.api_client.call_api(
'/delivery/{settle}/positions/{contract}/risk_limit',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Position', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
)
def list_delivery_orders(self, settle, status, **kwargs): # noqa: E501
"""List futures orders # noqa: E501
Zero-fill order cannot be retrieved for 60 seconds after cancellation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_delivery_orders(settle, status, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param str status: Only list the orders with this status (required)
:param str contract: Futures contract
:param int limit: Maximum number of records to be returned in a single list
:param int offset: List offset, starting from 0
:param str last_id: Specify list staring point using the `id` of last record in previous list-query results
:param int count_total: Whether to return total number matched. Default to 0(no return)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: list[gate_api.FuturesOrder]
:return: If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_delivery_orders_with_http_info(settle, status, **kwargs) # noqa: E501
def list_delivery_orders_with_http_info(self, settle, status, **kwargs): # noqa: E501
"""List futures orders # noqa: E501
Zero-fill order cannot be retrieved for 60 seconds after cancellation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_delivery_orders_with_http_info(settle, status, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param str status: Only list the orders with this status (required)
:param str contract: Futures contract
:param int limit: Maximum number of records to be returned in a single list
:param int offset: List offset, starting from 0
:param str last_id: Specify list staring point using the `id` of last record in previous list-query results
:param int count_total: Whether to return total number matched. Default to 0(no return)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: tuple(list[gate_api.FuturesOrder], status_code(int), headers(HTTPHeaderDict))
:return: If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['settle', 'status', 'contract', 'limit', 'offset', 'last_id', 'count_total']
all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])
for k, v in six.iteritems(local_var_params['kwargs']):
if k not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method list_delivery_orders" % k)
local_var_params[k] = v
del local_var_params['kwargs']
# verify the required parameter 'settle' is set
if self.api_client.client_side_validation and (
'settle' not in local_var_params or local_var_params['settle'] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `settle` when calling `list_delivery_orders`"
) # noqa: E501
# verify the required parameter 'status' is set
if self.api_client.client_side_validation and (
'status' not in local_var_params or local_var_params['status'] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `status` when calling `list_delivery_orders`"
) # noqa: E501
if (
self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] > 1000
): # noqa: E501
raise ApiValueError(
"Invalid value for parameter `limit` when calling `list_delivery_orders`, must be a value less than or equal to `1000`"
) # noqa: E501
if (
self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] < 1
): # noqa: E501
raise ApiValueError(
"Invalid value for parameter `limit` when calling `list_delivery_orders`, must be a value greater than or equal to `1`"
) # noqa: E501
if (
self.api_client.client_side_validation and 'offset' in local_var_params and local_var_params['offset'] < 0
): # noqa: E501
raise ApiValueError(
"Invalid value for parameter `offset` when calling `list_delivery_orders`, must be a value greater than or equal to `0`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'settle' in local_var_params:
path_params['settle'] = local_var_params['settle'] # noqa: E501
query_params = []
if 'contract' in local_var_params and local_var_params['contract'] is not None: # noqa: E501
query_params.append(('contract', local_var_params['contract'])) # noqa: E501
if 'status' in local_var_params and local_var_params['status'] is not None: # noqa: E501
query_params.append(('status', local_var_params['status'])) # noqa: E501
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
if 'last_id' in local_var_params and local_var_params['last_id'] is not None: # noqa: E501
query_params.append(('last_id', local_var_params['last_id'])) # noqa: E501
if 'count_total' in local_var_params and local_var_params['count_total'] is not None: # noqa: E501
query_params.append(('count_total', local_var_params['count_total'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiv4'] # noqa: E501
return self.api_client.call_api(
'/delivery/{settle}/orders',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[FuturesOrder]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
)
def create_delivery_order(self, settle, futures_order, **kwargs): # noqa: E501
"""Create a futures order # noqa: E501
Zero-fill order cannot be retrieved for 60 seconds after cancellation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_delivery_order(settle, futures_order, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param FuturesOrder futures_order: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: gate_api.FuturesOrder
:return: If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_delivery_order_with_http_info(settle, futures_order, **kwargs) # noqa: E501
def create_delivery_order_with_http_info(self, settle, futures_order, **kwargs): # noqa: E501
"""Create a futures order # noqa: E501
Zero-fill order cannot be retrieved for 60 seconds after cancellation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_delivery_order_with_http_info(settle, futures_order, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param FuturesOrder futures_order: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: tuple(gate_api.FuturesOrder, status_code(int), headers(HTTPHeaderDict))
:return: If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['settle', 'futures_order']
all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])
for k, v in six.iteritems(local_var_params['kwargs']):
if k not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method create_delivery_order" % k)
local_var_params[k] = v
del local_var_params['kwargs']
# verify the required parameter 'settle' is set
if self.api_client.client_side_validation and (
'settle' not in local_var_params or local_var_params['settle'] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `settle` when calling `create_delivery_order`"
) # noqa: E501
# verify the required parameter 'futures_order' is set
if self.api_client.client_side_validation and (
'futures_order' not in local_var_params or local_var_params['futures_order'] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `futures_order` when calling `create_delivery_order`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'settle' in local_var_params:
path_params['settle'] = local_var_params['settle'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'futures_order' in local_var_params:
body_params = local_var_params['futures_order']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']
) # noqa: E501
# Authentication setting
auth_settings = ['apiv4'] # noqa: E501
return self.api_client.call_api(
'/delivery/{settle}/orders',
'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FuturesOrder', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
)
def cancel_delivery_orders(self, settle, contract, **kwargs): # noqa: E501
"""Cancel all `open` orders matched # noqa: E501
Zero-fill order cannot be retrieved for 60 seconds after cancellation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.cancel_delivery_orders(settle, contract, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param str contract: Futures contract (required)
:param str side: All bids or asks. Both included if not specified
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: list[gate_api.FuturesOrder]
:return: If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.cancel_delivery_orders_with_http_info(settle, contract, **kwargs) # noqa: E501
def cancel_delivery_orders_with_http_info(self, settle, contract, **kwargs): # noqa: E501
"""Cancel all `open` orders matched # noqa: E501
Zero-fill order cannot be retrieved for 60 seconds after cancellation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.cancel_delivery_orders_with_http_info(settle, contract, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param str contract: Futures contract (required)
:param str side: All bids or asks. Both included if not specified
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: tuple(list[gate_api.FuturesOrder], status_code(int), headers(HTTPHeaderDict))
:return: If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['settle', 'contract', 'side']
all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])
for k, v in six.iteritems(local_var_params['kwargs']):
if k not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method cancel_delivery_orders" % k)
local_var_params[k] = v
del local_var_params['kwargs']
# verify the required parameter 'settle' is set
if self.api_client.client_side_validation and (
'settle' not in local_var_params or local_var_params['settle'] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `settle` when calling `cancel_delivery_orders`"
) # noqa: E501
# verify the required parameter 'contract' is set
if self.api_client.client_side_validation and (
'contract' not in local_var_params or local_var_params['contract'] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `contract` when calling `cancel_delivery_orders`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'settle' in local_var_params:
path_params['settle'] = local_var_params['settle'] # noqa: E501
query_params = []
if 'contract' in local_var_params and local_var_params['contract'] is not None: # noqa: E501
query_params.append(('contract', local_var_params['contract'])) # noqa: E501
if 'side' in local_var_params and local_var_params['side'] is not None: # noqa: E501
query_params.append(('side', local_var_params['side'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiv4'] # noqa: E501
return self.api_client.call_api(
'/delivery/{settle}/orders',
'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[FuturesOrder]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
)
def get_delivery_order(self, settle, order_id, **kwargs): # noqa: E501
"""Get a single order # noqa: E501
Zero-fill order cannot be retrieved for 60 seconds after cancellation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_delivery_order(settle, order_id, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param str order_id: Retrieve the data of the order with the specified ID (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: gate_api.FuturesOrder
:return: If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_delivery_order_with_http_info(settle, order_id, **kwargs) # noqa: E501
def get_delivery_order_with_http_info(self, settle, order_id, **kwargs): # noqa: E501
"""Get a single order # noqa: E501
Zero-fill order cannot be retrieved for 60 seconds after cancellation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_delivery_order_with_http_info(settle, order_id, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param str order_id: Retrieve the data of the order with the specified ID (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: tuple(gate_api.FuturesOrder, status_code(int), headers(HTTPHeaderDict))
:return: If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['settle', 'order_id']
all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])
for k, v in six.iteritems(local_var_params['kwargs']):
if k not in all_params:
raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method get_delivery_order" % k)
local_var_params[k] = v
del local_var_params['kwargs']
# verify the required parameter 'settle' is set
if self.api_client.client_side_validation and (
'settle' not in local_var_params or local_var_params['settle'] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `settle` when calling `get_delivery_order`"
) # noqa: E501
# verify the required parameter 'order_id' is set
if self.api_client.client_side_validation and (
'order_id' not in local_var_params or local_var_params['order_id'] is None # noqa: E501
): # noqa: E501
raise ApiValueError(
"Missing the required parameter `order_id` when calling `get_delivery_order`"
) # noqa: E501
collection_formats = {}
path_params = {}
if 'settle' in local_var_params:
path_params['settle'] = local_var_params['settle'] # noqa: E501
if 'order_id' in local_var_params:
path_params['order_id'] = local_var_params['order_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiv4'] # noqa: E501
return self.api_client.call_api(
'/delivery/{settle}/orders/{order_id}',
'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FuturesOrder', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats,
)
def cancel_delivery_order(self, settle, order_id, **kwargs): # noqa: E501
"""Cancel a single order # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.cancel_delivery_order(settle, order_id, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param str order_id: Retrieve the data of the order with the specified ID (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: gate_api.FuturesOrder
:return: If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.cancel_delivery_order_with_http_info(settle, order_id, **kwargs) # noqa: E501
    def cancel_delivery_order_with_http_info(self, settle, order_id, **kwargs):  # noqa: E501
        """Cancel a single order (DELETE /delivery/{settle}/orders/{order_id}).  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.cancel_delivery_order_with_http_info(settle, order_id, async_req=True)
        >>> result = thread.get()

        :param bool async_req: execute request asynchronously
        :param str settle: Settle currency (required)
        :param str order_id: Retrieve the data of the order with the specified ID (required)
        :param _return_http_data_only: return response data only, without
            status code and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
            be returned without reading/decoding response data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be a
            pair (tuple) of (connection, read) timeouts.
        :rtype: tuple(gate_api.FuturesOrder, status_code(int), headers(HTTPHeaderDict))
        :return: If the method is called asynchronously,
            returns the request thread.
        :raises ApiTypeError: if an unexpected keyword argument is supplied
        :raises ApiValueError: if a required parameter is missing (only when
            client-side validation is enabled)
        """
        # NOTE: locals() must be the first statement so the snapshot contains
        # exactly the declared parameters plus ``kwargs`` and nothing else.
        local_var_params = locals()
        all_params = ['settle', 'order_id']
        all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])
        # Reject unknown keyword arguments and fold the known ones into the
        # parameter snapshot; ``kwargs`` itself is then dropped.
        for k, v in six.iteritems(local_var_params['kwargs']):
            if k not in all_params:
                raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method cancel_delivery_order" % k)
            local_var_params[k] = v
        del local_var_params['kwargs']
        # verify the required parameter 'settle' is set
        if self.api_client.client_side_validation and (
            'settle' not in local_var_params or local_var_params['settle'] is None  # noqa: E501
        ):  # noqa: E501
            raise ApiValueError(
                "Missing the required parameter `settle` when calling `cancel_delivery_order`"
            )  # noqa: E501
        # verify the required parameter 'order_id' is set
        if self.api_client.client_side_validation and (
            'order_id' not in local_var_params or local_var_params['order_id'] is None  # noqa: E501
        ):  # noqa: E501
            raise ApiValueError(
                "Missing the required parameter `order_id` when calling `cancel_delivery_order`"
            )  # noqa: E501
        # Assemble the pieces of the HTTP request; this endpoint uses only
        # path parameters (no query string, form fields, or request body).
        collection_formats = {}
        path_params = {}
        if 'settle' in local_var_params:
            path_params['settle'] = local_var_params['settle']  # noqa: E501
        if 'order_id' in local_var_params:
            path_params['order_id'] = local_var_params['order_id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['apiv4']  # noqa: E501
        # Dispatch through the shared client; it handles (de)serialization,
        # auth signing, and the async_req threading option.
        return self.api_client.call_api(
            '/delivery/{settle}/orders/{order_id}',
            'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='FuturesOrder',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
        )
def get_my_delivery_trades(self, settle, **kwargs): # noqa: E501
"""List personal trading history # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_my_delivery_trades(settle, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param str contract: Futures contract
:param int order: Futures order ID, return related data only if specified
:param int limit: Maximum number of records to be returned in a single list
:param int offset: List offset, starting from 0
:param str last_id: Specify list staring point using the `id` of last record in previous list-query results
:param int count_total: Whether to return total number matched. Default to 0(no return)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: list[gate_api.MyFuturesTrade]
:return: If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_my_delivery_trades_with_http_info(settle, **kwargs) # noqa: E501
    def get_my_delivery_trades_with_http_info(self, settle, **kwargs):  # noqa: E501
        """List personal trading history (GET /delivery/{settle}/my_trades).  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_my_delivery_trades_with_http_info(settle, async_req=True)
        >>> result = thread.get()

        :param bool async_req: execute request asynchronously
        :param str settle: Settle currency (required)
        :param str contract: Futures contract
        :param int order: Futures order ID, return related data only if specified
        :param int limit: Maximum number of records to be returned in a single list
        :param int offset: List offset, starting from 0
        :param str last_id: Specify list starting point using the `id` of last record in previous list-query results
        :param int count_total: Whether to return total number matched. Default to 0(no return)
        :param _return_http_data_only: return response data only, without
            status code and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
            be returned without reading/decoding response data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be a
            pair (tuple) of (connection, read) timeouts.
        :rtype: tuple(list[gate_api.MyFuturesTrade], status_code(int), headers(HTTPHeaderDict))
        :return: If the method is called asynchronously,
            returns the request thread.
        :raises ApiTypeError: if an unexpected keyword argument is supplied
        :raises ApiValueError: if a required parameter is missing or a value
            is out of range (only when client-side validation is enabled)
        """
        # NOTE: locals() must be the first statement so the snapshot contains
        # exactly the declared parameters plus ``kwargs`` and nothing else.
        local_var_params = locals()
        all_params = ['settle', 'contract', 'order', 'limit', 'offset', 'last_id', 'count_total']
        all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])
        # Reject unknown keyword arguments and fold the known ones into the
        # parameter snapshot; ``kwargs`` itself is then dropped.
        for k, v in six.iteritems(local_var_params['kwargs']):
            if k not in all_params:
                raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method get_my_delivery_trades" % k)
            local_var_params[k] = v
        del local_var_params['kwargs']
        # verify the required parameter 'settle' is set
        if self.api_client.client_side_validation and (
            'settle' not in local_var_params or local_var_params['settle'] is None  # noqa: E501
        ):  # noqa: E501
            raise ApiValueError(
                "Missing the required parameter `settle` when calling `get_my_delivery_trades`"
            )  # noqa: E501
        # Range checks on optional paging parameters: 1 <= limit <= 1000,
        # offset >= 0 (enforced only when client-side validation is enabled).
        if (
            self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] > 1000
        ):  # noqa: E501
            raise ApiValueError(
                "Invalid value for parameter `limit` when calling `get_my_delivery_trades`, must be a value less than or equal to `1000`"
            )  # noqa: E501
        if (
            self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] < 1
        ):  # noqa: E501
            raise ApiValueError(
                "Invalid value for parameter `limit` when calling `get_my_delivery_trades`, must be a value greater than or equal to `1`"
            )  # noqa: E501
        if (
            self.api_client.client_side_validation and 'offset' in local_var_params and local_var_params['offset'] < 0
        ):  # noqa: E501
            raise ApiValueError(
                "Invalid value for parameter `offset` when calling `get_my_delivery_trades`, must be a value greater than or equal to `0`"
            )  # noqa: E501
        # Assemble the HTTP request: `settle` goes in the path, every other
        # supplied (non-None) optional parameter goes in the query string.
        collection_formats = {}
        path_params = {}
        if 'settle' in local_var_params:
            path_params['settle'] = local_var_params['settle']  # noqa: E501
        query_params = []
        if 'contract' in local_var_params and local_var_params['contract'] is not None:  # noqa: E501
            query_params.append(('contract', local_var_params['contract']))  # noqa: E501
        if 'order' in local_var_params and local_var_params['order'] is not None:  # noqa: E501
            query_params.append(('order', local_var_params['order']))  # noqa: E501
        if 'limit' in local_var_params and local_var_params['limit'] is not None:  # noqa: E501
            query_params.append(('limit', local_var_params['limit']))  # noqa: E501
        if 'offset' in local_var_params and local_var_params['offset'] is not None:  # noqa: E501
            query_params.append(('offset', local_var_params['offset']))  # noqa: E501
        if 'last_id' in local_var_params and local_var_params['last_id'] is not None:  # noqa: E501
            query_params.append(('last_id', local_var_params['last_id']))  # noqa: E501
        if 'count_total' in local_var_params and local_var_params['count_total'] is not None:  # noqa: E501
            query_params.append(('count_total', local_var_params['count_total']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['apiv4']  # noqa: E501
        # Dispatch through the shared client; it handles (de)serialization,
        # auth signing, and the async_req threading option.
        return self.api_client.call_api(
            '/delivery/{settle}/my_trades',
            'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[MyFuturesTrade]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
        )
def list_delivery_position_close(self, settle, **kwargs): # noqa: E501
"""List position close history # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_delivery_position_close(settle, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param str contract: Futures contract
:param int limit: Maximum number of records to be returned in a single list
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: list[gate_api.PositionClose]
:return: If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_delivery_position_close_with_http_info(settle, **kwargs) # noqa: E501
    def list_delivery_position_close_with_http_info(self, settle, **kwargs):  # noqa: E501
        """List position close history (GET /delivery/{settle}/position_close).  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.list_delivery_position_close_with_http_info(settle, async_req=True)
        >>> result = thread.get()

        :param bool async_req: execute request asynchronously
        :param str settle: Settle currency (required)
        :param str contract: Futures contract
        :param int limit: Maximum number of records to be returned in a single list
        :param _return_http_data_only: return response data only, without
            status code and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
            be returned without reading/decoding response data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be a
            pair (tuple) of (connection, read) timeouts.
        :rtype: tuple(list[gate_api.PositionClose], status_code(int), headers(HTTPHeaderDict))
        :return: If the method is called asynchronously,
            returns the request thread.
        :raises ApiTypeError: if an unexpected keyword argument is supplied
        :raises ApiValueError: if a required parameter is missing or a value
            is out of range (only when client-side validation is enabled)
        """
        # NOTE: locals() must be the first statement so the snapshot contains
        # exactly the declared parameters plus ``kwargs`` and nothing else.
        local_var_params = locals()
        all_params = ['settle', 'contract', 'limit']
        all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])
        # Reject unknown keyword arguments and fold the known ones into the
        # parameter snapshot; ``kwargs`` itself is then dropped.
        for k, v in six.iteritems(local_var_params['kwargs']):
            if k not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'" " to method list_delivery_position_close" % k
                )
            local_var_params[k] = v
        del local_var_params['kwargs']
        # verify the required parameter 'settle' is set
        if self.api_client.client_side_validation and (
            'settle' not in local_var_params or local_var_params['settle'] is None  # noqa: E501
        ):  # noqa: E501
            raise ApiValueError(
                "Missing the required parameter `settle` when calling `list_delivery_position_close`"
            )  # noqa: E501
        # Range check: 1 <= limit <= 1000 (enforced only when client-side
        # validation is enabled).
        if (
            self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] > 1000
        ):  # noqa: E501
            raise ApiValueError(
                "Invalid value for parameter `limit` when calling `list_delivery_position_close`, must be a value less than or equal to `1000`"
            )  # noqa: E501
        if (
            self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] < 1
        ):  # noqa: E501
            raise ApiValueError(
                "Invalid value for parameter `limit` when calling `list_delivery_position_close`, must be a value greater than or equal to `1`"
            )  # noqa: E501
        # Assemble the HTTP request: `settle` goes in the path, supplied
        # (non-None) optional parameters go in the query string.
        collection_formats = {}
        path_params = {}
        if 'settle' in local_var_params:
            path_params['settle'] = local_var_params['settle']  # noqa: E501
        query_params = []
        if 'contract' in local_var_params and local_var_params['contract'] is not None:  # noqa: E501
            query_params.append(('contract', local_var_params['contract']))  # noqa: E501
        if 'limit' in local_var_params and local_var_params['limit'] is not None:  # noqa: E501
            query_params.append(('limit', local_var_params['limit']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['apiv4']  # noqa: E501
        # Dispatch through the shared client; it handles (de)serialization,
        # auth signing, and the async_req threading option.
        return self.api_client.call_api(
            '/delivery/{settle}/position_close',
            'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[PositionClose]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
        )
def list_delivery_liquidates(self, settle, **kwargs): # noqa: E501
"""List liquidation history # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_delivery_liquidates(settle, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param str contract: Futures contract
:param int limit: Maximum number of records to be returned in a single list
:param int at: Specify a liquidation timestamp
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: list[gate_api.FuturesLiquidate]
:return: If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_delivery_liquidates_with_http_info(settle, **kwargs) # noqa: E501
    def list_delivery_liquidates_with_http_info(self, settle, **kwargs):  # noqa: E501
        """List liquidation history (GET /delivery/{settle}/liquidates).  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.list_delivery_liquidates_with_http_info(settle, async_req=True)
        >>> result = thread.get()

        :param bool async_req: execute request asynchronously
        :param str settle: Settle currency (required)
        :param str contract: Futures contract
        :param int limit: Maximum number of records to be returned in a single list
        :param int at: Specify a liquidation timestamp
        :param _return_http_data_only: return response data only, without
            status code and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
            be returned without reading/decoding response data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be a
            pair (tuple) of (connection, read) timeouts.
        :rtype: tuple(list[gate_api.FuturesLiquidate], status_code(int), headers(HTTPHeaderDict))
        :return: If the method is called asynchronously,
            returns the request thread.
        :raises ApiTypeError: if an unexpected keyword argument is supplied
        :raises ApiValueError: if a required parameter is missing or a value
            is out of range (only when client-side validation is enabled)
        """
        # NOTE: locals() must be the first statement so the snapshot contains
        # exactly the declared parameters plus ``kwargs`` and nothing else.
        local_var_params = locals()
        all_params = ['settle', 'contract', 'limit', 'at']
        all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])
        # Reject unknown keyword arguments and fold the known ones into the
        # parameter snapshot; ``kwargs`` itself is then dropped.
        for k, v in six.iteritems(local_var_params['kwargs']):
            if k not in all_params:
                raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method list_delivery_liquidates" % k)
            local_var_params[k] = v
        del local_var_params['kwargs']
        # verify the required parameter 'settle' is set
        if self.api_client.client_side_validation and (
            'settle' not in local_var_params or local_var_params['settle'] is None  # noqa: E501
        ):  # noqa: E501
            raise ApiValueError(
                "Missing the required parameter `settle` when calling `list_delivery_liquidates`"
            )  # noqa: E501
        # Range check: 1 <= limit <= 1000 (enforced only when client-side
        # validation is enabled).
        if (
            self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] > 1000
        ):  # noqa: E501
            raise ApiValueError(
                "Invalid value for parameter `limit` when calling `list_delivery_liquidates`, must be a value less than or equal to `1000`"
            )  # noqa: E501
        if (
            self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] < 1
        ):  # noqa: E501
            raise ApiValueError(
                "Invalid value for parameter `limit` when calling `list_delivery_liquidates`, must be a value greater than or equal to `1`"
            )  # noqa: E501
        # Assemble the HTTP request: `settle` goes in the path, supplied
        # (non-None) optional parameters go in the query string.
        collection_formats = {}
        path_params = {}
        if 'settle' in local_var_params:
            path_params['settle'] = local_var_params['settle']  # noqa: E501
        query_params = []
        if 'contract' in local_var_params and local_var_params['contract'] is not None:  # noqa: E501
            query_params.append(('contract', local_var_params['contract']))  # noqa: E501
        if 'limit' in local_var_params and local_var_params['limit'] is not None:  # noqa: E501
            query_params.append(('limit', local_var_params['limit']))  # noqa: E501
        if 'at' in local_var_params and local_var_params['at'] is not None:  # noqa: E501
            query_params.append(('at', local_var_params['at']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['apiv4']  # noqa: E501
        # Dispatch through the shared client; it handles (de)serialization,
        # auth signing, and the async_req threading option.
        return self.api_client.call_api(
            '/delivery/{settle}/liquidates',
            'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[FuturesLiquidate]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
        )
def list_delivery_settlements(self, settle, **kwargs): # noqa: E501
"""List settlement history # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_delivery_settlements(settle, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param str contract: Futures contract
:param int limit: Maximum number of records to be returned in a single list
:param int at: Specify a settlement timestamp
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: list[gate_api.DeliverySettlement]
:return: If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_delivery_settlements_with_http_info(settle, **kwargs) # noqa: E501
    def list_delivery_settlements_with_http_info(self, settle, **kwargs):  # noqa: E501
        """List settlement history (GET /delivery/{settle}/settlements).  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.list_delivery_settlements_with_http_info(settle, async_req=True)
        >>> result = thread.get()

        :param bool async_req: execute request asynchronously
        :param str settle: Settle currency (required)
        :param str contract: Futures contract
        :param int limit: Maximum number of records to be returned in a single list
        :param int at: Specify a settlement timestamp
        :param _return_http_data_only: return response data only, without
            status code and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
            be returned without reading/decoding response data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be a
            pair (tuple) of (connection, read) timeouts.
        :rtype: tuple(list[gate_api.DeliverySettlement], status_code(int), headers(HTTPHeaderDict))
        :return: If the method is called asynchronously,
            returns the request thread.
        :raises ApiTypeError: if an unexpected keyword argument is supplied
        :raises ApiValueError: if a required parameter is missing or a value
            is out of range (only when client-side validation is enabled)
        """
        # NOTE: locals() must be the first statement so the snapshot contains
        # exactly the declared parameters plus ``kwargs`` and nothing else.
        local_var_params = locals()
        all_params = ['settle', 'contract', 'limit', 'at']
        all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])
        # Reject unknown keyword arguments and fold the known ones into the
        # parameter snapshot; ``kwargs`` itself is then dropped.
        for k, v in six.iteritems(local_var_params['kwargs']):
            if k not in all_params:
                raise ApiTypeError("Got an unexpected keyword argument '%s'" " to method list_delivery_settlements" % k)
            local_var_params[k] = v
        del local_var_params['kwargs']
        # verify the required parameter 'settle' is set
        if self.api_client.client_side_validation and (
            'settle' not in local_var_params or local_var_params['settle'] is None  # noqa: E501
        ):  # noqa: E501
            raise ApiValueError(
                "Missing the required parameter `settle` when calling `list_delivery_settlements`"
            )  # noqa: E501
        # Range check: 1 <= limit <= 1000 (enforced only when client-side
        # validation is enabled).
        if (
            self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] > 1000
        ):  # noqa: E501
            raise ApiValueError(
                "Invalid value for parameter `limit` when calling `list_delivery_settlements`, must be a value less than or equal to `1000`"
            )  # noqa: E501
        if (
            self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] < 1
        ):  # noqa: E501
            raise ApiValueError(
                "Invalid value for parameter `limit` when calling `list_delivery_settlements`, must be a value greater than or equal to `1`"
            )  # noqa: E501
        # Assemble the HTTP request: `settle` goes in the path, supplied
        # (non-None) optional parameters go in the query string.
        collection_formats = {}
        path_params = {}
        if 'settle' in local_var_params:
            path_params['settle'] = local_var_params['settle']  # noqa: E501
        query_params = []
        if 'contract' in local_var_params and local_var_params['contract'] is not None:  # noqa: E501
            query_params.append(('contract', local_var_params['contract']))  # noqa: E501
        if 'limit' in local_var_params and local_var_params['limit'] is not None:  # noqa: E501
            query_params.append(('limit', local_var_params['limit']))  # noqa: E501
        if 'at' in local_var_params and local_var_params['at'] is not None:  # noqa: E501
            query_params.append(('at', local_var_params['at']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['apiv4']  # noqa: E501
        # Dispatch through the shared client; it handles (de)serialization,
        # auth signing, and the async_req threading option.
        return self.api_client.call_api(
            '/delivery/{settle}/settlements',
            'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[DeliverySettlement]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
        )
def list_price_triggered_delivery_orders(self, settle, status, **kwargs): # noqa: E501
"""List all auto orders # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_price_triggered_delivery_orders(settle, status, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param str status: Only list the orders with this status (required)
:param str contract: Futures contract, return related data only if specified
:param int limit: Maximum number of records to be returned in a single list
:param int offset: List offset, starting from 0
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: list[gate_api.FuturesPriceTriggeredOrder]
:return: If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.list_price_triggered_delivery_orders_with_http_info(settle, status, **kwargs) # noqa: E501
    def list_price_triggered_delivery_orders_with_http_info(self, settle, status, **kwargs):  # noqa: E501
        """List all auto orders (GET /delivery/{settle}/price_orders).  # noqa: E501

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.list_price_triggered_delivery_orders_with_http_info(settle, status, async_req=True)
        >>> result = thread.get()

        :param bool async_req: execute request asynchronously
        :param str settle: Settle currency (required)
        :param str status: Only list the orders with this status (required)
        :param str contract: Futures contract, return related data only if specified
        :param int limit: Maximum number of records to be returned in a single list
        :param int offset: List offset, starting from 0
        :param _return_http_data_only: return response data only, without
            status code and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
            be returned without reading/decoding response data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
            number provided, it will be total request timeout. It can also be a
            pair (tuple) of (connection, read) timeouts.
        :rtype: tuple(list[gate_api.FuturesPriceTriggeredOrder], status_code(int), headers(HTTPHeaderDict))
        :return: If the method is called asynchronously,
            returns the request thread.
        :raises ApiTypeError: if an unexpected keyword argument is supplied
        :raises ApiValueError: if a required parameter is missing or a value
            is out of range (only when client-side validation is enabled)
        """
        # NOTE: locals() must be the first statement so the snapshot contains
        # exactly the declared parameters plus ``kwargs`` and nothing else.
        local_var_params = locals()
        all_params = ['settle', 'status', 'contract', 'limit', 'offset']
        all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])
        # Reject unknown keyword arguments and fold the known ones into the
        # parameter snapshot; ``kwargs`` itself is then dropped.
        for k, v in six.iteritems(local_var_params['kwargs']):
            if k not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'" " to method list_price_triggered_delivery_orders" % k
                )
            local_var_params[k] = v
        del local_var_params['kwargs']
        # verify the required parameter 'settle' is set
        if self.api_client.client_side_validation and (
            'settle' not in local_var_params or local_var_params['settle'] is None  # noqa: E501
        ):  # noqa: E501
            raise ApiValueError(
                "Missing the required parameter `settle` when calling `list_price_triggered_delivery_orders`"
            )  # noqa: E501
        # verify the required parameter 'status' is set
        if self.api_client.client_side_validation and (
            'status' not in local_var_params or local_var_params['status'] is None  # noqa: E501
        ):  # noqa: E501
            raise ApiValueError(
                "Missing the required parameter `status` when calling `list_price_triggered_delivery_orders`"
            )  # noqa: E501
        # Range checks on optional paging parameters: 1 <= limit <= 1000,
        # offset >= 0 (enforced only when client-side validation is enabled).
        if (
            self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] > 1000
        ):  # noqa: E501
            raise ApiValueError(
                "Invalid value for parameter `limit` when calling `list_price_triggered_delivery_orders`, must be a value less than or equal to `1000`"
            )  # noqa: E501
        if (
            self.api_client.client_side_validation and 'limit' in local_var_params and local_var_params['limit'] < 1
        ):  # noqa: E501
            raise ApiValueError(
                "Invalid value for parameter `limit` when calling `list_price_triggered_delivery_orders`, must be a value greater than or equal to `1`"
            )  # noqa: E501
        if (
            self.api_client.client_side_validation and 'offset' in local_var_params and local_var_params['offset'] < 0
        ):  # noqa: E501
            raise ApiValueError(
                "Invalid value for parameter `offset` when calling `list_price_triggered_delivery_orders`, must be a value greater than or equal to `0`"
            )  # noqa: E501
        # Assemble the HTTP request: `settle` goes in the path; `status` and
        # the supplied (non-None) optional parameters go in the query string.
        collection_formats = {}
        path_params = {}
        if 'settle' in local_var_params:
            path_params['settle'] = local_var_params['settle']  # noqa: E501
        query_params = []
        if 'status' in local_var_params and local_var_params['status'] is not None:  # noqa: E501
            query_params.append(('status', local_var_params['status']))  # noqa: E501
        if 'contract' in local_var_params and local_var_params['contract'] is not None:  # noqa: E501
            query_params.append(('contract', local_var_params['contract']))  # noqa: E501
        if 'limit' in local_var_params and local_var_params['limit'] is not None:  # noqa: E501
            query_params.append(('limit', local_var_params['limit']))  # noqa: E501
        if 'offset' in local_var_params and local_var_params['offset'] is not None:  # noqa: E501
            query_params.append(('offset', local_var_params['offset']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['apiv4']  # noqa: E501
        # Dispatch through the shared client; it handles (de)serialization,
        # auth signing, and the async_req threading option.
        return self.api_client.call_api(
            '/delivery/{settle}/price_orders',
            'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[FuturesPriceTriggeredOrder]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats,
        )
def create_price_triggered_delivery_order(self, settle, futures_price_triggered_order, **kwargs): # noqa: E501
"""Create a price-triggered order # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_price_triggered_delivery_order(settle, futures_price_triggered_order, async_req=True)
>>> result = thread.get()
:param bool async_req: execute request asynchronously
:param str settle: Settle currency (required)
:param FuturesPriceTriggeredOrder futures_price_triggered_order: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:rtype: gate_api.TriggerOrderResponse
:return: If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_price_triggered_delivery_order_with_http_info(
settle, futures_price_triggered_order, **kwargs
) # noqa: E501
def create_price_triggered_delivery_order_with_http_info(
    self, settle, futures_price_triggered_order, **kwargs
):  # noqa: E501
    """Create a price-triggered order  # noqa: E501

    Builds and dispatches ``POST /delivery/{settle}/price_orders``.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.create_price_triggered_delivery_order_with_http_info(settle, futures_price_triggered_order, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param FuturesPriceTriggeredOrder futures_price_triggered_order: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :rtype: tuple(gate_api.TriggerOrderResponse, status_code(int), headers(HTTPHeaderDict))
    :return: If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: if an unexpected keyword argument is passed
    :raises ApiValueError: if a required parameter is missing/None
    """
    # Snapshot the named parameters by name (includes 'self' and 'kwargs');
    # recognized kwargs are folded in below, so lookups are uniform.
    local_var_params = locals()

    all_params = ['settle', 'futures_price_triggered_order']
    all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])

    # Reject unknown keyword arguments early, then merge known ones.
    for k, v in six.iteritems(local_var_params['kwargs']):
        if k not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'" " to method create_price_triggered_delivery_order" % k
            )
        local_var_params[k] = v
    del local_var_params['kwargs']
    # verify the required parameter 'settle' is set
    if self.api_client.client_side_validation and (
        'settle' not in local_var_params or local_var_params['settle'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `settle` when calling `create_price_triggered_delivery_order`"
        )  # noqa: E501
    # verify the required parameter 'futures_price_triggered_order' is set
    if self.api_client.client_side_validation and (
        'futures_price_triggered_order' not in local_var_params
        or local_var_params['futures_price_triggered_order'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `futures_price_triggered_order` when calling `create_price_triggered_delivery_order`"
        )  # noqa: E501

    collection_formats = {}

    # 'settle' is substituted into the URL path template.
    path_params = {}
    if 'settle' in local_var_params:
        path_params['settle'] = local_var_params['settle']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The order object is serialized as the JSON request body.
    body_params = None
    if 'futures_price_triggered_order' in local_var_params:
        body_params = local_var_params['futures_price_triggered_order']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json']
    )  # noqa: E501

    # Authentication setting
    auth_settings = ['apiv4']  # noqa: E501

    return self.api_client.call_api(
        '/delivery/{settle}/price_orders',
        'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TriggerOrderResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
    )
def cancel_price_triggered_delivery_order_list(self, settle, contract, **kwargs):  # noqa: E501
    """Cancel all open orders  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get back a request
    thread instead of the result.

    >>> thread = api.cancel_price_triggered_delivery_order_list(settle, contract, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be a
        pair (tuple) of (connection, read) timeouts.
    :rtype: list[gate_api.FuturesPriceTriggeredOrder]
    :return: If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return only
    # the deserialized payload (no status code / headers tuple).
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.cancel_price_triggered_delivery_order_list_with_http_info(
        settle, contract, **call_kwargs
    )  # noqa: E501
def cancel_price_triggered_delivery_order_list_with_http_info(self, settle, contract, **kwargs):  # noqa: E501
    """Cancel all open orders  # noqa: E501

    Builds and dispatches ``DELETE /delivery/{settle}/price_orders``.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.cancel_price_triggered_delivery_order_list_with_http_info(settle, contract, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str contract: Futures contract (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :rtype: tuple(list[gate_api.FuturesPriceTriggeredOrder], status_code(int), headers(HTTPHeaderDict))
    :return: If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: if an unexpected keyword argument is passed
    :raises ApiValueError: if a required parameter is missing/None
    """
    # Snapshot the named parameters by name (includes 'self' and 'kwargs');
    # recognized kwargs are folded in below, so lookups are uniform.
    local_var_params = locals()

    all_params = ['settle', 'contract']
    all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])

    # Reject unknown keyword arguments early, then merge known ones.
    for k, v in six.iteritems(local_var_params['kwargs']):
        if k not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method cancel_price_triggered_delivery_order_list" % k
            )
        local_var_params[k] = v
    del local_var_params['kwargs']
    # verify the required parameter 'settle' is set
    if self.api_client.client_side_validation and (
        'settle' not in local_var_params or local_var_params['settle'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `settle` when calling `cancel_price_triggered_delivery_order_list`"
        )  # noqa: E501
    # verify the required parameter 'contract' is set
    if self.api_client.client_side_validation and (
        'contract' not in local_var_params or local_var_params['contract'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `contract` when calling `cancel_price_triggered_delivery_order_list`"
        )  # noqa: E501

    collection_formats = {}

    # 'settle' is substituted into the URL path template.
    path_params = {}
    if 'settle' in local_var_params:
        path_params['settle'] = local_var_params['settle']  # noqa: E501

    # 'contract' goes in the query string; only sent when not None.
    query_params = []
    if 'contract' in local_var_params and local_var_params['contract'] is not None:  # noqa: E501
        query_params.append(('contract', local_var_params['contract']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiv4']  # noqa: E501

    return self.api_client.call_api(
        '/delivery/{settle}/price_orders',
        'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[FuturesPriceTriggeredOrder]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
    )
def get_price_triggered_delivery_order(self, settle, order_id, **kwargs):  # noqa: E501
    """Get a single order  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get back a request
    thread instead of the result.

    >>> thread = api.get_price_triggered_delivery_order(settle, order_id, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str order_id: Retrieve the data of the order with the specified ID (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be a
        pair (tuple) of (connection, read) timeouts.
    :rtype: gate_api.FuturesPriceTriggeredOrder
    :return: If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return only
    # the deserialized payload (no status code / headers tuple).
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.get_price_triggered_delivery_order_with_http_info(
        settle, order_id, **call_kwargs
    )  # noqa: E501
def get_price_triggered_delivery_order_with_http_info(self, settle, order_id, **kwargs):  # noqa: E501
    """Get a single order  # noqa: E501

    Builds and dispatches ``GET /delivery/{settle}/price_orders/{order_id}``.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_price_triggered_delivery_order_with_http_info(settle, order_id, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str order_id: Retrieve the data of the order with the specified ID (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :rtype: tuple(gate_api.FuturesPriceTriggeredOrder, status_code(int), headers(HTTPHeaderDict))
    :return: If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: if an unexpected keyword argument is passed
    :raises ApiValueError: if a required parameter is missing/None
    """
    # Snapshot the named parameters by name (includes 'self' and 'kwargs');
    # recognized kwargs are folded in below, so lookups are uniform.
    local_var_params = locals()

    all_params = ['settle', 'order_id']
    all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])

    # Reject unknown keyword arguments early, then merge known ones.
    for k, v in six.iteritems(local_var_params['kwargs']):
        if k not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'" " to method get_price_triggered_delivery_order" % k
            )
        local_var_params[k] = v
    del local_var_params['kwargs']
    # verify the required parameter 'settle' is set
    if self.api_client.client_side_validation and (
        'settle' not in local_var_params or local_var_params['settle'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `settle` when calling `get_price_triggered_delivery_order`"
        )  # noqa: E501
    # verify the required parameter 'order_id' is set
    if self.api_client.client_side_validation and (
        'order_id' not in local_var_params or local_var_params['order_id'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `order_id` when calling `get_price_triggered_delivery_order`"
        )  # noqa: E501

    collection_formats = {}

    # Both 'settle' and 'order_id' are substituted into the URL path template.
    path_params = {}
    if 'settle' in local_var_params:
        path_params['settle'] = local_var_params['settle']  # noqa: E501
    if 'order_id' in local_var_params:
        path_params['order_id'] = local_var_params['order_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiv4']  # noqa: E501

    return self.api_client.call_api(
        '/delivery/{settle}/price_orders/{order_id}',
        'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='FuturesPriceTriggeredOrder',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
    )
def cancel_price_triggered_delivery_order(self, settle, order_id, **kwargs):  # noqa: E501
    """Cancel a single order  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get back a request
    thread instead of the result.

    >>> thread = api.cancel_price_triggered_delivery_order(settle, order_id, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str order_id: Retrieve the data of the order with the specified ID (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be a
        pair (tuple) of (connection, read) timeouts.
    :rtype: gate_api.FuturesPriceTriggeredOrder
    :return: If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return only
    # the deserialized payload (no status code / headers tuple).
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.cancel_price_triggered_delivery_order_with_http_info(
        settle, order_id, **call_kwargs
    )  # noqa: E501
def cancel_price_triggered_delivery_order_with_http_info(self, settle, order_id, **kwargs):  # noqa: E501
    """Cancel a single order  # noqa: E501

    Builds and dispatches ``DELETE /delivery/{settle}/price_orders/{order_id}``.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.cancel_price_triggered_delivery_order_with_http_info(settle, order_id, async_req=True)
    >>> result = thread.get()

    :param bool async_req: execute request asynchronously
    :param str settle: Settle currency (required)
    :param str order_id: Retrieve the data of the order with the specified ID (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :rtype: tuple(gate_api.FuturesPriceTriggeredOrder, status_code(int), headers(HTTPHeaderDict))
    :return: If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: if an unexpected keyword argument is passed
    :raises ApiValueError: if a required parameter is missing/None
    """
    # Snapshot the named parameters by name (includes 'self' and 'kwargs');
    # recognized kwargs are folded in below, so lookups are uniform.
    local_var_params = locals()

    all_params = ['settle', 'order_id']
    all_params.extend(['async_req', '_return_http_data_only', '_preload_content', '_request_timeout'])

    # Reject unknown keyword arguments early, then merge known ones.
    for k, v in six.iteritems(local_var_params['kwargs']):
        if k not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'" " to method cancel_price_triggered_delivery_order" % k
            )
        local_var_params[k] = v
    del local_var_params['kwargs']
    # verify the required parameter 'settle' is set
    if self.api_client.client_side_validation and (
        'settle' not in local_var_params or local_var_params['settle'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `settle` when calling `cancel_price_triggered_delivery_order`"
        )  # noqa: E501
    # verify the required parameter 'order_id' is set
    if self.api_client.client_side_validation and (
        'order_id' not in local_var_params or local_var_params['order_id'] is None  # noqa: E501
    ):  # noqa: E501
        raise ApiValueError(
            "Missing the required parameter `order_id` when calling `cancel_price_triggered_delivery_order`"
        )  # noqa: E501

    collection_formats = {}

    # Both 'settle' and 'order_id' are substituted into the URL path template.
    path_params = {}
    if 'settle' in local_var_params:
        path_params['settle'] = local_var_params['settle']  # noqa: E501
    if 'order_id' in local_var_params:
        path_params['order_id'] = local_var_params['order_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiv4']  # noqa: E501

    return self.api_client.call_api(
        '/delivery/{settle}/price_orders/{order_id}',
        'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='FuturesPriceTriggeredOrder',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats,
    )
| 50.358314
| 344
| 0.621704
| 20,757
| 178,067
| 5.093366
| 0.016765
| 0.049488
| 0.079188
| 0.024517
| 0.981433
| 0.978983
| 0.976978
| 0.972891
| 0.966469
| 0.960151
| 0
| 0.016451
| 0.301212
| 178,067
| 3,535
| 345
| 50.37256
| 0.833201
| 0.437521
| 0
| 0.774306
| 0
| 0.013889
| 0.217393
| 0.066287
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032986
| false
| 0
| 0.002894
| 0
| 0.068866
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
02d916647dd54b53a38456827e430014774c2006
| 45,021
|
py
|
Python
|
calibrate_deck.py
|
yangqinwhu/opentron
|
653db54d5f8461ebbfd58818fee5f055f61dc7eb
|
[
"MIT"
] | null | null | null |
calibrate_deck.py
|
yangqinwhu/opentron
|
653db54d5f8461ebbfd58818fee5f055f61dc7eb
|
[
"MIT"
] | null | null | null |
calibrate_deck.py
|
yangqinwhu/opentron
|
653db54d5f8461ebbfd58818fee5f055f61dc7eb
|
[
"MIT"
] | 1
|
2021-06-04T05:21:59.000Z
|
2021-06-04T05:21:59.000Z
|
from opentrons import protocol_api
import json
import sys
sys.path.append("/var/lib/jupyter/notebooks")
ams2401 = """{"ordering":[["A1","B1","C1","D1"],["A2","B2","C2","D2"],["A3","B3","C3","D3"],["A4","B4","C4","D4"],["A5","B5","C5","D5"],["A6","B6","C6","D6"]],"brand":{"brand":"ams2401","brandId":[]},"metadata":{"displayName":"ams2401 5ml rack","displayCategory":"wellPlate","displayVolumeUnits":"µL","tags":[]},"dimensions":{"xDimension":127.76,"yDimension":85.47,"zDimension":72},"wells":{"A1":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":18.38,"y":69.73,"z":22},"B1":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":18.38,"y":51.83,"z":22},"C1":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":18.38,"y":33.93,"z":22},"D1":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":18.38,"y":16.03,"z":22},"A2":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":36.28,"y":69.73,"z":22},"B2":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":36.28,"y":51.83,"z":22},"C2":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":36.28,"y":33.93,"z":22},"D2":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":36.28,"y":16.03,"z":22},"A3":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":54.18,"y":69.73,"z":22},"B3":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":54.18,"y":51.83,"z":22},"C3":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":54.18,"y":33.93,"z":22},"D3":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":54.18,"y":16.03,"z":22},"A4":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":72.08,"y":69.73,"z":22},"B4":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":72.08,"y":51.83,"z":22},"C4":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":72.08,"y":33.93,"z":22},"D4":{"depth":50,"totalLiquidVolum
e":5000,"shape":"circular","diameter":14.2,"x":72.08,"y":16.03,"z":22},"A5":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":89.98,"y":69.73,"z":22},"B5":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":89.98,"y":51.83,"z":22},"C5":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":89.98,"y":33.93,"z":22},"D5":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":89.98,"y":16.03,"z":22},"A6":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":107.88,"y":69.73,"z":22},"B6":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":107.88,"y":51.83,"z":22},"C6":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":107.88,"y":33.93,"z":22},"D6":{"depth":50,"totalLiquidVolume":5000,"shape":"circular","diameter":14.2,"x":107.88,"y":16.03,"z":22}},"groups":[{"metadata":{"wellBottomShape":"v"},"wells":["A1","B1","C1","D1","A2","B2","C2","D2","A3","B3","C3","D3","A4","B4","C4","D4","A5","B5","C5","D5","A6","B6","C6","D6"]}],"parameters":{"format":"irregular","quirks":[],"isTiprack":false,"isMagneticModuleCompatible":false,"loadName":"ams2401_24_wellplate_5000ul"},"namespace":"custom_beta","version":1,"schemaVersion":2,"cornerOffsetFromSlot":{"x":0,"y":0,"z":0}}"""
ams2402 = """{"ordering":[["A1","B1","C1","D1"],["A2","B2","C2","D2"],["A3","B3","C3","D3"],["A4","B4","C4","D4"],["A5","B5","C5","D5"],["A6","B6","C6","D6"]],"brand":{"brand":"ams2402","brandId":[]},"metadata":{"displayName":"Ams2402 24 Well Plate 10000 µL","displayCategory":"wellPlate","displayVolumeUnits":"µL","tags":[]},"dimensions":{"xDimension":127.76,"yDimension":85.47,"zDimension":100.2},"wells":{"A1":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":18.38,"y":69.73,"z":5.2},"B1":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":18.38,"y":51.83,"z":5.2},"C1":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":18.38,"y":33.93,"z":5.2},"D1":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":18.38,"y":16.03,"z":5.2},"A2":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":36.28,"y":69.73,"z":5.2},"B2":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":36.28,"y":51.83,"z":5.2},"C2":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":36.28,"y":33.93,"z":5.2},"D2":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":36.28,"y":16.03,"z":5.2},"A3":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":54.18,"y":69.73,"z":5.2},"B3":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":54.18,"y":51.83,"z":5.2},"C3":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":54.18,"y":33.93,"z":5.2},"D3":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":54.18,"y":16.03,"z":5.2},"A4":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":72.08,"y":69.73,"z":5.2},"B4":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":72.08,"y":51.83,"z":5.2},"C4":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":72.08,"y":33
.93,"z":5.2},"D4":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":72.08,"y":16.03,"z":5.2},"A5":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":89.98,"y":69.73,"z":5.2},"B5":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":89.98,"y":51.83,"z":5.2},"C5":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":89.98,"y":33.93,"z":5.2},"D5":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":89.98,"y":16.03,"z":5.2},"A6":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":107.88,"y":69.73,"z":5.2},"B6":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":107.88,"y":51.83,"z":5.2},"C6":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":107.88,"y":33.93,"z":5.2},"D6":{"depth":95,"totalLiquidVolume":10000,"shape":"circular","diameter":14.2,"x":107.88,"y":16.03,"z":5.2}},"groups":[{"metadata":{"wellBottomShape":"flat"},"wells":["A1","B1","C1","D1","A2","B2","C2","D2","A3","B3","C3","D3","A4","B4","C4","D4","A5","B5","C5","D5","A6","B6","C6","D6"]}],"parameters":{"format":"irregular","quirks":[],"isTiprack":false,"isMagneticModuleCompatible":false,"loadName":"ams2402_24_wellplate_10000ul"},"namespace":"custom_beta","version":1,"schemaVersion":2,"cornerOffsetFromSlot":{"x":0,"y":0,"z":0}}"""
ams2402 = ams2401
amsliquidtrash="""{"ordering":[["A1"]],"brand":{"brand":"ams_liquid_trash","brandId":[]},"metadata":{"displayName":"ams_liquid_trash_tipbox","displayCategory":"reservoir","displayVolumeUnits":"µL","tags":[]},"dimensions":{"xDimension":127.76,"yDimension":85.47,"zDimension":70},"wells":{"A1":{"depth":70,"totalLiquidVolume":10000,"shape":"rectangular","xDimension":100,"yDimension":70,"x":64,"y":43.47,"z":0}},"groups":[{"metadata":{"wellBottomShape":"flat"},"wells":["A1"]}],"parameters":{"format":"irregular","quirks":["centerMultichannelOnWells","touchTipDisabled"],"isTiprack":false,"isMagneticModuleCompatible":false,"loadName":"ams_liquid_trash_tipbox"},"namespace":"custom_beta","version":1,"schemaVersion":2,"cornerOffsetFromSlot":{"x":0,"y":0,"z":0}}"""
geb_96_wellplate="""{"ordering":[["A1","B1","C1","D1","E1","F1","G1","H1"],["A2","B2","C2","D2","E2","F2","G2","H2"],["A3","B3","C3","D3","E3","F3","G3","H3"],["A4","B4","C4","D4","E4","F4","G4","H4"],["A5","B5","C5","D5","E5","F5","G5","H5"],["A6","B6","C6","D6","E6","F6","G6","H6"],["A7","B7","C7","D7","E7","F7","G7","H7"],["A8","B8","C8","D8","E8","F8","G8","H8"],["A9","B9","C9","D9","E9","F9","G9","H9"],["A10","B10","C10","D10","E10","F10","G10","H10"],["A11","B11","C11","D11","E11","F11","G11","H11"],["A12","B12","C12","D12","E12","F12","G12","H12"]],"brand":{"brand":"GEB","brandId":[]},"metadata":{"displayName":"GEB 96 Well Plate 200 µL","displayCategory":"wellPlate","displayVolumeUnits":"µL","tags":[]},"dimensions":{"xDimension":127.76,"yDimension":85.48,"zDimension":66.4},"wells":{"A1":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":14.38,"y":74.24,"z":46},"B1":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":14.38,"y":65.24,"z":46},"C1":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":14.38,"y":56.24,"z":46},"D1":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":14.38,"y":47.24,"z":46},"E1":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":14.38,"y":38.24,"z":46},"F1":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":14.38,"y":29.24,"z":46},"G1":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":14.38,"y":20.24,"z":46},"H1":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":14.38,"y":11.24,"z":46},"A2":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":23.38,"y":74.24,"z":46},"B2":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":23.38,"y":65.24,"z":46},"C2":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":23.38,"y":56.24,"z":46},"D2":{"depth":20.4,"totalLiquidVol
ume":200,"shape":"circular","diameter":5.46,"x":23.38,"y":47.24,"z":46},"E2":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":23.38,"y":38.24,"z":46},"F2":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":23.38,"y":29.24,"z":46},"G2":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":23.38,"y":20.24,"z":46},"H2":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":23.38,"y":11.24,"z":46},"A3":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":32.38,"y":74.24,"z":46},"B3":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":32.38,"y":65.24,"z":46},"C3":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":32.38,"y":56.24,"z":46},"D3":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":32.38,"y":47.24,"z":46},"E3":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":32.38,"y":38.24,"z":46},"F3":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":32.38,"y":29.24,"z":46},"G3":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":32.38,"y":20.24,"z":46},"H3":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":32.38,"y":11.24,"z":46},"A4":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":41.38,"y":74.24,"z":46},"B4":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":41.38,"y":65.24,"z":46},"C4":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":41.38,"y":56.24,"z":46},"D4":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":41.38,"y":47.24,"z":46},"E4":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":41.38,"y":38.24,"z":46},"F4":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":41.38,"y":29.24,"z":46},"G4":{"depth":20.4,"
totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":41.38,"y":20.24,"z":46},"H4":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":41.38,"y":11.24,"z":46},"A5":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":50.38,"y":74.24,"z":46},"B5":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":50.38,"y":65.24,"z":46},"C5":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":50.38,"y":56.24,"z":46},"D5":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":50.38,"y":47.24,"z":46},"E5":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":50.38,"y":38.24,"z":46},"F5":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":50.38,"y":29.24,"z":46},"G5":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":50.38,"y":20.24,"z":46},"H5":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":50.38,"y":11.24,"z":46},"A6":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":59.38,"y":74.24,"z":46},"B6":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":59.38,"y":65.24,"z":46},"C6":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":59.38,"y":56.24,"z":46},"D6":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":59.38,"y":47.24,"z":46},"E6":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":59.38,"y":38.24,"z":46},"F6":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":59.38,"y":29.24,"z":46},"G6":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":59.38,"y":20.24,"z":46},"H6":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":59.38,"y":11.24,"z":46},"A7":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":68.38,"y":74.24,"z":46},"B7":{
"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":68.38,"y":65.24,"z":46},"C7":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":68.38,"y":56.24,"z":46},"D7":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":68.38,"y":47.24,"z":46},"E7":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":68.38,"y":38.24,"z":46},"F7":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":68.38,"y":29.24,"z":46},"G7":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":68.38,"y":20.24,"z":46},"H7":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":68.38,"y":11.24,"z":46},"A8":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":77.38,"y":74.24,"z":46},"B8":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":77.38,"y":65.24,"z":46},"C8":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":77.38,"y":56.24,"z":46},"D8":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":77.38,"y":47.24,"z":46},"E8":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":77.38,"y":38.24,"z":46},"F8":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":77.38,"y":29.24,"z":46},"G8":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":77.38,"y":20.24,"z":46},"H8":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":77.38,"y":11.24,"z":46},"A9":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":86.38,"y":74.24,"z":46},"B9":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":86.38,"y":65.24,"z":46},"C9":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":86.38,"y":56.24,"z":46},"D9":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":86.38,"y":47.24,
"z":46},"E9":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":86.38,"y":38.24,"z":46},"F9":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":86.38,"y":29.24,"z":46},"G9":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":86.38,"y":20.24,"z":46},"H9":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":86.38,"y":11.24,"z":46},"A10":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":95.38,"y":74.24,"z":46},"B10":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":95.38,"y":65.24,"z":46},"C10":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":95.38,"y":56.24,"z":46},"D10":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":95.38,"y":47.24,"z":46},"E10":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":95.38,"y":38.24,"z":46},"F10":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":95.38,"y":29.24,"z":46},"G10":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":95.38,"y":20.24,"z":46},"H10":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":95.38,"y":11.24,"z":46},"A11":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":104.38,"y":74.24,"z":46},"B11":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":104.38,"y":65.24,"z":46},"C11":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":104.38,"y":56.24,"z":46},"D11":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":104.38,"y":47.24,"z":46},"E11":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":104.38,"y":38.24,"z":46},"F11":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":104.38,"y":29.24,"z":46},"G11":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","
diameter":5.46,"x":104.38,"y":20.24,"z":46},"H11":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":104.38,"y":11.24,"z":46},"A12":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":113.38,"y":74.24,"z":46},"B12":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":113.38,"y":65.24,"z":46},"C12":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":113.38,"y":56.24,"z":46},"D12":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":113.38,"y":47.24,"z":46},"E12":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":113.38,"y":38.24,"z":46},"F12":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":113.38,"y":29.24,"z":46},"G12":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":113.38,"y":20.24,"z":46},"H12":{"depth":20.4,"totalLiquidVolume":200,"shape":"circular","diameter":5.46,"x":113.38,"y":11.24,"z":46}},"groups":[{"metadata":{"wellBottomShape":"v"},"wells":["A1","B1","C1","D1","E1","F1","G1","H1","A2","B2","C2","D2","E2","F2","G2","H2","A3","B3","C3","D3","E3","F3","G3","H3","A4","B4","C4","D4","E4","F4","G4","H4","A5","B5","C5","D5","E5","F5","G5","H5","A6","B6","C6","D6","E6","F6","G6","H6","A7","B7","C7","D7","E7","F7","G7","H7","A8","B8","C8","D8","E8","F8","G8","H8","A9","B9","C9","D9","E9","F9","G9","H9","A10","B10","C10","D10","E10","F10","G10","H10","A11","B11","C11","D11","E11","F11","G11","H11","A12","B12","C12","D12","E12","F12","G12","H12"]}],"parameters":{"format":"irregular","quirks":[],"isTiprack":false,"isMagneticModuleCompatible":false,"loadName":"geb_96_wellplate_200ul"},"namespace":"custom_beta","version":1,"schemaVersion":2,"cornerOffsetFromSlot":{"x":0,"y":0,"z":0}}"""
biobank_96well_saliva_1000ul = """{"ordering":[["A1","B1","C1","D1","E1","F1","G1","H1"],["A2","B2","C2","D2","E2","F2","G2","H2"],["A3","B3","C3","D3","E3","F3","G3","H3"],["A4","B4","C4","D4","E4","F4","G4","H4"],["A5","B5","C5","D5","E5","F5","G5","H5"],["A6","B6","C6","D6","E6","F6","G6","H6"],["A7","B7","C7","D7","E7","F7","G7","H7"],["A8","B8","C8","D8","E8","F8","G8","H8"],["A9","B9","C9","D9","E9","F9","G9","H9"],["A10","B10","C10","D10","E10","F10","G10","H10"],["A11","B11","C11","D11","E11","F11","G11","H11"],["A12","B12","C12","D12","E12","F12","G12","H12"]],"brand":{"brand":"Ali","brandId":[]},"metadata":{"displayName":"Biobank 96well saliva 1000uL","displayCategory":"wellPlate","displayVolumeUnits":"µL","tags":[]},"dimensions":{"xDimension":127.76,"yDimension":85.48,"zDimension":39.5},"wells":{"A1":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":14.38,"y":74.24,"z":0.5},"B1":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":14.38,"y":65.24,"z":0.5},"C1":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":14.38,"y":56.24,"z":0.5},"D1":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":14.38,"y":47.24,"z":0.5},"E1":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":14.38,"y":38.24,"z":0.5},"F1":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":14.38,"y":29.24,"z":0.5},"G1":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":14.38,"y":20.24,"z":0.5},"H1":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":14.38,"y":11.24,"z":0.5},"A2":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":23.38,"y":74.24,"z":0.5},"B2":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":23.38,"y":65.24,"z":0.5},"C2":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":23.38,"y":56.24,"z":0.5},"D2":{"depth":39,"totalLiquidVolume":1000,"shape"
:"circular","diameter":6,"x":23.38,"y":47.24,"z":0.5},"E2":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":23.38,"y":38.24,"z":0.5},"F2":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":23.38,"y":29.24,"z":0.5},"G2":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":23.38,"y":20.24,"z":0.5},"H2":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":23.38,"y":11.24,"z":0.5},"A3":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":32.38,"y":74.24,"z":0.5},"B3":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":32.38,"y":65.24,"z":0.5},"C3":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":32.38,"y":56.24,"z":0.5},"D3":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":32.38,"y":47.24,"z":0.5},"E3":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":32.38,"y":38.24,"z":0.5},"F3":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":32.38,"y":29.24,"z":0.5},"G3":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":32.38,"y":20.24,"z":0.5},"H3":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":32.38,"y":11.24,"z":0.5},"A4":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":41.38,"y":74.24,"z":0.5},"B4":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":41.38,"y":65.24,"z":0.5},"C4":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":41.38,"y":56.24,"z":0.5},"D4":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":41.38,"y":47.24,"z":0.5},"E4":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":41.38,"y":38.24,"z":0.5},"F4":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":41.38,"y":29.24,"z":0.5},"G4":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":41.38,"y":20.2
4,"z":0.5},"H4":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":41.38,"y":11.24,"z":0.5},"A5":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":50.38,"y":74.24,"z":0.5},"B5":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":50.38,"y":65.24,"z":0.5},"C5":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":50.38,"y":56.24,"z":0.5},"D5":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":50.38,"y":47.24,"z":0.5},"E5":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":50.38,"y":38.24,"z":0.5},"F5":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":50.38,"y":29.24,"z":0.5},"G5":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":50.38,"y":20.24,"z":0.5},"H5":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":50.38,"y":11.24,"z":0.5},"A6":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":59.38,"y":74.24,"z":0.5},"B6":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":59.38,"y":65.24,"z":0.5},"C6":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":59.38,"y":56.24,"z":0.5},"D6":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":59.38,"y":47.24,"z":0.5},"E6":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":59.38,"y":38.24,"z":0.5},"F6":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":59.38,"y":29.24,"z":0.5},"G6":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":59.38,"y":20.24,"z":0.5},"H6":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":59.38,"y":11.24,"z":0.5},"A7":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":68.38,"y":74.24,"z":0.5},"B7":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":68.38,"y":65.24,"z":0.5},"C7":{"depth":39,"totalLiquidVol
ume":1000,"shape":"circular","diameter":6,"x":68.38,"y":56.24,"z":0.5},"D7":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":68.38,"y":47.24,"z":0.5},"E7":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":68.38,"y":38.24,"z":0.5},"F7":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":68.38,"y":29.24,"z":0.5},"G7":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":68.38,"y":20.24,"z":0.5},"H7":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":68.38,"y":11.24,"z":0.5},"A8":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":77.38,"y":74.24,"z":0.5},"B8":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":77.38,"y":65.24,"z":0.5},"C8":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":77.38,"y":56.24,"z":0.5},"D8":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":77.38,"y":47.24,"z":0.5},"E8":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":77.38,"y":38.24,"z":0.5},"F8":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":77.38,"y":29.24,"z":0.5},"G8":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":77.38,"y":20.24,"z":0.5},"H8":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":77.38,"y":11.24,"z":0.5},"A9":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":86.38,"y":74.24,"z":0.5},"B9":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":86.38,"y":65.24,"z":0.5},"C9":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":86.38,"y":56.24,"z":0.5},"D9":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":86.38,"y":47.24,"z":0.5},"E9":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":86.38,"y":38.24,"z":0.5},"F9":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"
x":86.38,"y":29.24,"z":0.5},"G9":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":86.38,"y":20.24,"z":0.5},"H9":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":86.38,"y":11.24,"z":0.5},"A10":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":95.38,"y":74.24,"z":0.5},"B10":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":95.38,"y":65.24,"z":0.5},"C10":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":95.38,"y":56.24,"z":0.5},"D10":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":95.38,"y":47.24,"z":0.5},"E10":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":95.38,"y":38.24,"z":0.5},"F10":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":95.38,"y":29.24,"z":0.5},"G10":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":95.38,"y":20.24,"z":0.5},"H10":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":95.38,"y":11.24,"z":0.5},"A11":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":104.38,"y":74.24,"z":0.5},"B11":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":104.38,"y":65.24,"z":0.5},"C11":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":104.38,"y":56.24,"z":0.5},"D11":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":104.38,"y":47.24,"z":0.5},"E11":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":104.38,"y":38.24,"z":0.5},"F11":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":104.38,"y":29.24,"z":0.5},"G11":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":104.38,"y":20.24,"z":0.5},"H11":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":104.38,"y":11.24,"z":0.5},"A12":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":113.38,"y":74.2
4,"z":0.5},"B12":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":113.38,"y":65.24,"z":0.5},"C12":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":113.38,"y":56.24,"z":0.5},"D12":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":113.38,"y":47.24,"z":0.5},"E12":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":113.38,"y":38.24,"z":0.5},"F12":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":113.38,"y":29.24,"z":0.5},"G12":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":113.38,"y":20.24,"z":0.5},"H12":{"depth":39,"totalLiquidVolume":1000,"shape":"circular","diameter":6,"x":113.38,"y":11.24,"z":0.5}},"groups":[{"metadata":{"wellBottomShape":"v"},"wells":["A1","B1","C1","D1","E1","F1","G1","H1","A2","B2","C2","D2","E2","F2","G2","H2","A3","B3","C3","D3","E3","F3","G3","H3","A4","B4","C4","D4","E4","F4","G4","H4","A5","B5","C5","D5","E5","F5","G5","H5","A6","B6","C6","D6","E6","F6","G6","H6","A7","B7","C7","D7","E7","F7","G7","H7","A8","B8","C8","D8","E8","F8","G8","H8","A9","B9","C9","D9","E9","F9","G9","H9","A10","B10","C10","D10","E10","F10","G10","H10","A11","B11","C11","D11","E11","F11","G11","H11","A12","B12","C12","D12","E12","F12","G12","H12"]}],"parameters":{"format":"irregular","quirks":[],"isTiprack":false,"isMagneticModuleCompatible":false,"loadName":"biobank_96well_saliva_1000ul"},"namespace":"custom_beta","version":1,"schemaVersion":2,"cornerOffsetFromSlot":{"x":0,"y":0,"z":0}}"""
micronic_96_wellplate_1400ul = """{"ordering":[["A1","B1","C1","D1","E1","F1","G1","H1"],["A2","B2","C2","D2","E2","F2","G2","H2"],["A3","B3","C3","D3","E3","F3","G3","H3"],["A4","B4","C4","D4","E4","F4","G4","H4"],["A5","B5","C5","D5","E5","F5","G5","H5"],["A6","B6","C6","D6","E6","F6","G6","H6"],["A7","B7","C7","D7","E7","F7","G7","H7"],["A8","B8","C8","D8","E8","F8","G8","H8"],["A9","B9","C9","D9","E9","F9","G9","H9"],["A10","B10","C10","D10","E10","F10","G10","H10"],["A11","B11","C11","D11","E11","F11","G11","H11"],["A12","B12","C12","D12","E12","F12","G12","H12"]],"brand":{"brand":"Micronic","brandId":[]},"metadata":{"displayName":"Micronic 96 Well Plate 1400 µL","displayCategory":"wellPlate","displayVolumeUnits":"µL","tags":[]},"dimensions":{"xDimension":127.76,"yDimension":85.48,"zDimension":49},"wells":{"A1":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":14.38,"y":74.24,"z":0},"B1":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":14.38,"y":65.24,"z":0},"C1":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":14.38,"y":56.24,"z":0},"D1":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":14.38,"y":47.24,"z":0},"E1":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":14.38,"y":38.24,"z":0},"F1":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":14.38,"y":29.24,"z":0},"G1":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":14.38,"y":20.24,"z":0},"H1":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":14.38,"y":11.24,"z":0},"A2":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":23.38,"y":74.24,"z":0},"B2":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":23.38,"y":65.24,"z":0},"C2":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":23.38,"y":56.24,"z":0},"D2":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diam
eter":6,"x":23.38,"y":47.24,"z":0},"E2":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":23.38,"y":38.24,"z":0},"F2":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":23.38,"y":29.24,"z":0},"G2":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":23.38,"y":20.24,"z":0},"H2":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":23.38,"y":11.24,"z":0},"A3":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":32.38,"y":74.24,"z":0},"B3":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":32.38,"y":65.24,"z":0},"C3":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":32.38,"y":56.24,"z":0},"D3":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":32.38,"y":47.24,"z":0},"E3":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":32.38,"y":38.24,"z":0},"F3":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":32.38,"y":29.24,"z":0},"G3":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":32.38,"y":20.24,"z":0},"H3":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":32.38,"y":11.24,"z":0},"A4":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":41.38,"y":74.24,"z":0},"B4":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":41.38,"y":65.24,"z":0},"C4":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":41.38,"y":56.24,"z":0},"D4":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":41.38,"y":47.24,"z":0},"E4":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":41.38,"y":38.24,"z":0},"F4":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":41.38,"y":29.24,"z":0},"G4":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":41.38,"y":20.24,"z":0},"H4":{"depth":49,"totalLiquidVolume":1400,"sha
pe":"circular","diameter":6,"x":41.38,"y":11.24,"z":0},"A5":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":50.38,"y":74.24,"z":0},"B5":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":50.38,"y":65.24,"z":0},"C5":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":50.38,"y":56.24,"z":0},"D5":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":50.38,"y":47.24,"z":0},"E5":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":50.38,"y":38.24,"z":0},"F5":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":50.38,"y":29.24,"z":0},"G5":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":50.38,"y":20.24,"z":0},"H5":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":50.38,"y":11.24,"z":0},"A6":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":59.38,"y":74.24,"z":0},"B6":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":59.38,"y":65.24,"z":0},"C6":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":59.38,"y":56.24,"z":0},"D6":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":59.38,"y":47.24,"z":0},"E6":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":59.38,"y":38.24,"z":0},"F6":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":59.38,"y":29.24,"z":0},"G6":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":59.38,"y":20.24,"z":0},"H6":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":59.38,"y":11.24,"z":0},"A7":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":68.38,"y":74.24,"z":0},"B7":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":68.38,"y":65.24,"z":0},"C7":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":68.38,"y":56.24,"z":0},"D7":{"depth":49,"totalLiq
uidVolume":1400,"shape":"circular","diameter":6,"x":68.38,"y":47.24,"z":0},"E7":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":68.38,"y":38.24,"z":0},"F7":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":68.38,"y":29.24,"z":0},"G7":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":68.38,"y":20.24,"z":0},"H7":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":68.38,"y":11.24,"z":0},"A8":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":77.38,"y":74.24,"z":0},"B8":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":77.38,"y":65.24,"z":0},"C8":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":77.38,"y":56.24,"z":0},"D8":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":77.38,"y":47.24,"z":0},"E8":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":77.38,"y":38.24,"z":0},"F8":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":77.38,"y":29.24,"z":0},"G8":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":77.38,"y":20.24,"z":0},"H8":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":77.38,"y":11.24,"z":0},"A9":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":86.38,"y":74.24,"z":0},"B9":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":86.38,"y":65.24,"z":0},"C9":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":86.38,"y":56.24,"z":0},"D9":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":86.38,"y":47.24,"z":0},"E9":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":86.38,"y":38.24,"z":0},"F9":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":86.38,"y":29.24,"z":0},"G9":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":86.38,"y":20.24,"z":0},"H9":{
"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":86.38,"y":11.24,"z":0},"A10":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":95.38,"y":74.24,"z":0},"B10":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":95.38,"y":65.24,"z":0},"C10":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":95.38,"y":56.24,"z":0},"D10":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":95.38,"y":47.24,"z":0},"E10":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":95.38,"y":38.24,"z":0},"F10":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":95.38,"y":29.24,"z":0},"G10":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":95.38,"y":20.24,"z":0},"H10":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":95.38,"y":11.24,"z":0},"A11":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":104.38,"y":74.24,"z":0},"B11":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":104.38,"y":65.24,"z":0},"C11":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":104.38,"y":56.24,"z":0},"D11":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":104.38,"y":47.24,"z":0},"E11":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":104.38,"y":38.24,"z":0},"F11":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":104.38,"y":29.24,"z":0},"G11":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":104.38,"y":20.24,"z":0},"H11":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":104.38,"y":11.24,"z":0},"A12":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":113.38,"y":74.24,"z":0},"B12":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":113.38,"y":65.24,"z":0},"C12":{"depth":49,"totalLiquidVolume":1400,"shape":"circula
r","diameter":6,"x":113.38,"y":56.24,"z":0},"D12":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":113.38,"y":47.24,"z":0},"E12":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":113.38,"y":38.24,"z":0},"F12":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":113.38,"y":29.24,"z":0},"G12":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":113.38,"y":20.24,"z":0},"H12":{"depth":49,"totalLiquidVolume":1400,"shape":"circular","diameter":6,"x":113.38,"y":11.24,"z":0}},"groups":[{"metadata":{"wellBottomShape":"v"},"wells":["A1","B1","C1","D1","E1","F1","G1","H1","A2","B2","C2","D2","E2","F2","G2","H2","A3","B3","C3","D3","E3","F3","G3","H3","A4","B4","C4","D4","E4","F4","G4","H4","A5","B5","C5","D5","E5","F5","G5","H5","A6","B6","C6","D6","E6","F6","G6","H6","A7","B7","C7","D7","E7","F7","G7","H7","A8","B8","C8","D8","E8","F8","G8","H8","A9","B9","C9","D9","E9","F9","G9","H9","A10","B10","C10","D10","E10","F10","G10","H10","A11","B11","C11","D11","E11","F11","G11","H11","A12","B12","C12","D12","E12","F12","G12","H12"]}],"parameters":{"format":"irregular","quirks":[],"isTiprack":false,"isMagneticModuleCompatible":false,"loadName":"micronic_96_wellplate_1400ul"},"namespace":"custom_beta","version":1,"schemaVersion":2,"cornerOffsetFromSlot":{"x":0,"y":0,"z":0}}"""
# protocol = opentrons.execute.get_protocol_api('2.7')
metadata = {'apiLevel': '2.7'}
def run(protocol):
    """Calibration run: transfer 20 uL from saliva source racks into several
    destination plates with a multi-channel pipette.

    protocol: Opentrons protocol context (module metadata pins apiLevel 2.7).

    Relies on module-level JSON labware definition strings (amsliquidtrash,
    ams2401, ams2402, geb_96_wellplate, micronic_96_wellplate_1400ul) and a
    module-level `import json`.
    """
    # Hard-coded switches: which pipette/tip rack this calibration run uses.
    cali_p20 = 1
    cali_p200 = 0
    tm = 0  # set truthy to also load the Temperature Module in slot 10
    if cali_p200:
        p200_tip_name = "opentrons_96_filtertiprack_200ul"
        p200_tip_slots = ["2"]
        left_pip_name = "p300_multi"
        p200_tips = [protocol.load_labware(p200_tip_name, slot) for slot in p200_tip_slots]
        multi_pipette = protocol.load_instrument(left_pip_name, 'left', tip_racks=p200_tips)
    elif cali_p20:
        p20_tip_name = "opentrons_96_filtertiprack_20ul"
        p20_tip_slots = ["7"]
        p20_tips = [protocol.load_labware(p20_tip_name, slot) for slot in p20_tip_slots]
        right_pip_name = "p20_multi_gen2"
        multi_pipette = protocol.load_instrument(right_pip_name, 'right', tip_racks=p20_tips)
    else:
        # Guard: previously `multi_pipette` was silently unbound when both
        # flags were 0, producing a confusing NameError at the first transfer.
        raise RuntimeError("Enable cali_p20 or cali_p200 to select a pipette.")
    plate_slot = "6"
    rack_slots = ["3"]
    trash_slot = "9"
    # Parse the custom labware definitions (JSON strings defined at module level).
    liquid_trash_rack = json.loads(amsliquidtrash)
    saliva_rack = json.loads(ams2401)  # NOTE: overwritten below, kept as in original
    saliva_rack_10ml = json.loads(ams2402)
    saliva_rack_10ml_slots = ["5"]
    plate_name = json.loads(geb_96_wellplate)
    # Active source-rack definition (the ams2401 value above is discarded).
    saliva_rack = json.loads(micronic_96_wellplate_1400ul)
    if tm:
        tm_deck = protocol.load_module('Temperature Module', 10)
        # Bug fix: original read `self.tm_deck`, but `self` does not exist in a
        # plain function — it raised NameError whenever tm was truthy.
        tm_plate = tm_deck.load_labware_from_definition(saliva_rack)
    src_racks = [protocol.load_labware_from_definition(saliva_rack, slot) for slot in rack_slots]
    src_tubes = src_racks[0].rows()[0]
    src_racks_10ml = [protocol.load_labware_from_definition(saliva_rack_10ml, slot) for slot in saliva_rack_10ml_slots]
    src_tubes_10ml = src_racks_10ml[0].rows()[0]
    dest_plate = protocol.load_labware_from_definition(plate_name, plate_slot)
    dest_plate_2 = protocol.load_labware('nest_96_wellplate_100ul_pcr_full_skirt', 11)
    dest_plate_3 = protocol.load_labware('nest_96_wellplate_2ml_deep', 4)
    dest_plate_4 = protocol.load_labware('nesttall_12_reservoir_22000ul', 2)
    trash = protocol.load_labware_from_definition(liquid_trash_rack, trash_slot)
    multi_pipette.trash_container = trash
    # Row-A wells used as anchors for the multi-channel transfers.
    s = src_tubes[0]
    s1 = src_tubes_10ml[0]
    d = dest_plate.rows()[0][0]
    d2 = dest_plate_2.rows()[0][0]
    d3 = dest_plate_3.rows()[0][0]
    d4 = dest_plate_4.rows()[0][0]
    multi_pipette.transfer(20, s, d)
    multi_pipette.transfer(20, s1, d)
    multi_pipette.transfer(20, s1, d2)
    multi_pipette.transfer(20, s1, d3)
    multi_pipette.transfer(20, s1, d4)
| 505.853933
| 11,818
| 0.62804
| 7,556
| 45,021
| 3.714664
| 0.040498
| 0.155622
| 0.251389
| 0.150492
| 0.900064
| 0.887701
| 0.870208
| 0.844342
| 0.844342
| 0.843273
| 0
| 0.170963
| 0.010906
| 45,021
| 88
| 11,819
| 511.602273
| 0.459353
| 0.005753
| 0
| 0
| 0
| 0.098361
| 0.946174
| 0.942867
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016393
| false
| 0
| 0.04918
| 0
| 0.065574
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
f30ce9c923445cd2006df9b5bf78cc956aceef7f
| 771
|
py
|
Python
|
src/app/beer_garden/api/http/handlers/v1/__init__.py
|
hazmat345/beer-garden
|
a5fd3eee303d80b6a16d93c89fe8ff42fe39bfbd
|
[
"MIT"
] | null | null | null |
src/app/beer_garden/api/http/handlers/v1/__init__.py
|
hazmat345/beer-garden
|
a5fd3eee303d80b6a16d93c89fe8ff42fe39bfbd
|
[
"MIT"
] | null | null | null |
src/app/beer_garden/api/http/handlers/v1/__init__.py
|
hazmat345/beer-garden
|
a5fd3eee303d80b6a16d93c89fe8ff42fe39bfbd
|
[
"MIT"
] | null | null | null |
# flake8: noqa
import beer_garden.api.http.handlers.v1.admin
import beer_garden.api.http.handlers.v1.command
import beer_garden.api.http.handlers.v1.event
import beer_garden.api.http.handlers.v1.instance
import beer_garden.api.http.handlers.v1.job
import beer_garden.api.http.handlers.v1.logging
import beer_garden.api.http.handlers.v1.permissions
import beer_garden.api.http.handlers.v1.queue
import beer_garden.api.http.handlers.v1.request
import beer_garden.api.http.handlers.v1.role
import beer_garden.api.http.handlers.v1.system
import beer_garden.api.http.handlers.v1.token
import beer_garden.api.http.handlers.v1.user
import beer_garden.api.http.handlers.v1.garden
import beer_garden.api.http.handlers.v1.forward
import beer_garden.api.http.handlers.v1.namespace
| 40.578947
| 51
| 0.846952
| 130
| 771
| 4.9
| 0.184615
| 0.251177
| 0.401884
| 0.477237
| 0.828885
| 0.828885
| 0.828885
| 0
| 0
| 0
| 0
| 0.023129
| 0.046693
| 771
| 18
| 52
| 42.833333
| 0.843537
| 0.015564
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
b8bbe7dffb3050c2ada14459e62b983c6b61a6d6
| 82
|
py
|
Python
|
convlstm/__init__.py
|
nmheim/ConvLSTM_pytorch
|
365785aa108e8bc7f1276c7c5e106df3c6cf1efe
|
[
"MIT"
] | null | null | null |
convlstm/__init__.py
|
nmheim/ConvLSTM_pytorch
|
365785aa108e8bc7f1276c7c5e106df3c6cf1efe
|
[
"MIT"
] | null | null | null |
convlstm/__init__.py
|
nmheim/ConvLSTM_pytorch
|
365785aa108e8bc7f1276c7c5e106df3c6cf1efe
|
[
"MIT"
] | null | null | null |
from convlstm.convlstm import ConvLSTMCell
from convlstm.convlstm import ConvLSTM
| 27.333333
| 42
| 0.878049
| 10
| 82
| 7.2
| 0.4
| 0.333333
| 0.555556
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097561
| 82
| 2
| 43
| 41
| 0.972973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
b22eb26cfc8fa0d587bc7b6a2d45f14cd528bc72
| 1,784
|
py
|
Python
|
torch_tools/data/datasets/sample.py
|
gregunz/TorchTools
|
19a33f2e4cd38f86b74bd732949516df66f9e24f
|
[
"MIT"
] | null | null | null |
torch_tools/data/datasets/sample.py
|
gregunz/TorchTools
|
19a33f2e4cd38f86b74bd732949516df66f9e24f
|
[
"MIT"
] | null | null | null |
torch_tools/data/datasets/sample.py
|
gregunz/TorchTools
|
19a33f2e4cd38f86b74bd732949516df66f9e24f
|
[
"MIT"
] | null | null | null |
import random
from torch.utils.data import Dataset
class Sample(Dataset):
    """Dataset wrapper that exposes a fixed number of samples from *dataset*.

    Args:
        dataset: the wrapped dataset (needs ``__len__`` and ``__getitem__``).
        num_samples: number of samples to expose; defaults to ``len(dataset)``.
            May exceed the dataset size, in which case indices wrap around.
        random_sampling: if True, sample indices are drawn in random order
            from the whole dataset instead of sequentially.
    """

    def __init__(self, dataset: Dataset, num_samples: int = None, random_sampling=False):
        assert num_samples is None or num_samples > 0, 'need at least one sample in the dataset'
        self.dataset = dataset
        n = len(dataset)
        if num_samples is None:
            num_samples = n
        # Bug fix: the pool must cover the whole dataset (range(n)), otherwise
        # random_sampling with num_samples < n could only ever pick from the
        # first num_samples items. Using max(n, num_samples) also preserves the
        # original wrap-around behavior when num_samples > n, and the
        # non-random path is unchanged (first num_samples indices, mod n).
        self.all_indices = list(range(max(n, num_samples)))
        if random_sampling:
            random.shuffle(self.all_indices)
        self.indices = [i % n for i in self.all_indices[:num_samples]]

    def __getitem__(self, idx):
        return self.dataset[self.indices[idx]]

    def __len__(self):
        return len(self.indices)
# class Sample(Dataset):
# def __init__(self, dataset: Dataset, num_samples: int = None, random_sampling=False):
# assert num_samples is None or num_samples > 0, 'need at least one sample in the dataset'
#
# self.random_sampling = random_sampling
# self.dataset = dataset
#
# n = len(dataset)
#
# if num_samples is None:
# num_samples = n
#
# self.all_indices = list(range(n))
#
# if random_sampling:
# random.shuffle(self.all_indices)
# self.n_idx = 0
#
# self.indices = [i % n for i in self.all_indices[:num_samples]]
#
# def __getitem__(self, idx):
# if self.random_sampling:
# self.n_idx += 1
# if self.n_idx % len(self) == 0:
# self.n_idx = 0
# random.shuffle(self.all_indices)
# self.indices = [i % len(self.dataset) for i in self.all_indices[:num_samples]]
#
# return self.dataset[self.indices[idx]]
#
# def __len__(self):
# return len(self.indices)
| 29.245902
| 98
| 0.601457
| 234
| 1,784
| 4.34188
| 0.179487
| 0.137795
| 0.110236
| 0.062992
| 0.845472
| 0.845472
| 0.845472
| 0.845472
| 0.777559
| 0.688976
| 0
| 0.004758
| 0.293161
| 1,784
| 60
| 99
| 29.733333
| 0.800952
| 0.560538
| 0
| 0
| 0
| 0
| 0.051931
| 0
| 0
| 0
| 0
| 0
| 0.058824
| 1
| 0.176471
| false
| 0
| 0.117647
| 0.117647
| 0.470588
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
b2641346219db12adea00c841fd0ad4eb4d9a237
| 106
|
py
|
Python
|
src/lib/detectors/edgetpu/defaults.py
|
l-maia/viseron
|
d762be93db74f780db13ac332bf8673c41592aa9
|
[
"MIT"
] | null | null | null |
src/lib/detectors/edgetpu/defaults.py
|
l-maia/viseron
|
d762be93db74f780db13ac332bf8673c41592aa9
|
[
"MIT"
] | null | null | null |
src/lib/detectors/edgetpu/defaults.py
|
l-maia/viseron
|
d762be93db74f780db13ac332bf8673c41592aa9
|
[
"MIT"
] | null | null | null |
# Default filesystem locations for the EdgeTPU detector's assets:
# the .tflite model file and its accompanying class-label text file.
MODEL_PATH = "/detectors/models/edgetpu/model.tflite"
LABEL_PATH = "/detectors/models/edgetpu/labels.txt"
| 35.333333
| 53
| 0.792453
| 14
| 106
| 5.857143
| 0.642857
| 0.317073
| 0.463415
| 0.634146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056604
| 106
| 2
| 54
| 53
| 0.82
| 0
| 0
| 0
| 0
| 0
| 0.698113
| 0.698113
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b2647c341b5c90cb5f37125a6d024773914ec9ec
| 11,576
|
py
|
Python
|
graphene_django/tests/test_get_queryset.py
|
loft-orbital/graphene-django
|
8643404722e64ec77e621d3891dda1eb56824f32
|
[
"MIT"
] | 1
|
2021-03-03T21:39:51.000Z
|
2021-03-03T21:39:51.000Z
|
graphene_django/tests/test_get_queryset.py
|
loft-orbital/graphene-django
|
8643404722e64ec77e621d3891dda1eb56824f32
|
[
"MIT"
] | 2
|
2021-02-10T09:32:11.000Z
|
2021-02-10T09:32:34.000Z
|
graphene_django/tests/test_get_queryset.py
|
loft-orbital/graphene-django
|
8643404722e64ec77e621d3891dda1eb56824f32
|
[
"MIT"
] | null | null | null |
import pytest
import graphene
from graphene.relay import Node
from graphql_relay import to_global_id
from ..fields import DjangoConnectionField
from ..types import DjangoObjectType
from .models import Article, Reporter
class TestShouldCallGetQuerySetOnForeignKey:
    """
    Check that the get_queryset method is called in both forward and reversed direction
    of a foreignkey on types.
    (see issue #1111)
    """

    @pytest.fixture(autouse=True)
    def setup_schema(self):
        # Two type-level get_queryset overrides: ReporterType requires an
        # "admin" flag in the execution context; ArticleType hides drafts.
        class ReporterType(DjangoObjectType):
            class Meta:
                model = Reporter

            @classmethod
            def get_queryset(cls, queryset, info):
                # Grant access only when the context carries admin=True.
                if info.context and info.context.get("admin"):
                    return queryset
                raise Exception("Not authorized to access reporters.")

        class ArticleType(DjangoObjectType):
            class Meta:
                model = Article

            @classmethod
            def get_queryset(cls, queryset, info):
                # Exclude articles whose headline marks them as drafts.
                return queryset.exclude(headline__startswith="Draft")

        class Query(graphene.ObjectType):
            reporter = graphene.Field(ReporterType, id=graphene.ID(required=True))
            article = graphene.Field(ArticleType, id=graphene.ID(required=True))

            def resolve_reporter(self, info, id):
                # Resolvers route through get_queryset so the override applies
                # on direct field access, not just relation traversal.
                return (
                    ReporterType.get_queryset(Reporter.objects, info)
                    .filter(id=id)
                    .last()
                )

            def resolve_article(self, info, id):
                return (
                    ArticleType.get_queryset(Article.objects, info).filter(id=id).last()
                )

        self.schema = graphene.Schema(query=Query)
        # Fixture data: one reporter with one published and one draft article.
        self.reporter = Reporter.objects.create(first_name="Jane", last_name="Doe")
        self.articles = [
            Article.objects.create(
                headline="A fantastic article",
                reporter=self.reporter,
                editor=self.reporter,
            ),
            Article.objects.create(
                headline="Draft: My next best seller",
                reporter=self.reporter,
                editor=self.reporter,
            ),
        ]

    def test_get_queryset_called_on_field(self):
        # If a user tries to access an article it is fine as long as it's not a draft one
        query = """
            query getArticle($id: ID!) {
                article(id: $id) {
                    headline
                }
            }
        """
        # Non-draft
        result = self.schema.execute(query, variables={"id": self.articles[0].id})
        assert not result.errors
        assert result.data["article"] == {
            "headline": "A fantastic article",
        }
        # Draft
        result = self.schema.execute(query, variables={"id": self.articles[1].id})
        assert not result.errors
        assert result.data["article"] is None
        # If a non admin user tries to access a reporter they should get our authorization error
        query = """
            query getReporter($id: ID!) {
                reporter(id: $id) {
                    firstName
                }
            }
        """
        result = self.schema.execute(query, variables={"id": self.reporter.id})
        assert len(result.errors) == 1
        assert result.errors[0].message == "Not authorized to access reporters."
        # An admin user should be able to get reporters
        query = """
            query getReporter($id: ID!) {
                reporter(id: $id) {
                    firstName
                }
            }
        """
        result = self.schema.execute(
            query, variables={"id": self.reporter.id}, context_value={"admin": True},
        )
        assert not result.errors
        assert result.data == {"reporter": {"firstName": "Jane"}}

    def test_get_queryset_called_on_foreignkey(self):
        # If a user tries to access a reporter through an article they should get our authorization error
        query = """
            query getArticle($id: ID!) {
                article(id: $id) {
                    headline
                    reporter {
                        firstName
                    }
                }
            }
        """
        result = self.schema.execute(query, variables={"id": self.articles[0].id})
        assert len(result.errors) == 1
        assert result.errors[0].message == "Not authorized to access reporters."
        # An admin user should be able to get reporters through an article
        query = """
            query getArticle($id: ID!) {
                article(id: $id) {
                    headline
                    reporter {
                        firstName
                    }
                }
            }
        """
        result = self.schema.execute(
            query, variables={"id": self.articles[0].id}, context_value={"admin": True},
        )
        assert not result.errors
        assert result.data["article"] == {
            "headline": "A fantastic article",
            "reporter": {"firstName": "Jane"},
        }
        # An admin user should not be able to access draft article through a reporter
        query = """
            query getReporter($id: ID!) {
                reporter(id: $id) {
                    firstName
                    articles {
                        headline
                    }
                }
            }
        """
        result = self.schema.execute(
            query, variables={"id": self.reporter.id}, context_value={"admin": True},
        )
        assert not result.errors
        assert result.data["reporter"] == {
            "firstName": "Jane",
            "articles": [{"headline": "A fantastic article"}],
        }
class TestShouldCallGetQuerySetOnForeignKeyNode:
    """
    Check that the get_queryset method is called in both forward and reversed direction
    of a foreignkey on types using a node interface.
    (see issue #1111)
    """

    @pytest.fixture(autouse=True)
    def setup_schema(self):
        # Same overrides as the non-Node test above, but the types implement
        # the relay Node interface and are exposed via Node.Field, so lookups
        # use global IDs instead of raw primary keys.
        class ReporterType(DjangoObjectType):
            class Meta:
                model = Reporter
                interfaces = (Node,)

            @classmethod
            def get_queryset(cls, queryset, info):
                # Grant access only when the context carries admin=True.
                if info.context and info.context.get("admin"):
                    return queryset
                raise Exception("Not authorized to access reporters.")

        class ArticleType(DjangoObjectType):
            class Meta:
                model = Article
                interfaces = (Node,)

            @classmethod
            def get_queryset(cls, queryset, info):
                # Exclude articles whose headline marks them as drafts.
                return queryset.exclude(headline__startswith="Draft")

        class Query(graphene.ObjectType):
            reporter = Node.Field(ReporterType)
            article = Node.Field(ArticleType)

        self.schema = graphene.Schema(query=Query)
        # Fixture data: one reporter with one published and one draft article.
        self.reporter = Reporter.objects.create(first_name="Jane", last_name="Doe")
        self.articles = [
            Article.objects.create(
                headline="A fantastic article",
                reporter=self.reporter,
                editor=self.reporter,
            ),
            Article.objects.create(
                headline="Draft: My next best seller",
                reporter=self.reporter,
                editor=self.reporter,
            ),
        ]

    def test_get_queryset_called_on_node(self):
        # If a user tries to access an article it is fine as long as it's not a draft one
        query = """
            query getArticle($id: ID!) {
                article(id: $id) {
                    headline
                }
            }
        """
        # Non-draft
        result = self.schema.execute(
            query, variables={"id": to_global_id("ArticleType", self.articles[0].id)}
        )
        assert not result.errors
        assert result.data["article"] == {
            "headline": "A fantastic article",
        }
        # Draft
        result = self.schema.execute(
            query, variables={"id": to_global_id("ArticleType", self.articles[1].id)}
        )
        assert not result.errors
        assert result.data["article"] is None
        # If a non admin user tries to access a reporter they should get our authorization error
        query = """
            query getReporter($id: ID!) {
                reporter(id: $id) {
                    firstName
                }
            }
        """
        result = self.schema.execute(
            query, variables={"id": to_global_id("ReporterType", self.reporter.id)}
        )
        assert len(result.errors) == 1
        assert result.errors[0].message == "Not authorized to access reporters."
        # An admin user should be able to get reporters
        query = """
            query getReporter($id: ID!) {
                reporter(id: $id) {
                    firstName
                }
            }
        """
        result = self.schema.execute(
            query,
            variables={"id": to_global_id("ReporterType", self.reporter.id)},
            context_value={"admin": True},
        )
        assert not result.errors
        assert result.data == {"reporter": {"firstName": "Jane"}}

    def test_get_queryset_called_on_foreignkey(self):
        # If a user tries to access a reporter through an article they should get our authorization error
        query = """
            query getArticle($id: ID!) {
                article(id: $id) {
                    headline
                    reporter {
                        firstName
                    }
                }
            }
        """
        result = self.schema.execute(
            query, variables={"id": to_global_id("ArticleType", self.articles[0].id)}
        )
        assert len(result.errors) == 1
        assert result.errors[0].message == "Not authorized to access reporters."
        # An admin user should be able to get reporters through an article
        query = """
            query getArticle($id: ID!) {
                article(id: $id) {
                    headline
                    reporter {
                        firstName
                    }
                }
            }
        """
        result = self.schema.execute(
            query,
            variables={"id": to_global_id("ArticleType", self.articles[0].id)},
            context_value={"admin": True},
        )
        assert not result.errors
        assert result.data["article"] == {
            "headline": "A fantastic article",
            "reporter": {"firstName": "Jane"},
        }
        # An admin user should not be able to access draft article through a reporter
        query = """
            query getReporter($id: ID!) {
                reporter(id: $id) {
                    firstName
                    articles {
                        edges {
                            node {
                                headline
                            }
                        }
                    }
                }
            }
        """
        result = self.schema.execute(
            query,
            variables={"id": to_global_id("ReporterType", self.reporter.id)},
            context_value={"admin": True},
        )
        assert not result.errors
        assert result.data["reporter"] == {
            "firstName": "Jane",
            "articles": {"edges": [{"node": {"headline": "A fantastic article"}}]},
        }
| 32.516854
| 105
| 0.507256
| 1,087
| 11,576
| 5.344066
| 0.116835
| 0.017903
| 0.038561
| 0.055431
| 0.896884
| 0.888621
| 0.880014
| 0.880014
| 0.877259
| 0.872439
| 0
| 0.003416
| 0.393055
| 11,576
| 355
| 106
| 32.608451
| 0.82337
| 0.104613
| 0
| 0.704626
| 0
| 0
| 0.315248
| 0
| 0
| 0
| 0
| 0
| 0.099644
| 1
| 0.042705
| false
| 0
| 0.024911
| 0.014235
| 0.131673
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b270b4baba10461f937de65cfd940c26c405c569
| 6,407
|
py
|
Python
|
loldib/getratings/models/NA/na_urgot/na_urgot_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_urgot/na_urgot_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_urgot/na_urgot_jng.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# This module originally spelled out 138 identical empty Ratings subclasses,
# one per champion matchup (class NA_Urgot_Jng_<Champ>(Ratings): pass).
# They are generated from a name tuple instead: the public class names and
# their behavior are unchanged, but ~270 duplicated lines are removed.
_NA_URGOT_JNG_CHAMPS = (
    "Aatrox", "Ahri", "Akali", "Alistar", "Amumu", "Anivia", "Annie", "Ashe",
    "AurelionSol", "Azir", "Bard", "Blitzcrank", "Brand", "Braum", "Caitlyn",
    "Camille", "Cassiopeia", "Chogath", "Corki", "Darius", "Diana", "Draven",
    "DrMundo", "Ekko", "Elise", "Evelynn", "Ezreal", "Fiddlesticks", "Fiora",
    "Fizz", "Galio", "Gangplank", "Garen", "Gnar", "Gragas", "Graves",
    "Hecarim", "Heimerdinger", "Illaoi", "Irelia", "Ivern", "Janna",
    "JarvanIV", "Jax", "Jayce", "Jhin", "Jinx", "Kalista", "Karma", "Karthus",
    "Kassadin", "Katarina", "Kayle", "Kayn", "Kennen", "Khazix", "Kindred",
    "Kled", "KogMaw", "Leblanc", "LeeSin", "Leona", "Lissandra", "Lucian",
    "Lulu", "Lux", "Malphite", "Malzahar", "Maokai", "MasterYi", "MissFortune",
    "MonkeyKing", "Mordekaiser", "Morgana", "Nami", "Nasus", "Nautilus",
    "Nidalee", "Nocturne", "Nunu", "Olaf", "Orianna", "Ornn", "Pantheon",
    "Poppy", "Quinn", "Rakan", "Rammus", "RekSai", "Renekton", "Rengar",
    "Riven", "Rumble", "Ryze", "Sejuani", "Shaco", "Shen", "Shyvana",
    "Singed", "Sion", "Sivir", "Skarner", "Sona", "Soraka", "Swain", "Syndra",
    "TahmKench", "Taliyah", "Talon", "Taric", "Teemo", "Thresh", "Tristana",
    "Trundle", "Tryndamere", "TwistedFate", "Twitch", "Udyr", "Urgot",
    "Varus", "Vayne", "Veigar", "Velkoz", "Vi", "Viktor", "Vladimir",
    "Volibear", "Warwick", "Xayah", "Xerath", "XinZhao", "Yasuo", "Yorick",
    "Zac", "Zed", "Ziggs", "Zilean", "Zyra",
)

for _champ in _NA_URGOT_JNG_CHAMPS:
    # Equivalent to: class NA_Urgot_Jng_<champ>(Ratings): pass
    _cls_name = "NA_Urgot_Jng_" + _champ
    globals()[_cls_name] = type(_cls_name, (Ratings,), {"__module__": __name__})
del _champ, _cls_name
| 15.364508
| 46
| 0.761667
| 972
| 6,407
| 4.59465
| 0.151235
| 0.216301
| 0.370802
| 0.463502
| 0.797582
| 0.797582
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173404
| 6,407
| 416
| 47
| 15.401442
| 0.843278
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
b29c969f0823f15db04ff653737958f5acfc9043
| 20,690
|
py
|
Python
|
smuthi/linearsystem/linear_system_cuda.py
|
parkerwray/smuthi-1
|
a5ced07461b8fd223dc37d28259261ceed78aed5
|
[
"MIT"
] | null | null | null |
smuthi/linearsystem/linear_system_cuda.py
|
parkerwray/smuthi-1
|
a5ced07461b8fd223dc37d28259261ceed78aed5
|
[
"MIT"
] | null | null | null |
smuthi/linearsystem/linear_system_cuda.py
|
parkerwray/smuthi-1
|
a5ced07461b8fd223dc37d28259261ceed78aed5
|
[
"MIT"
] | null | null | null |
"""This module contains CUDA source code for the evaluation of the coupling
matrix from lookups."""
# The following cuda kernel multiplies the coupling matrix to a vector. It is based on linear interpolation of
# the lookup table.
#
# input arguments of the cuda kernel:
# n (np.uint32): n[i] contains the multipole multi-index with regard to self.l_max and self.m_max of the i-th
# entry of a system vector
# m (np.float32): m[i] contains the multipole order with regard to particle.l_max and particle.m_max
# x_pos (np.float32): x_pos[i] contains the respective particle x-position
# y_pos (np.float32): y_pos[i] contains the respective particle y-position
# z_pos (np.float32): z_pos[i] contains the respective particle z-position
# re_lookup_pl (np.float32): the real part of the lookup table for the z1+z2 part of the Sommerfeld integral,
# in the format (rho, sum_z, n1, n2)
# im_lookup_pl (np.float32): the imaginary part of the lookup table for the z1+z2 part of the Sommerfeld
# integral, in the format (rho, sum_z, n1, n2)
# re_lookup_mn (np.float32): the real part of the lookup table for the z1-z2 part of the Sommerfeld integral,
# in the format (rho, diff_z, n1, n2)
# im_lookup_mn (np.float32): the imaginary part of the lookup table for the z1-z2 part of the Sommerfeld
# integral, in the format (rho, diff_z, n1, n2)
# re_in_vec (np.float32): the real part of the vector to be multiplied with the coupling matrix
# im_in_vec (np.float32): the imaginary part of the vector to be multiplied with the coupling matrix
# re_result_vec (np.float32): the real part of the vector into which the result is written
# im_result_vec (np.float32): the imaginary part of the vector into which the result is written
# CUDA C source for the coupling-matrix/vector product using BILINEAR
# interpolation of the two volume lookup tables (one indexed by rho and
# z1+z2, one by rho and z1-z2).
# The %i / %f placeholders (BLOCKSIZE, NUMBER_OF_UNKNOWNS, Z_ARRAY_LENGTH,
# MIN_RHO, MIN_Z_SUM, MIN_Z_DIFF, LOOKUP_RESOLUTION) are filled in via
# Python %-formatting before the kernel source is compiled.
# NOTE(review): the kernel mixes double-precision sqrt/atan2/floor with
# single-precision cosf/sinf -- presumably deliberate for index-computation
# accuracy; confirm before switching to sqrtf/atan2f/floorf.
linear_volume_lookup_source = """
#define BLOCKSIZE %i
#define NUMBER_OF_UNKNOWNS %i
#define Z_ARRAY_LENGTH %i
#define MIN_RHO %f
#define MIN_Z_SUM %f
#define MIN_Z_DIFF %f
#define LOOKUP_RESOLUTION %f
__global__ void coupling_kernel(const int *n, const float *m, const float *x_pos, const float *y_pos,
const float *z_pos, const float *re_lookup_pl, const float *im_lookup_pl,
const float *re_lookup_mn, const float *im_lookup_mn,
const float *re_in_vec, const float *im_in_vec,
float *re_result, float *im_result)
{
unsigned int i1 = blockIdx.x * blockDim.x + threadIdx.x;
if(i1 >= NUMBER_OF_UNKNOWNS) return;
const float x1 = x_pos[i1];
const float y1 = y_pos[i1];
const float z1 = z_pos[i1];
const int n1 = n[i1];
const float m1 = m[i1];
re_result[i1] = 0.0;
im_result[i1] = 0.0;
for (int i2=0; i2<NUMBER_OF_UNKNOWNS; i2++)
{
float x21 = x1 - x_pos[i2];
float y21 = y1 - y_pos[i2];
float sz21 = z1 + z_pos[i2];
float dz21 = z1 - z_pos[i2];
const int n2 = n[i2];
const float m2 = m[i2];
float rho = sqrt(x21*x21+y21*y21);
float phi = atan2(y21,x21);
int rho_idx = (int) floor((rho - MIN_RHO) / LOOKUP_RESOLUTION);
float rho_w = (rho - MIN_RHO) / LOOKUP_RESOLUTION - floor((rho - MIN_RHO) / LOOKUP_RESOLUTION);
int sz_idx = (int) floor((sz21 - MIN_Z_SUM) / LOOKUP_RESOLUTION);
float sz_w = (sz21 - MIN_Z_SUM) / LOOKUP_RESOLUTION - floor((sz21 - MIN_Z_SUM) / LOOKUP_RESOLUTION);
int dz_idx = (int) floor((dz21 - MIN_Z_DIFF) / LOOKUP_RESOLUTION);
float dz_w = (dz21 - MIN_Z_DIFF) / LOOKUP_RESOLUTION
- floor((dz21 - MIN_Z_DIFF) / LOOKUP_RESOLUTION);
int idx_rho_sz = rho_idx * Z_ARRAY_LENGTH * BLOCKSIZE * BLOCKSIZE
+ sz_idx * BLOCKSIZE * BLOCKSIZE + n1 * BLOCKSIZE + n2;
int idx_rho_szpl1 = rho_idx * Z_ARRAY_LENGTH * BLOCKSIZE * BLOCKSIZE
+ (sz_idx + 1) * BLOCKSIZE * BLOCKSIZE + n1 * BLOCKSIZE + n2;
int idx_rhopl1_sz = (rho_idx + 1) * Z_ARRAY_LENGTH * BLOCKSIZE * BLOCKSIZE
+ sz_idx * BLOCKSIZE * BLOCKSIZE + n1 * BLOCKSIZE + n2;
int idx_rhopl1_szpl1 = (rho_idx + 1) * Z_ARRAY_LENGTH * BLOCKSIZE * BLOCKSIZE
+ (sz_idx + 1) * BLOCKSIZE * BLOCKSIZE + n1 * BLOCKSIZE + n2;
int idx_rho_dz = rho_idx * Z_ARRAY_LENGTH * BLOCKSIZE * BLOCKSIZE + dz_idx * BLOCKSIZE * BLOCKSIZE
+ n1 * BLOCKSIZE + n2;
int idx_rho_dzpl1 = rho_idx * Z_ARRAY_LENGTH * BLOCKSIZE * BLOCKSIZE
+ (dz_idx + 1) * BLOCKSIZE * BLOCKSIZE + n1 * BLOCKSIZE + n2;
int idx_rhopl1_dz = (rho_idx + 1) * Z_ARRAY_LENGTH * BLOCKSIZE * BLOCKSIZE
+ dz_idx * BLOCKSIZE * BLOCKSIZE + n1 * BLOCKSIZE + n2;
int idx_rhopl1_dzpl1 = (rho_idx + 1) * Z_ARRAY_LENGTH * BLOCKSIZE * BLOCKSIZE
+ (dz_idx + 1) * BLOCKSIZE * BLOCKSIZE + n1 * BLOCKSIZE + n2;
float f_rhoi = re_lookup_pl[idx_rho_sz] * (1 - sz_w) + re_lookup_pl[idx_rho_szpl1] * sz_w;
float f_rhoipl1 = re_lookup_pl[idx_rhopl1_sz] * (1 - sz_w) + re_lookup_pl[idx_rhopl1_szpl1] * sz_w;
float re_si_pl = f_rhoi * (1 - rho_w) + f_rhoipl1 * rho_w;
f_rhoi = im_lookup_pl[idx_rho_sz] * (1 - sz_w) + im_lookup_pl[idx_rho_szpl1] * sz_w;
f_rhoipl1 = im_lookup_pl[idx_rhopl1_sz] * (1 - sz_w) + im_lookup_pl[idx_rhopl1_szpl1] * sz_w;
float im_si_pl = f_rhoi * (1 - rho_w) + f_rhoipl1 * rho_w;
f_rhoi = re_lookup_mn[idx_rho_dz] * (1 - dz_w) + re_lookup_mn[idx_rho_dzpl1] * dz_w;
f_rhoipl1 = re_lookup_mn[idx_rhopl1_dz] * (1 - dz_w) + re_lookup_mn[idx_rhopl1_dzpl1] * dz_w;
float re_si_mn = f_rhoi * (1 - rho_w) + f_rhoipl1 * rho_w;
f_rhoi = im_lookup_mn[idx_rho_dz] * (1 - dz_w) + im_lookup_mn[idx_rho_dzpl1] * dz_w;
f_rhoipl1 = im_lookup_mn[idx_rhopl1_dz] * (1 - dz_w) + im_lookup_mn[idx_rhopl1_dzpl1] * dz_w;
float im_si_mn = f_rhoi * (1 - rho_w) + f_rhoipl1 * rho_w;
float re_eimphi = cosf((m2 - m1) * phi);
float im_eimphi = sinf((m2 - m1) * phi);
float re_w = re_eimphi * (re_si_pl + re_si_mn) - im_eimphi * (im_si_pl + im_si_mn);
float im_w = im_eimphi * (re_si_pl + re_si_mn) + re_eimphi * (im_si_pl + im_si_mn);
re_result[i1] += re_w * re_in_vec[i2] - im_w * im_in_vec[i2];
im_result[i1] += re_w * im_in_vec[i2] + im_w * re_in_vec[i2];
}
}"""
# The following cuda kernel multiplies the coupling matrix to a vector. It is based on cubic Hermite spline interpolation
# of the lookup table.
#
# input arguments of the cuda kernel:
# n (np.uint32): n[i] contains the multipole multi-index with regard to self.l_max and self.m_max of the i-th
# entry of a system vector
# m (np.float32): m[i] contains the multipole order with regard to particle.l_max and particle.m_max
# x_pos (np.float32): x_pos[i] contains the respective particle x-position
# y_pos (np.float32): y_pos[i] contains the respective particle y-position
# z_pos (np.float32): z_pos[i] contains the respective particle z-position
# re_lookup_pl (np.float32): the real part of the lookup table for the z1+z2 part of the Sommerfeld integral,
# in the format (rho, sum_z, n1, n2)
# im_lookup_pl (np.float32): the imaginary part of the lookup table for the z1+z2 part of the Sommerfeld
# integral, in the format (rho, sum_z, n1, n2)
# re_lookup_mn (np.float32): the real part of the lookup table for the z1-z2 part of the Sommerfeld integral,
# in the format (rho, diff_z, n1, n2)
# im_lookup_mn (np.float32): the imaginary part of the lookup table for the z1-z2 part of the Sommerfeld
# integral, in the format (rho, diff_z, n1, n2)
# re_in_vec (np.float32): the real part of the vector to be multiplied with the coupling matrix
# im_in_vec (np.float32): the imaginary part of the vector to be multiplied with the coupling matrix
# re_result_vec (np.float32): the real part of the vector into which the result is written
# im_result_vec (np.float32): the imaginary part of the vector into which the result is written
# CUDA C source for the coupling-matrix/vector product using CUBIC
# (Catmull-Rom style) interpolation of the two volume lookup tables.
# Same placeholder scheme as linear_volume_lookup_source: the %i / %f
# placeholders are filled in via Python %-formatting before compilation.
# The cubic stencil reads lookup entries at indices i-1 .. i+2 in both rho
# and z, so callers must keep query points at least one grid cell away from
# the table edges -- no bounds check is performed in the kernel.
cubic_volume_lookup_source = """
#define BLOCKSIZE %i
#define NUMBER_OF_UNKNOWNS %i
#define Z_ARRAY_LENGTH %i
#define MIN_RHO %f
#define MIN_Z_SUM %f
#define MIN_Z_DIFF %f
#define LOOKUP_RESOLUTION %f
__device__ int lookup_index(int const i_r, int const i_z, int const n1, int const n2)
{
return i_r * Z_ARRAY_LENGTH * BLOCKSIZE * BLOCKSIZE + i_z * BLOCKSIZE * BLOCKSIZE + n1 * BLOCKSIZE + n2;
}
__device__ float cubic_interpolation(float const w, float const lookup_imn1, float const lookup_i,
float const lookup_ipl1, float const lookup_ipl2)
{
return ((-w*w*w+2*w*w-w) * lookup_imn1 + (3*w*w*w-5*w*w+2) * lookup_i + (-3*w*w*w+4*w*w+w) * lookup_ipl1
+ (w*w*w-w*w) * lookup_ipl2) / 2;
}
__device__ float lookup_2D(int const i_rho, float const w_rho, int const i_z, float const w_z, int const n1,
int const n2, float const *lookup)
{
int i_zmn1 = lookup_index(i_rho-1, i_z-1, n1, n2);
int i = lookup_index(i_rho-1, i_z, n1, n2);
int i_zpl1 = lookup_index(i_rho-1, i_z+1, n1, n2);
int i_zpl2 = lookup_index(i_rho-1, i_z+2, n1, n2);
float lookup_rmn1 = cubic_interpolation(w_z, lookup[i_zmn1], lookup[i], lookup[i_zpl1], lookup[i_zpl2]);
i_zmn1 = lookup_index(i_rho, i_z-1, n1, n2);
i = lookup_index(i_rho, i_z, n1, n2);
i_zpl1 = lookup_index(i_rho, i_z+1, n1, n2);
i_zpl2 = lookup_index(i_rho, i_z+2, n1, n2);
float lookup_r = cubic_interpolation(w_z, lookup[i_zmn1], lookup[i], lookup[i_zpl1], lookup[i_zpl2]);
i_zmn1 = lookup_index(i_rho+1, i_z-1, n1, n2);
i = lookup_index(i_rho+1, i_z, n1, n2);
i_zpl1 = lookup_index(i_rho+1, i_z+1, n1, n2);
i_zpl2 = lookup_index(i_rho+1, i_z+2, n1, n2);
float lookup_rpl1 = cubic_interpolation(w_z, lookup[i_zmn1], lookup[i], lookup[i_zpl1], lookup[i_zpl2]);
i_zmn1 = lookup_index(i_rho+2, i_z-1, n1, n2);
i = lookup_index(i_rho+2, i_z, n1, n2);
i_zpl1 = lookup_index(i_rho+2, i_z+1, n1, n2);
i_zpl2 = lookup_index(i_rho+2, i_z+2, n1, n2);
float lookup_rpl2 = cubic_interpolation(w_z, lookup[i_zmn1], lookup[i], lookup[i_zpl1], lookup[i_zpl2]);
return cubic_interpolation(w_rho, lookup_rmn1, lookup_r, lookup_rpl1, lookup_rpl2);
}
__global__ void coupling_kernel(const int *n, const float *m, const float *x_pos, const float *y_pos,
const float *z_pos, const float *re_lookup_pl, const float *im_lookup_pl,
const float *re_lookup_mn, const float *im_lookup_mn,
const float *re_in_vec, const float *im_in_vec,
float *re_result, float *im_result)
{
unsigned int i1 = blockIdx.x * blockDim.x + threadIdx.x;
if(i1 >= NUMBER_OF_UNKNOWNS) return;
const float x1 = x_pos[i1];
const float y1 = y_pos[i1];
const float z1 = z_pos[i1];
const int n1 = n[i1];
const float m1 = m[i1];
re_result[i1] = 0.0;
im_result[i1] = 0.0;
for (int i2=0; i2<NUMBER_OF_UNKNOWNS; i2++)
{
float x21 = x1 - x_pos[i2];
float y21 = y1 - y_pos[i2];
float sz21 = z1 + z_pos[i2];
float dz21 = z1 - z_pos[i2];
const int n2 = n[i2];
const float m2 = m[i2];
float rho = sqrt(x21*x21+y21*y21);
float phi = atan2(y21,x21);
int rho_idx = (int) floor((rho - MIN_RHO) / LOOKUP_RESOLUTION);
float rho_w = (rho - MIN_RHO) / LOOKUP_RESOLUTION - floor((rho - MIN_RHO) / LOOKUP_RESOLUTION);
int sz_idx = (int) floor((sz21 - MIN_Z_SUM) / LOOKUP_RESOLUTION);
float sz_w = (sz21 - MIN_Z_SUM) / LOOKUP_RESOLUTION - floor((sz21 - MIN_Z_SUM) / LOOKUP_RESOLUTION);
int dz_idx = (int) floor((dz21 - MIN_Z_DIFF) / LOOKUP_RESOLUTION);
float dz_w = (dz21 - MIN_Z_DIFF) / LOOKUP_RESOLUTION
- floor((dz21 - MIN_Z_DIFF) / LOOKUP_RESOLUTION);
float re_si_pl = lookup_2D(rho_idx, rho_w, sz_idx, sz_w, n1, n2, re_lookup_pl);
float im_si_pl = lookup_2D(rho_idx, rho_w, sz_idx, sz_w, n1, n2, im_lookup_pl);
float re_si_mn = lookup_2D(rho_idx, rho_w, dz_idx, dz_w, n1, n2, re_lookup_mn);
float im_si_mn = lookup_2D(rho_idx, rho_w, dz_idx, dz_w, n1, n2, im_lookup_mn);
float re_eimphi = cosf((m2 - m1) * phi);
float im_eimphi = sinf((m2 - m1) * phi);
float re_w = re_eimphi * (re_si_pl + re_si_mn) - im_eimphi * (im_si_pl + im_si_mn);
float im_w = im_eimphi * (re_si_pl + re_si_mn) + re_eimphi * (im_si_pl + im_si_mn);
re_result[i1] += re_w * re_in_vec[i2] - im_w * im_in_vec[i2];
im_result[i1] += re_w * im_in_vec[i2] + im_w * re_in_vec[i2];
}
}"""
# This cuda kernel multiplies the coupling matrix to a vector. It is based on linear interpolation of the lookup
# table.
#
# input arguments of the cuda kernel:
# n (np.uint32): n[i] contains the multipole multi-index with regard to self.l_max and self.m_max of the i-th
# entry of a system vector
# m (np.float32): m[i] contains the multipole order with regard to particle.l_max and particle.m_max
# x_pos (np.float32): x_pos[i] contains the respective particle x-position
# y_pos (np.float32): y_pos[i] contains the respective particle y-position
# re_lookup (np.float32): the real part of the lookup table, in the format [r, n1, n2]
# im_lookup (np.float32): the imaginary part of the lookup table, in the format [r, n1, n2]
# re_in_vec (np.float32): the real part of the vector to be multiplied with the coupling matrix
# im_in_vec (np.float32): the imaginary part of the vector to be multiplied with the coupling matrix
# re_result_vec (np.float32): the real part of the vector into which the result is written
# im_result_vec (np.float32): the imaginary part of the vector into which the result is written
# CUDA C source for the coupling-matrix/vector product using LINEAR
# interpolation of a one-dimensional radial lookup table (indexed by the
# in-plane distance r and the multipole indices n1, n2).
# The %i / %f placeholders (BLOCKSIZE, NUMBER_OF_UNKNOWNS, MIN_RHO,
# LOOKUP_RESOLUTION) are filled in via Python %-formatting before compilation.
linear_radial_lookup_source = """
#define BLOCKSIZE %i
#define NUMBER_OF_UNKNOWNS %i
#define MIN_RHO %f
#define LOOKUP_RESOLUTION %f
__global__ void coupling_kernel(const int *n, const float *m, const float *x_pos, const float *y_pos,
const float *re_lookup, const float *im_lookup, const float *re_in_vec,
const float *im_in_vec, float *re_result, float *im_result)
{
unsigned int i1 = blockIdx.x * blockDim.x + threadIdx.x;
if(i1 >= NUMBER_OF_UNKNOWNS) return;
const float x1 = x_pos[i1];
const float y1 = y_pos[i1];
const int n1 = n[i1];
const float m1 = m[i1];
re_result[i1] = 0.0;
im_result[i1] = 0.0;
for (int i2=0; i2<NUMBER_OF_UNKNOWNS; i2++)
{
float x21 = x1 - x_pos[i2];
float y21 = y1 - y_pos[i2];
const int n2 = n[i2];
const float m2 = m[i2];
float r = sqrt(x21*x21+y21*y21);
float phi = atan2(y21,x21);
int r_idx = (int) floor((r - MIN_RHO) / LOOKUP_RESOLUTION);
float w = (r - MIN_RHO) / LOOKUP_RESOLUTION - floor((r - MIN_RHO) / LOOKUP_RESOLUTION);
int idx = r_idx * BLOCKSIZE * BLOCKSIZE + n1 * BLOCKSIZE + n2;
int idx_pl_1 = (r_idx + 1) * BLOCKSIZE * BLOCKSIZE + n1 * BLOCKSIZE + n2;
float re_si = re_lookup[idx] + w * (re_lookup[idx_pl_1] - re_lookup[idx]);
float im_si = im_lookup[idx] + w * (im_lookup[idx_pl_1] - im_lookup[idx]);
float re_eimphi = cosf((m2 - m1) * phi);
float im_eimphi = sinf((m2 - m1) * phi);
float re_w = re_eimphi * re_si - im_eimphi * im_si;
float im_w = im_eimphi * re_si + re_eimphi * im_si;
re_result[i1] += re_w * re_in_vec[i2] - im_w * im_in_vec[i2];
im_result[i1] += re_w * im_in_vec[i2] + im_w * re_in_vec[i2];
}
}"""
# This cuda kernel multiplies the coupling matrix to a vector. It is based on cubic Hermite spline interpolation of the
# lookup table.
#
# input arguments of the cuda kernel:
# n (np.uint32): n[i] contains the mutlipole multi-index with regard to self.l_max and self.m_max of the i-th
# entry of a system vector
# m (np.float32): m[i] contains the multipole order with regard to particle.l_max and particle.m_max
# x_pos (np.float32): x_pos[i] contains the respective particle x-position
# y_pos (np.float32): y_pos[i] contains the respective particle y-position
# re_lookup (np.float32): the real part of the lookup table, in the format [r, n1, n2]
# im_lookup (np.float32): the imaginary part of the lookup table, in the format [r, n1, n2]
# re_in_vec (np.float32): the real part of the vector to be multiplied with the coupling matrix
# im_in_vec (np.float32): the imaginary part of the vector to be multiplied with the coupling matrix
# re_result_vec (np.float32): the real part of the vector into which the result is written
# im_result_vec (np.float32): the imaginary part of the vector into which the result is written
# CUDA C source for the coupling-matrix/vector product using CUBIC
# interpolation of the one-dimensional radial lookup table.
# Same placeholder scheme as linear_radial_lookup_source (%i / %f filled in
# via Python %-formatting).  The cubic stencil reads table rows r_idx-1 to
# r_idx+2, so query radii must stay one grid cell inside the table edges;
# no bounds check is performed in the kernel.
cubic_radial_lookup_source = """
#define BLOCKSIZE %i
#define NUMBER_OF_UNKNOWNS %i
#define MIN_RHO %f
#define LOOKUP_RESOLUTION %f
__device__ float cubic_interpolation(float const w, float const lookup_imn1, float const lookup_i,
float const lookup_ipl1, float const lookup_ipl2)
{
return ((-w*w*w+2*w*w-w) * lookup_imn1 + (3*w*w*w-5*w*w+2) * lookup_i + (-3*w*w*w+4*w*w+w) * lookup_ipl1
+ (w*w*w-w*w) * lookup_ipl2) / 2;
}
__global__ void coupling_kernel(const int *n, const float *m, const float *x_pos, const float *y_pos,
const float *re_lookup, const float *im_lookup, const float *re_in_vec,
const float *im_in_vec, float *re_result, float *im_result)
{
unsigned int i1 = blockIdx.x * blockDim.x + threadIdx.x;
if(i1 >= NUMBER_OF_UNKNOWNS) return;
const float x1 = x_pos[i1];
const float y1 = y_pos[i1];
const int n1 = n[i1];
const float m1 = m[i1];
re_result[i1] = 0.0;
im_result[i1] = 0.0;
for (int i2=0; i2<NUMBER_OF_UNKNOWNS; i2++)
{
float x21 = x1 - x_pos[i2];
float y21 = y1 - y_pos[i2];
const int n2 = n[i2];
const float m2 = m[i2];
float r = sqrt(x21*x21+y21*y21);
float phi = atan2(y21,x21);
int r_idx = (int) floor((r - MIN_RHO) / LOOKUP_RESOLUTION);
float w = (r - MIN_RHO) / LOOKUP_RESOLUTION - floor((r - MIN_RHO) / LOOKUP_RESOLUTION);
int idx_mn1 = (r_idx - 1) * BLOCKSIZE * BLOCKSIZE + n1 * BLOCKSIZE + n2;
int idx = r_idx * BLOCKSIZE * BLOCKSIZE + n1 * BLOCKSIZE + n2;
int idx_pl1 = (r_idx + 1) * BLOCKSIZE * BLOCKSIZE + n1 * BLOCKSIZE + n2;
int idx_pl2 = (r_idx + 2) * BLOCKSIZE * BLOCKSIZE + n1 * BLOCKSIZE + n2;
float re_si = cubic_interpolation(w, re_lookup[idx_mn1], re_lookup[idx], re_lookup[idx_pl1],
re_lookup[idx_pl2]);
float im_si = cubic_interpolation(w, im_lookup[idx_mn1], im_lookup[idx], im_lookup[idx_pl1],
im_lookup[idx_pl2]);
float re_eimphi = cosf((m2 - m1) * phi);
float im_eimphi = sinf((m2 - m1) * phi);
float re_w = re_eimphi * re_si - im_eimphi * im_si;
float im_w = im_eimphi * re_si + re_eimphi * im_si;
re_result[i1] += re_w * re_in_vec[i2] - im_w * im_in_vec[i2];
im_result[i1] += re_w * im_in_vec[i2] + im_w * re_in_vec[i2];
}
}"""
| 50.960591
| 121
| 0.607685
| 3,253
| 20,690
| 3.609591
| 0.047648
| 0.044285
| 0.027593
| 0.021802
| 0.946517
| 0.936382
| 0.935871
| 0.927184
| 0.891756
| 0.883069
| 0
| 0.042517
| 0.291783
| 20,690
| 405
| 122
| 51.08642
| 0.758821
| 0.276462
| 0
| 0.661355
| 0
| 0.055777
| 0.98489
| 0.077899
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0.015936
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a285633b0cb7f37c4198b7cbbc1b305a27656951
| 533,150
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ncs5500_qos_oper.py
|
bopopescu/ACI
|
dd717bc74739eeed4747b3ea9e36b239580df5e1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ncs5500_qos_oper.py
|
bopopescu/ACI
|
dd717bc74739eeed4747b3ea9e36b239580df5e1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ncs5500_qos_oper.py
|
bopopescu/ACI
|
dd717bc74739eeed4747b3ea9e36b239580df5e1
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2020-07-22T04:04:44.000Z
|
2020-07-22T04:04:44.000Z
|
""" Cisco_IOS_XR_ncs5500_qos_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR ncs5500\-qos package operational data.
This module contains definitions
for the following management objects\:
platform\-qos\: DNX QoS EA operational data
Copyright (c) 2013\-2017 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class DnxQoseaShowAction(Enum):
    """
    DnxQoseaShowAction (Enum Class)

    Policer action type

    .. data:: action_none = 0

        None

    .. data:: action_transmit = 1

        Transmit

    .. data:: action_drop = 2

        Drop

    .. data:: action_mark = 3

        Mark

    """

    # Each member is a ydk YLeaf pairing the numeric value with its YANG name.
    action_none = Enum.YLeaf(0, "action-none")

    action_transmit = Enum.YLeaf(1, "action-transmit")

    action_drop = Enum.YLeaf(2, "action-drop")

    action_mark = Enum.YLeaf(3, "action-mark")
class DnxQoseaShowHpLevel(Enum):
    """
    DnxQoseaShowHpLevel (Enum Class)

    Priority level

    .. data:: high_priority_level1 = 0

        High priority queue level 1

    .. data:: high_priority_level2 = 1

        High priority queue level 2

    .. data:: high_priority_level3 = 2

        High priority queue level 3

    .. data:: high_priority_level4 = 3

        High priority queue level 4

    .. data:: high_priority_level5 = 4

        High priority queue level 5

    .. data:: high_priority_level6 = 5

        High priority queue level 6

    .. data:: high_priority_level7 = 6

        High priority queue level 7

    .. data:: unknown = 7

        Unknown

    """

    # Each member is a ydk YLeaf pairing the numeric value with its YANG name.
    high_priority_level1 = Enum.YLeaf(0, "high-priority-level1")

    high_priority_level2 = Enum.YLeaf(1, "high-priority-level2")

    high_priority_level3 = Enum.YLeaf(2, "high-priority-level3")

    high_priority_level4 = Enum.YLeaf(3, "high-priority-level4")

    high_priority_level5 = Enum.YLeaf(4, "high-priority-level5")

    high_priority_level6 = Enum.YLeaf(5, "high-priority-level6")

    high_priority_level7 = Enum.YLeaf(6, "high-priority-level7")

    unknown = Enum.YLeaf(7, "unknown")
class DnxQoseaShowIntfStatus(Enum):
    """
    DnxQoseaShowIntfStatus (Enum Class)

    Intf Status

    .. data:: state_unknown = 0

        State is unknown

    .. data:: state_down = 1

        State is Down

    """

    # Each member is a ydk YLeaf pairing the numeric value with its YANG name.
    state_unknown = Enum.YLeaf(0, "state-unknown")

    state_down = Enum.YLeaf(1, "state-down")
class DnxQoseaShowLevel(Enum):
    """
    DnxQoseaShowLevel (Enum Class)

    Level type

    .. data:: level1 = 0

        QoS level1 class

    .. data:: level2 = 1

        QoS level2 class

    .. data:: level3 = 2

        QoS level3 class

    .. data:: level4 = 3

        QoS level4 class

    .. data:: level5 = 4

        QoS level5 class

    """

    # Each member is a ydk YLeaf pairing the numeric value with its YANG name.
    level1 = Enum.YLeaf(0, "level1")

    level2 = Enum.YLeaf(1, "level2")

    level3 = Enum.YLeaf(2, "level3")

    level4 = Enum.YLeaf(3, "level4")

    level5 = Enum.YLeaf(4, "level5")
class DnxQoseaShowMark(Enum):
    """
    DnxQoseaShowMark (Enum Class)

    Mark type

    .. data:: mark_none = 0

        None

    .. data:: dscp = 1

        DSCP

    .. data:: precedence = 2

        Precedence

    .. data:: mpls_topmost = 3

        MPLS topmost

    .. data:: mpls_imposition = 4

        MPLS imposition

    .. data:: qos_group = 5

        Qos group

    .. data:: discard_class = 6

        Discard class

    .. data:: cos = 7

        COS

    .. data:: inner_cos = 8

        Inner COS

    .. data:: un_supported9 = 9

        Unsupported type 9

    .. data:: un_supported10 = 10

        Unsupported type 10

    .. data:: dscp_tunnel = 11

        DSCP tunnel

    .. data:: precedence_tunnel = 12

        Precedence tunnel

    .. data:: dei = 13

        DEI

    .. data:: dei_imposition = 14

        DEI Imposition

    """

    # Each member is a ydk YLeaf pairing the numeric value with its YANG name.
    mark_none = Enum.YLeaf(0, "mark-none")

    dscp = Enum.YLeaf(1, "dscp")

    precedence = Enum.YLeaf(2, "precedence")

    mpls_topmost = Enum.YLeaf(3, "mpls-topmost")

    mpls_imposition = Enum.YLeaf(4, "mpls-imposition")

    qos_group = Enum.YLeaf(5, "qos-group")

    discard_class = Enum.YLeaf(6, "discard-class")

    cos = Enum.YLeaf(7, "cos")

    inner_cos = Enum.YLeaf(8, "inner-cos")

    un_supported9 = Enum.YLeaf(9, "un-supported9")

    un_supported10 = Enum.YLeaf(10, "un-supported10")

    dscp_tunnel = Enum.YLeaf(11, "dscp-tunnel")

    precedence_tunnel = Enum.YLeaf(12, "precedence-tunnel")

    dei = Enum.YLeaf(13, "dei")

    dei_imposition = Enum.YLeaf(14, "dei-imposition")
class DnxQoseaShowPolicyStatus(Enum):
    """
    DnxQoseaShowPolicyStatus (Enum Class)

    Status

    .. data:: no_error = 0

        No errors

    .. data:: policy_in_reset = 1

        QoS policy is reset

    """

    # Each member is a ydk YLeaf pairing the numeric value with its YANG name.
    no_error = Enum.YLeaf(0, "no-error")

    policy_in_reset = Enum.YLeaf(1, "policy-in-reset")
class DnxQoseaShowQueue(Enum):
    """
    DnxQoseaShowQueue (Enum Class)

    Priority Queue Type

    .. data:: low_priority_default_queue = 0

        Low priority default queue

    .. data:: low_priority_queue = 1

        Low priority queue

    .. data:: high_priority_queue = 2

        High priority queue

    .. data:: unknown_queue_type = 3

        Queue priority unknown

    """

    # Each member is a ydk YLeaf pairing the numeric value with its YANG name.
    low_priority_default_queue = Enum.YLeaf(0, "low-priority-default-queue")

    low_priority_queue = Enum.YLeaf(1, "low-priority-queue")

    high_priority_queue = Enum.YLeaf(2, "high-priority-queue")

    unknown_queue_type = Enum.YLeaf(3, "unknown-queue-type")
class DnxQoseaShowWred(Enum):
    """
    DnxQoseaShowWred (Enum Class)

    WRED type

    .. data:: wred_cos = 0

        WRED based on COS

    .. data:: wred_dscp = 1

        WRED based on DSCP

    .. data:: wred_precedence = 2

        WRED based on Precedence

    .. data:: wred_discard_class = 3

        WRED based on discard class

    .. data:: wred_mpls_exp = 4

        WRED based on MPLS EXP

    .. data:: red_with_user_min_max = 5

        RED with user defined min and max

    .. data:: red_with_default_min_max = 6

        RED with default min and max

    .. data:: wred_invalid = 7

        Invalid

    """

    # Each member is a ydk YLeaf pairing the numeric value with its YANG name.
    wred_cos = Enum.YLeaf(0, "wred-cos")

    wred_dscp = Enum.YLeaf(1, "wred-dscp")

    wred_precedence = Enum.YLeaf(2, "wred-precedence")

    wred_discard_class = Enum.YLeaf(3, "wred-discard-class")

    wred_mpls_exp = Enum.YLeaf(4, "wred-mpls-exp")

    red_with_user_min_max = Enum.YLeaf(5, "red-with-user-min-max")

    red_with_default_min_max = Enum.YLeaf(6, "red-with-default-min-max")

    wred_invalid = Enum.YLeaf(7, "wred-invalid")
class PolicyParamUnit(Enum):
    """
    PolicyParamUnit (Enum Class)

    Policy param unit

    .. data:: policy_param_unit_invalid = 0

        policy param unit invalid

    .. data:: policy_param_unit_bytes = 1

        policy param unit bytes

    .. data:: policy_param_unit_kbytes = 2

        policy param unit kbytes

    .. data:: policy_param_unit_mbytes = 3

        policy param unit mbytes

    .. data:: policy_param_unit_gbytes = 4

        policy param unit gbytes

    .. data:: policy_param_unit_bitsps = 5

        policy param unit bitsps

    .. data:: policy_param_unit_kbitsps = 6

        policy param unit kbitsps

    .. data:: policy_param_unit_mbitsps = 7

        policy param unit mbitsps

    .. data:: policy_param_unit_gbitsps = 8

        policy param unit gbitsps

    .. data:: policy_param_unit_cells_ps = 9

        policy param unit cells ps

    .. data:: policy_param_unit_packets_ps = 10

        policy param unit packets ps

    .. data:: policy_param_unit_us = 11

        policy param unit us

    .. data:: policy_param_unit_ms = 12

        policy param unit ms

    .. data:: policy_param_unit_seconds = 13

        policy param unit seconds

    .. data:: policy_param_unit_packets = 14

        policy param unit packets

    .. data:: policy_param_unit_cells = 15

        policy param unit cells

    .. data:: policy_param_unit_percent = 16

        policy param unit percent

    .. data:: policy_param_unit_per_thousand = 17

        policy param unit per thousand

    .. data:: policy_param_unit_per_million = 18

        policy param unit per million

    .. data:: policy_param_unit_hz = 19

        policy param unit hz

    .. data:: policy_param_unit_khz = 20

        policy param unit khz

    .. data:: policy_param_unit_mhz = 21

        policy param unit mhz

    .. data:: policy_param_unit_ratio = 22

        policy param unit ratio

    .. data:: policy_param_unit_max = 23

        policy param unit max

    """

    # Each member is a ydk YLeaf pairing the numeric value with its YANG name.
    policy_param_unit_invalid = Enum.YLeaf(0, "policy-param-unit-invalid")

    policy_param_unit_bytes = Enum.YLeaf(1, "policy-param-unit-bytes")

    policy_param_unit_kbytes = Enum.YLeaf(2, "policy-param-unit-kbytes")

    policy_param_unit_mbytes = Enum.YLeaf(3, "policy-param-unit-mbytes")

    policy_param_unit_gbytes = Enum.YLeaf(4, "policy-param-unit-gbytes")

    policy_param_unit_bitsps = Enum.YLeaf(5, "policy-param-unit-bitsps")

    policy_param_unit_kbitsps = Enum.YLeaf(6, "policy-param-unit-kbitsps")

    policy_param_unit_mbitsps = Enum.YLeaf(7, "policy-param-unit-mbitsps")

    policy_param_unit_gbitsps = Enum.YLeaf(8, "policy-param-unit-gbitsps")

    policy_param_unit_cells_ps = Enum.YLeaf(9, "policy-param-unit-cells-ps")

    policy_param_unit_packets_ps = Enum.YLeaf(10, "policy-param-unit-packets-ps")

    policy_param_unit_us = Enum.YLeaf(11, "policy-param-unit-us")

    policy_param_unit_ms = Enum.YLeaf(12, "policy-param-unit-ms")

    policy_param_unit_seconds = Enum.YLeaf(13, "policy-param-unit-seconds")

    policy_param_unit_packets = Enum.YLeaf(14, "policy-param-unit-packets")

    policy_param_unit_cells = Enum.YLeaf(15, "policy-param-unit-cells")

    policy_param_unit_percent = Enum.YLeaf(16, "policy-param-unit-percent")

    policy_param_unit_per_thousand = Enum.YLeaf(17, "policy-param-unit-per-thousand")

    policy_param_unit_per_million = Enum.YLeaf(18, "policy-param-unit-per-million")

    policy_param_unit_hz = Enum.YLeaf(19, "policy-param-unit-hz")

    policy_param_unit_khz = Enum.YLeaf(20, "policy-param-unit-khz")

    policy_param_unit_mhz = Enum.YLeaf(21, "policy-param-unit-mhz")

    policy_param_unit_ratio = Enum.YLeaf(22, "policy-param-unit-ratio")

    policy_param_unit_max = Enum.YLeaf(23, "policy-param-unit-max")
class QosPolicyAccountEnum(Enum):
    """
    QosPolicyAccountEnum (Enum Class)

    Qos policy account enum

    .. data:: qos_serv_policy_no_ac_count_pref = 0

        qos serv policy no ac count pref

    .. data:: qos_serv_policy_ac_count_l2 = 1

        qos serv policy ac count l2

    .. data:: qos_serv_policy_no_ac_count_l2 = 2

        qos serv policy no ac count l2

    .. data:: qos_serv_policy_ac_count_user_def = 3

        qos serv policy ac count user def

    .. data:: qos_serv_policy_ac_count_l1 = 4

        qos serv policy ac count l1

    """

    # Each member is a ydk YLeaf pairing the numeric value with its YANG name.
    qos_serv_policy_no_ac_count_pref = Enum.YLeaf(0, "qos-serv-policy-no-ac-count-pref")

    qos_serv_policy_ac_count_l2 = Enum.YLeaf(1, "qos-serv-policy-ac-count-l2")

    qos_serv_policy_no_ac_count_l2 = Enum.YLeaf(2, "qos-serv-policy-no-ac-count-l2")

    qos_serv_policy_ac_count_user_def = Enum.YLeaf(3, "qos-serv-policy-ac-count-user-def")

    qos_serv_policy_ac_count_l1 = Enum.YLeaf(4, "qos-serv-policy-ac-count-l1")
class PlatformQos(Entity):
"""
DNX QoS EA operational data
.. attribute:: nodes
List of nodes with platform specific QoS configuration
**type**\: :py:class:`Nodes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos, self).__init__()
self._top_entity = None
self.yang_name = "platform-qos"
self.yang_parent_name = "Cisco-IOS-XR-ncs5500-qos-oper"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_container_classes = OrderedDict([("nodes", ("nodes", PlatformQos.Nodes))])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict()
self.nodes = PlatformQos.Nodes()
self.nodes.parent = self
self._children_name_map["nodes"] = "nodes"
self._children_yang_names.add("nodes")
self._segment_path = lambda: "Cisco-IOS-XR-ncs5500-qos-oper:platform-qos"
class Nodes(Entity):
"""
List of nodes with platform specific QoS
configuration
.. attribute:: node
Node with platform specific QoS configuration
**type**\: list of :py:class:`Node <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the 'nodes' container holding the keyed list of Node entries."""
    super(PlatformQos.Nodes, self).__init__()
    self.yang_name = "nodes"
    self.yang_parent_name = "platform-qos"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    self._child_list_classes = OrderedDict([("node", ("node", PlatformQos.Nodes.Node))])
    self._leafs = OrderedDict()
    # YList keeps the list entries bound to this parent container.
    self.node = YList(self)
    self._segment_path = lambda: "nodes"
    # Absolute path is derived from the fixed top-level container path.
    self._absolute_path = lambda: "Cisco-IOS-XR-ncs5500-qos-oper:platform-qos/%s" % self._segment_path()
def __setattr__(self, name, value):
    """Route every attribute write through YDK validation for this container."""
    # This container has no leaf attributes, hence the empty leaf-name list.
    leaf_names = []
    self._perform_setattr(PlatformQos.Nodes, leaf_names, name, value)
class Node(Entity):
"""
Node with platform specific QoS configuration
.. attribute:: node_name (key)
Node name
**type**\: str
**pattern:** ([a\-zA\-Z0\-9\_]\*\\d+/){1,2}([a\-zA\-Z0\-9\_]\*\\d+)
.. attribute:: bundle_interfaces
QoS list of bundle interfaces
**type**\: :py:class:`BundleInterfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces>`
.. attribute:: interfaces
QoS list of interfaces
**type**\: :py:class:`Interfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces>`
.. attribute:: bundle_interface_singles
QoS list of bundle interfaces
**type**\: :py:class:`BundleInterfaceSingles <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles>`
.. attribute:: remote_interfaces
QoS list of remote interfaces
**type**\: :py:class:`RemoteInterfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.RemoteInterfaces>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize one per-node QoS entry, keyed by 'node_name'."""
    super(PlatformQos.Nodes.Node, self).__init__()
    self.yang_name = "node"
    self.yang_parent_name = "nodes"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # 'node_name' is the YANG list key for this entry.
    self.ylist_key_names = ['node_name']
    self._child_container_classes = OrderedDict([("bundle-interfaces", ("bundle_interfaces", PlatformQos.Nodes.Node.BundleInterfaces)), ("interfaces", ("interfaces", PlatformQos.Nodes.Node.Interfaces)), ("bundle-interface-singles", ("bundle_interface_singles", PlatformQos.Nodes.Node.BundleInterfaceSingles)), ("remote-interfaces", ("remote_interfaces", PlatformQos.Nodes.Node.RemoteInterfaces))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict([
        ('node_name', YLeaf(YType.str, 'node-name')),
    ])
    self.node_name = None
    # Instantiate each child container and wire it back to this parent.
    self.bundle_interfaces = PlatformQos.Nodes.Node.BundleInterfaces()
    self.bundle_interfaces.parent = self
    self._children_name_map["bundle_interfaces"] = "bundle-interfaces"
    self._children_yang_names.add("bundle-interfaces")
    self.interfaces = PlatformQos.Nodes.Node.Interfaces()
    self.interfaces.parent = self
    self._children_name_map["interfaces"] = "interfaces"
    self._children_yang_names.add("interfaces")
    self.bundle_interface_singles = PlatformQos.Nodes.Node.BundleInterfaceSingles()
    self.bundle_interface_singles.parent = self
    self._children_name_map["bundle_interface_singles"] = "bundle-interface-singles"
    self._children_yang_names.add("bundle-interface-singles")
    self.remote_interfaces = PlatformQos.Nodes.Node.RemoteInterfaces()
    self.remote_interfaces.parent = self
    self._children_name_map["remote_interfaces"] = "remote-interfaces"
    self._children_yang_names.add("remote-interfaces")
    # The key predicate is embedded in the segment path for list entries.
    self._segment_path = lambda: "node" + "[node-name='" + str(self.node_name) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-ncs5500-qos-oper:platform-qos/nodes/%s" % self._segment_path()
def __setattr__(self, name, value):
    """Delegate attribute writes to the YDK runtime for validation.

    'node_name' is the key leaf of this list entry.
    """
    key_leafs = ['node_name']
    self._perform_setattr(PlatformQos.Nodes.Node, key_leafs, name, value)
class BundleInterfaces(Entity):
"""
QoS list of bundle interfaces
.. attribute:: bundle_interface
QoS interface names
**type**\: list of :py:class:`BundleInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the 'bundle-interfaces' container holding BundleInterface entries."""
    super(PlatformQos.Nodes.Node.BundleInterfaces, self).__init__()
    self.yang_name = "bundle-interfaces"
    self.yang_parent_name = "node"
    self.is_top_level_class = False
    # An ancestor ('node') is a list, so no absolute path can be precomputed.
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    self._child_list_classes = OrderedDict([("bundle-interface", ("bundle_interface", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface))])
    self._leafs = OrderedDict()
    self.bundle_interface = YList(self)
    self._segment_path = lambda: "bundle-interfaces"
def __setattr__(self, name, value):
    """Intercept attribute writes so the YDK core can validate them."""
    # No leaf attributes exist on this container.
    no_leafs = []
    self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces, no_leafs, name, value)
class BundleInterface(Entity):
"""
QoS interface names
.. attribute:: interface_name
Bundle interface name
**type**\: str
**pattern:** [a\-zA\-Z0\-9./\-]+
.. attribute:: npu_id
NPU ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: qos_direction
The interface direction on which QoS is applied to
**type**\: str
.. attribute:: policy_details
Policy Details
**type**\: :py:class:`PolicyDetails <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.PolicyDetails>`
.. attribute:: member_interfaces
QoS list of member interfaces
**type**\: :py:class:`MemberInterfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces>`
.. attribute:: classes
QoS list of class names
**type**\: :py:class:`Classes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize one bundle-interface entry with its leafs and child containers."""
    super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface, self).__init__()
    self.yang_name = "bundle-interface"
    self.yang_parent_name = "bundle-interfaces"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # NOTE(review): this YANG list apparently has no key (empty key names,
    # keyless segment path below) — unusual but consistent with the model.
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([("policy-details", ("policy_details", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.PolicyDetails)), ("member-interfaces", ("member_interfaces", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces)), ("classes", ("classes", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes))])
    self._child_list_classes = OrderedDict([])
    # Leaf descriptors: python attribute name -> (YANG type, YANG leaf name).
    self._leafs = OrderedDict([
        ('interface_name', YLeaf(YType.str, 'interface-name')),
        ('npu_id', YLeaf(YType.int32, 'npu-id')),
        ('qos_direction', YLeaf(YType.str, 'qos-direction')),
    ])
    self.interface_name = None
    self.npu_id = None
    self.qos_direction = None
    self.policy_details = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.PolicyDetails()
    self.policy_details.parent = self
    self._children_name_map["policy_details"] = "policy-details"
    self._children_yang_names.add("policy-details")
    self.member_interfaces = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces()
    self.member_interfaces.parent = self
    self._children_name_map["member_interfaces"] = "member-interfaces"
    self._children_yang_names.add("member-interfaces")
    self.classes = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes()
    self.classes.parent = self
    self._children_name_map["classes"] = "classes"
    self._children_yang_names.add("classes")
    self._segment_path = lambda: "bundle-interface"
def __setattr__(self, name, value):
    """Validate writes to this entity's attributes via the YDK runtime."""
    tracked_leafs = ['interface_name', 'npu_id', 'qos_direction']
    self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface, tracked_leafs, name, value)
class PolicyDetails(Entity):
    """
    Policy Details
    .. attribute:: npu_id
    NPU ID
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: interface_handle
    InterfaceHandle
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: interface_bandwidth_kbps
    Interface Bandwidth (in kbps)
    **type**\: int
    **range:** 0..4294967295
    **units**\: kbit/s
    .. attribute:: policy_name
    Policy name
    **type**\: str
    **length:** 0..64
    .. attribute:: total_number_of_classes
    Number of Classes
    **type**\: int
    **range:** 0..65535
    .. attribute:: voq_base_address
    VOQ base address
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: voq_stats_handle
    VOQ stats handle
    **type**\: int
    **range:** 0..18446744073709551615
    .. attribute:: stats_accounting_type
    QoS Statistics Accounting Type
    **type**\: :py:class:`QosPolicyAccountEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.QosPolicyAccountEnum>`
    .. attribute:: policy_status
    Policy Status
    **type**\: :py:class:`DnxQoseaShowPolicyStatus <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowPolicyStatus>`
    .. attribute:: interface_status
    Interface Status
    **type**\: :py:class:`DnxQoseaShowIntfStatus <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowIntfStatus>`
    """
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'
    def __init__(self):
        """Initialize the leaf-only 'policy-details' container for a bundle interface."""
        super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.PolicyDetails, self).__init__()
        self.yang_name = "policy-details"
        self.yang_parent_name = "bundle-interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Pure leaf container: no child containers or child lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('npu_id', YLeaf(YType.uint32, 'npu-id')),
            ('interface_handle', YLeaf(YType.uint32, 'interface-handle')),
            ('interface_bandwidth_kbps', YLeaf(YType.uint32, 'interface-bandwidth-kbps')),
            ('policy_name', YLeaf(YType.str, 'policy-name')),
            ('total_number_of_classes', YLeaf(YType.uint16, 'total-number-of-classes')),
            ('voq_base_address', YLeaf(YType.uint32, 'voq-base-address')),
            ('voq_stats_handle', YLeaf(YType.uint64, 'voq-stats-handle')),
            ('stats_accounting_type', YLeaf(YType.enumeration, 'stats-accounting-type')),
            ('policy_status', YLeaf(YType.enumeration, 'policy-status')),
            ('interface_status', YLeaf(YType.enumeration, 'interface-status')),
        ])
        # All leaf values start unset; the YDK codec fills them on decode.
        self.npu_id = None
        self.interface_handle = None
        self.interface_bandwidth_kbps = None
        self.policy_name = None
        self.total_number_of_classes = None
        self.voq_base_address = None
        self.voq_stats_handle = None
        self.stats_accounting_type = None
        self.policy_status = None
        self.interface_status = None
        self._segment_path = lambda: "policy-details"
    def __setattr__(self, name, value):
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.PolicyDetails, ['npu_id', 'interface_handle', 'interface_bandwidth_kbps', 'policy_name', 'total_number_of_classes', 'voq_base_address', 'voq_stats_handle', 'stats_accounting_type', 'policy_status', 'interface_status'], name, value)
class MemberInterfaces(Entity):
"""
QoS list of member interfaces
.. attribute:: member_interface
QoS interface names
**type**\: list of :py:class:`MemberInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the 'member-interfaces' container holding MemberInterface entries."""
    super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces, self).__init__()
    self.yang_name = "member-interfaces"
    self.yang_parent_name = "bundle-interface"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    self._child_list_classes = OrderedDict([("member-interface", ("member_interface", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface))])
    self._leafs = OrderedDict()
    # YList keeps the list entries bound to this parent container.
    self.member_interface = YList(self)
    self._segment_path = lambda: "member-interfaces"
def __setattr__(self, name, value):
    """Forward all attribute writes through YDK validation."""
    leafs = []  # container has no leaf attributes of its own
    self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces, leafs, name, value)
class MemberInterface(Entity):
"""
QoS interface names
.. attribute:: interface_name (key)
Member interface
**type**\: str
**pattern:** [a\-zA\-Z0\-9./\-]+
.. attribute:: policy_details
Policy Details
**type**\: :py:class:`PolicyDetails <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.PolicyDetails>`
.. attribute:: classes
QoS list of class names
**type**\: :py:class:`Classes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize one member-interface list entry, keyed by 'interface_name'."""
    super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface, self).__init__()
    self.yang_name = "member-interface"
    self.yang_parent_name = "member-interfaces"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # 'interface_name' is the YANG list key for this entry.
    self.ylist_key_names = ['interface_name']
    self._child_container_classes = OrderedDict([("policy-details", ("policy_details", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.PolicyDetails)), ("classes", ("classes", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict([
        ('interface_name', YLeaf(YType.str, 'interface-name')),
    ])
    self.interface_name = None
    self.policy_details = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.PolicyDetails()
    self.policy_details.parent = self
    self._children_name_map["policy_details"] = "policy-details"
    self._children_yang_names.add("policy-details")
    self.classes = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes()
    self.classes.parent = self
    self._children_name_map["classes"] = "classes"
    self._children_yang_names.add("classes")
    # Key predicate is embedded in the segment path for list entries.
    self._segment_path = lambda: "member-interface" + "[interface-name='" + str(self.interface_name) + "']"
def __setattr__(self, name, value):
    """Delegate attribute writes to the YDK runtime.

    'interface_name' is the key leaf of this list entry.
    """
    key_leafs = ['interface_name']
    self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface, key_leafs, name, value)
class PolicyDetails(Entity):
    """
    Policy Details
    .. attribute:: npu_id
    NPU ID
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: interface_handle
    InterfaceHandle
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: interface_bandwidth_kbps
    Interface Bandwidth (in kbps)
    **type**\: int
    **range:** 0..4294967295
    **units**\: kbit/s
    .. attribute:: policy_name
    Policy name
    **type**\: str
    **length:** 0..64
    .. attribute:: total_number_of_classes
    Number of Classes
    **type**\: int
    **range:** 0..65535
    .. attribute:: voq_base_address
    VOQ base address
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: voq_stats_handle
    VOQ stats handle
    **type**\: int
    **range:** 0..18446744073709551615
    .. attribute:: stats_accounting_type
    QoS Statistics Accounting Type
    **type**\: :py:class:`QosPolicyAccountEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.QosPolicyAccountEnum>`
    .. attribute:: policy_status
    Policy Status
    **type**\: :py:class:`DnxQoseaShowPolicyStatus <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowPolicyStatus>`
    .. attribute:: interface_status
    Interface Status
    **type**\: :py:class:`DnxQoseaShowIntfStatus <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowIntfStatus>`
    """
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'
    def __init__(self):
        """Initialize the leaf-only 'policy-details' container for a member interface."""
        super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.PolicyDetails, self).__init__()
        self.yang_name = "policy-details"
        self.yang_parent_name = "member-interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Pure leaf container: no child containers or child lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('npu_id', YLeaf(YType.uint32, 'npu-id')),
            ('interface_handle', YLeaf(YType.uint32, 'interface-handle')),
            ('interface_bandwidth_kbps', YLeaf(YType.uint32, 'interface-bandwidth-kbps')),
            ('policy_name', YLeaf(YType.str, 'policy-name')),
            ('total_number_of_classes', YLeaf(YType.uint16, 'total-number-of-classes')),
            ('voq_base_address', YLeaf(YType.uint32, 'voq-base-address')),
            ('voq_stats_handle', YLeaf(YType.uint64, 'voq-stats-handle')),
            ('stats_accounting_type', YLeaf(YType.enumeration, 'stats-accounting-type')),
            ('policy_status', YLeaf(YType.enumeration, 'policy-status')),
            ('interface_status', YLeaf(YType.enumeration, 'interface-status')),
        ])
        # All leaf values start unset; the YDK codec fills them on decode.
        self.npu_id = None
        self.interface_handle = None
        self.interface_bandwidth_kbps = None
        self.policy_name = None
        self.total_number_of_classes = None
        self.voq_base_address = None
        self.voq_stats_handle = None
        self.stats_accounting_type = None
        self.policy_status = None
        self.interface_status = None
        self._segment_path = lambda: "policy-details"
    def __setattr__(self, name, value):
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.PolicyDetails, ['npu_id', 'interface_handle', 'interface_bandwidth_kbps', 'policy_name', 'total_number_of_classes', 'voq_base_address', 'voq_stats_handle', 'stats_accounting_type', 'policy_status', 'interface_status'], name, value)
class Classes(Entity):
"""
QoS list of class names
.. attribute:: class_
QoS policy class
**type**\: list of :py:class:`Class <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize the 'classes' container holding the list of QoS Class entries."""
    super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes, self).__init__()
    self.yang_name = "classes"
    self.yang_parent_name = "member-interface"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    self._child_list_classes = OrderedDict([("class", ("class_", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class))])
    self._leafs = OrderedDict()
    # Python attribute is 'class_' (trailing underscore) because 'class' is a keyword.
    self.class_ = YList(self)
    self._segment_path = lambda: "classes"
def __setattr__(self, name, value):
    """Pass every attribute write to the YDK runtime for validation."""
    # No leaf attributes on this container, so the leaf list is empty.
    no_leafs = []
    self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes, no_leafs, name, value)
class Class(Entity):
"""
QoS policy class
.. attribute:: level_one_class_name (key)
QoS policy class name at level 1
**type**\: str
.. attribute:: level_two_class_name
QoS policy child class name at level 2
**type**\: str
.. attribute:: config_max_rate
Configured maximum rate
**type**\: :py:class:`ConfigMaxRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigMaxRate>`
.. attribute:: config_min_rate
Configured minimum rate
**type**\: :py:class:`ConfigMinRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigMinRate>`
.. attribute:: config_queue_limit
Configured queue limit
**type**\: :py:class:`ConfigQueueLimit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigQueueLimit>`
.. attribute:: config_policer_average_rate
Configured policer average rate
**type**\: :py:class:`ConfigPolicerAverageRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerAverageRate>`
.. attribute:: config_policer_peak_rate
Config policer peak rate
**type**\: :py:class:`ConfigPolicerPeakRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerPeakRate>`
.. attribute:: config_policer_conform_burst
Configured policer conform burst
**type**\: :py:class:`ConfigPolicerConformBurst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerConformBurst>`
.. attribute:: config_policer_excess_burst
Configured policer excess burst
**type**\: :py:class:`ConfigPolicerExcessBurst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerExcessBurst>`
.. attribute:: conform_action
Conform action
**type**\: :py:class:`ConformAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConformAction>`
.. attribute:: exceed_action
Exceed action
**type**\: :py:class:`ExceedAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction>`
.. attribute:: violate_action
Violate action
**type**\: :py:class:`ViolateAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction>`
.. attribute:: class_level
Class level
**type**\: :py:class:`DnxQoseaShowLevel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowLevel>`
.. attribute:: egress_queue_id
Egress Queue ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: queue_type
Queue type
**type**\: :py:class:`DnxQoseaShowQueue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowQueue>`
.. attribute:: priority_level
Priority level
**type**\: :py:class:`DnxQoseaShowHpLevel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowHpLevel>`
.. attribute:: hardware_max_rate_kbps
Hardware maximum rate in kbps
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: hardware_min_rate_kbps
Hardware minimum rate in kbps
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: config_excess_bandwidth_percent
Configured excess bandwidth percentage
**type**\: int
**range:** 0..4294967295
**units**\: percentage
.. attribute:: config_excess_bandwidth_unit
Configured excess bandwidth unit
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_excess_bandwidth_weight
Hardware excess bandwidth weight
**type**\: int
**range:** 0..4294967295
.. attribute:: network_min_bandwidth_kbps
Network minimum Bandwith
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_queue_limit_bytes
Hardware queue limit in bytes
**type**\: int
**range:** 0..18446744073709551615
**units**\: byte
.. attribute:: hardware_queue_limit_microseconds
Hardware queue limit in microseconds
**type**\: int
**range:** 0..18446744073709551615
**units**\: microsecond
.. attribute:: policer_bucket_id
PolicerBucketID
**type**\: int
**range:** 0..4294967295
.. attribute:: policer_stats_handle
PolicerStatsHandle
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: hardware_policer_average_rate_kbps
Hardware policer average in kbps
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: hardware_policer_peak_rate_kbps
Hardware policer peak rate
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_policer_conform_burst_bytes
Hardware policer conform burst
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_policer_excess_burst_bytes
Hardware policer excess burst
**type**\: int
**range:** 0..4294967295
.. attribute:: ip_mark
IP mark
**type**\: list of :py:class:`IpMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.IpMark>`
.. attribute:: common_mark
Common mark
**type**\: list of :py:class:`CommonMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.CommonMark>`
.. attribute:: mpls_mark
MPLS mark
**type**\: list of :py:class:`MplsMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.MplsMark>`
.. attribute:: wred
WRED parameters
**type**\: list of :py:class:`Wred <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize one QoS policy class entry, keyed by 'level_one_class_name'.

    Registers the class's scalar leafs, ten rate/burst/action child
    containers, and four child lists (ip-mark, common-mark, mpls-mark, wred).
    """
    super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class, self).__init__()
    self.yang_name = "class"
    self.yang_parent_name = "classes"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # 'level_one_class_name' is the YANG list key for this entry.
    self.ylist_key_names = ['level_one_class_name']
    # Ordered to match the YANG schema's child declaration order.
    self._child_container_classes = OrderedDict([("config-max-rate", ("config_max_rate", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigMaxRate)), ("config-min-rate", ("config_min_rate", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigMinRate)), ("config-queue-limit", ("config_queue_limit", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigQueueLimit)), ("config-policer-average-rate", ("config_policer_average_rate", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerAverageRate)), ("config-policer-peak-rate", ("config_policer_peak_rate", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerPeakRate)), ("config-policer-conform-burst", ("config_policer_conform_burst", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerConformBurst)), ("config-policer-excess-burst", ("config_policer_excess_burst", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerExcessBurst)), ("conform-action", ("conform_action", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConformAction)), ("exceed-action", ("exceed_action", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction)), ("violate-action", ("violate_action", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction))])
    self._child_list_classes = OrderedDict([("ip-mark", ("ip_mark", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.IpMark)), ("common-mark", ("common_mark", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.CommonMark)), ("mpls-mark", ("mpls_mark", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.MplsMark)), ("wred", ("wred", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred))])
    # Leaf descriptors: python attribute name -> (YANG type, YANG leaf name).
    self._leafs = OrderedDict([
        ('level_one_class_name', YLeaf(YType.str, 'level-one-class-name')),
        ('level_two_class_name', YLeaf(YType.str, 'level-two-class-name')),
        ('class_level', YLeaf(YType.enumeration, 'class-level')),
        ('egress_queue_id', YLeaf(YType.int32, 'egress-queue-id')),
        ('queue_type', YLeaf(YType.enumeration, 'queue-type')),
        ('priority_level', YLeaf(YType.enumeration, 'priority-level')),
        ('hardware_max_rate_kbps', YLeaf(YType.uint32, 'hardware-max-rate-kbps')),
        ('hardware_min_rate_kbps', YLeaf(YType.uint32, 'hardware-min-rate-kbps')),
        ('config_excess_bandwidth_percent', YLeaf(YType.uint32, 'config-excess-bandwidth-percent')),
        ('config_excess_bandwidth_unit', YLeaf(YType.uint32, 'config-excess-bandwidth-unit')),
        ('hardware_excess_bandwidth_weight', YLeaf(YType.uint32, 'hardware-excess-bandwidth-weight')),
        ('network_min_bandwidth_kbps', YLeaf(YType.uint32, 'network-min-bandwidth-kbps')),
        ('hardware_queue_limit_bytes', YLeaf(YType.uint64, 'hardware-queue-limit-bytes')),
        ('hardware_queue_limit_microseconds', YLeaf(YType.uint64, 'hardware-queue-limit-microseconds')),
        ('policer_bucket_id', YLeaf(YType.uint32, 'policer-bucket-id')),
        ('policer_stats_handle', YLeaf(YType.uint64, 'policer-stats-handle')),
        ('hardware_policer_average_rate_kbps', YLeaf(YType.uint32, 'hardware-policer-average-rate-kbps')),
        ('hardware_policer_peak_rate_kbps', YLeaf(YType.uint32, 'hardware-policer-peak-rate-kbps')),
        ('hardware_policer_conform_burst_bytes', YLeaf(YType.uint32, 'hardware-policer-conform-burst-bytes')),
        ('hardware_policer_excess_burst_bytes', YLeaf(YType.uint32, 'hardware-policer-excess-burst-bytes')),
    ])
    # All leaf values start unset; the YDK codec fills them on decode.
    self.level_one_class_name = None
    self.level_two_class_name = None
    self.class_level = None
    self.egress_queue_id = None
    self.queue_type = None
    self.priority_level = None
    self.hardware_max_rate_kbps = None
    self.hardware_min_rate_kbps = None
    self.config_excess_bandwidth_percent = None
    self.config_excess_bandwidth_unit = None
    self.hardware_excess_bandwidth_weight = None
    self.network_min_bandwidth_kbps = None
    self.hardware_queue_limit_bytes = None
    self.hardware_queue_limit_microseconds = None
    self.policer_bucket_id = None
    self.policer_stats_handle = None
    self.hardware_policer_average_rate_kbps = None
    self.hardware_policer_peak_rate_kbps = None
    self.hardware_policer_conform_burst_bytes = None
    self.hardware_policer_excess_burst_bytes = None
    # Instantiate each child container and wire it back to this parent.
    self.config_max_rate = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigMaxRate()
    self.config_max_rate.parent = self
    self._children_name_map["config_max_rate"] = "config-max-rate"
    self._children_yang_names.add("config-max-rate")
    self.config_min_rate = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigMinRate()
    self.config_min_rate.parent = self
    self._children_name_map["config_min_rate"] = "config-min-rate"
    self._children_yang_names.add("config-min-rate")
    self.config_queue_limit = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigQueueLimit()
    self.config_queue_limit.parent = self
    self._children_name_map["config_queue_limit"] = "config-queue-limit"
    self._children_yang_names.add("config-queue-limit")
    self.config_policer_average_rate = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerAverageRate()
    self.config_policer_average_rate.parent = self
    self._children_name_map["config_policer_average_rate"] = "config-policer-average-rate"
    self._children_yang_names.add("config-policer-average-rate")
    self.config_policer_peak_rate = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerPeakRate()
    self.config_policer_peak_rate.parent = self
    self._children_name_map["config_policer_peak_rate"] = "config-policer-peak-rate"
    self._children_yang_names.add("config-policer-peak-rate")
    self.config_policer_conform_burst = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerConformBurst()
    self.config_policer_conform_burst.parent = self
    self._children_name_map["config_policer_conform_burst"] = "config-policer-conform-burst"
    self._children_yang_names.add("config-policer-conform-burst")
    self.config_policer_excess_burst = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerExcessBurst()
    self.config_policer_excess_burst.parent = self
    self._children_name_map["config_policer_excess_burst"] = "config-policer-excess-burst"
    self._children_yang_names.add("config-policer-excess-burst")
    self.conform_action = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConformAction()
    self.conform_action.parent = self
    self._children_name_map["conform_action"] = "conform-action"
    self._children_yang_names.add("conform-action")
    self.exceed_action = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction()
    self.exceed_action.parent = self
    self._children_name_map["exceed_action"] = "exceed-action"
    self._children_yang_names.add("exceed-action")
    self.violate_action = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction()
    self.violate_action.parent = self
    self._children_name_map["violate_action"] = "violate-action"
    self._children_yang_names.add("violate-action")
    # Child lists start empty and are populated by decode or by the user.
    self.ip_mark = YList(self)
    self.common_mark = YList(self)
    self.mpls_mark = YList(self)
    self.wred = YList(self)
    # Key predicate is embedded in the segment path for list entries.
    self._segment_path = lambda: "class" + "[level-one-class-name='" + str(self.level_one_class_name) + "']"
# Route every attribute write through the YDK runtime helper, passing the
# declared leaf names of this generated class so the runtime can
# validate/track assignments to YANG leaves.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class, ['level_one_class_name', 'level_two_class_name', 'class_level', 'egress_queue_id', 'queue_type', 'priority_level', 'hardware_max_rate_kbps', 'hardware_min_rate_kbps', 'config_excess_bandwidth_percent', 'config_excess_bandwidth_unit', 'hardware_excess_bandwidth_weight', 'network_min_bandwidth_kbps', 'hardware_queue_limit_bytes', 'hardware_queue_limit_microseconds', 'policer_bucket_id', 'policer_stats_handle', 'hardware_policer_average_rate_kbps', 'hardware_policer_peak_rate_kbps', 'hardware_policer_conform_burst_bytes', 'hardware_policer_excess_burst_bytes'], name, value)
# Generated YDK Entity for the "config-max-rate" YANG container under "class".
# Carries a single policy parameter as a (value, unit) leaf pair.
class ConfigMaxRate(Entity):
"""
Configured maximum rate
.. attribute:: policy_value
Policy value
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_unit
Policy unit
**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigMaxRate, self).__init__()
self.yang_name = "config-max-rate"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# This node has no child containers or child lists.
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('policy_value', YLeaf(YType.uint32, 'policy-value')),
('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
])
self.policy_value = None
self.policy_unit = None
self._segment_path = lambda: "config-max-rate"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigMaxRate, ['policy_value', 'policy_unit'], name, value)
# Generated YDK Entity for the "config-min-rate" YANG container under "class".
# Carries a single policy parameter as a (value, unit) leaf pair.
class ConfigMinRate(Entity):
"""
Configured minimum rate
.. attribute:: policy_value
Policy value
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_unit
Policy unit
**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigMinRate, self).__init__()
self.yang_name = "config-min-rate"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# This node has no child containers or child lists.
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('policy_value', YLeaf(YType.uint32, 'policy-value')),
('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
])
self.policy_value = None
self.policy_unit = None
self._segment_path = lambda: "config-min-rate"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigMinRate, ['policy_value', 'policy_unit'], name, value)
# Generated YDK Entity for the "config-queue-limit" YANG container under "class".
# Carries a single policy parameter as a (value, unit) leaf pair.
class ConfigQueueLimit(Entity):
"""
Configured queue limit
.. attribute:: policy_value
Policy value
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_unit
Policy unit
**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigQueueLimit, self).__init__()
self.yang_name = "config-queue-limit"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# This node has no child containers or child lists.
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('policy_value', YLeaf(YType.uint32, 'policy-value')),
('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
])
self.policy_value = None
self.policy_unit = None
self._segment_path = lambda: "config-queue-limit"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigQueueLimit, ['policy_value', 'policy_unit'], name, value)
# Generated YDK Entity for the "config-policer-average-rate" YANG container
# under "class". Carries a single policy parameter as a (value, unit) leaf pair.
class ConfigPolicerAverageRate(Entity):
"""
Configured policer average rate
.. attribute:: policy_value
Policy value
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_unit
Policy unit
**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerAverageRate, self).__init__()
self.yang_name = "config-policer-average-rate"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# This node has no child containers or child lists.
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('policy_value', YLeaf(YType.uint32, 'policy-value')),
('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
])
self.policy_value = None
self.policy_unit = None
self._segment_path = lambda: "config-policer-average-rate"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerAverageRate, ['policy_value', 'policy_unit'], name, value)
# Generated YDK Entity for the "config-policer-peak-rate" YANG container
# under "class". Carries a single policy parameter as a (value, unit) leaf pair.
class ConfigPolicerPeakRate(Entity):
"""
Config policer peak rate
.. attribute:: policy_value
Policy value
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_unit
Policy unit
**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerPeakRate, self).__init__()
self.yang_name = "config-policer-peak-rate"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# This node has no child containers or child lists.
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('policy_value', YLeaf(YType.uint32, 'policy-value')),
('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
])
self.policy_value = None
self.policy_unit = None
self._segment_path = lambda: "config-policer-peak-rate"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerPeakRate, ['policy_value', 'policy_unit'], name, value)
# Generated YDK Entity for the "config-policer-conform-burst" YANG container
# under "class". Carries a single policy parameter as a (value, unit) leaf pair.
class ConfigPolicerConformBurst(Entity):
"""
Configured policer conform burst
.. attribute:: policy_value
Policy value
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_unit
Policy unit
**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerConformBurst, self).__init__()
self.yang_name = "config-policer-conform-burst"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# This node has no child containers or child lists.
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('policy_value', YLeaf(YType.uint32, 'policy-value')),
('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
])
self.policy_value = None
self.policy_unit = None
self._segment_path = lambda: "config-policer-conform-burst"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerConformBurst, ['policy_value', 'policy_unit'], name, value)
# Generated YDK Entity for the "config-policer-excess-burst" YANG container
# under "class". Carries a single policy parameter as a (value, unit) leaf pair.
class ConfigPolicerExcessBurst(Entity):
"""
Configured policer excess burst
.. attribute:: policy_value
Policy value
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_unit
Policy unit
**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerExcessBurst, self).__init__()
self.yang_name = "config-policer-excess-burst"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# This node has no child containers or child lists.
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('policy_value', YLeaf(YType.uint32, 'policy-value')),
('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
])
self.policy_value = None
self.policy_unit = None
self._segment_path = lambda: "config-policer-excess-burst"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerExcessBurst, ['policy_value', 'policy_unit'], name, value)
# Generated YDK Entity for the "conform-action" YANG container under "class".
# Holds the policer action type plus a child list of "mark" entries.
class ConformAction(Entity):
"""
Conform action
.. attribute:: action_type
Policer action type
**type**\: :py:class:`DnxQoseaShowAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowAction>`
.. attribute:: mark
Action mark
**type**\: list of :py:class:`Mark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConformAction.Mark>`
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConformAction, self).__init__()
self.yang_name = "conform-action"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
# Single child list: YANG "mark" -> python attribute "mark" of nested Mark.
self._child_list_classes = OrderedDict([("mark", ("mark", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConformAction.Mark))])
self._leafs = OrderedDict([
('action_type', YLeaf(YType.enumeration, 'action-type')),
])
self.action_type = None
self.mark = YList(self)
self._segment_path = lambda: "conform-action"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConformAction, ['action_type'], name, value)
# Generated YDK Entity for one "mark" list entry under "conform-action".
class Mark(Entity):
"""
Action mark
.. attribute:: mark_type
Mark type
**type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
.. attribute:: mark_value
Mark value
**type**\: int
**range:** 0..65535
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConformAction.Mark, self).__init__()
self.yang_name = "mark"
self.yang_parent_name = "conform-action"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('mark_type', YLeaf(YType.enumeration, 'mark-type')),
('mark_value', YLeaf(YType.uint16, 'mark-value')),
])
self.mark_type = None
self.mark_value = None
self._segment_path = lambda: "mark"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ConformAction.Mark, ['mark_type', 'mark_value'], name, value)
# Generated YDK Entity for the "exceed-action" YANG container under "class".
# Holds the policer action type plus a child list of "mark" entries.
class ExceedAction(Entity):
"""
Exceed action
.. attribute:: action_type
Policer action type
**type**\: :py:class:`DnxQoseaShowAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowAction>`
.. attribute:: mark
Action mark
**type**\: list of :py:class:`Mark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction.Mark>`
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction, self).__init__()
self.yang_name = "exceed-action"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
# Single child list: YANG "mark" -> python attribute "mark" of nested Mark.
self._child_list_classes = OrderedDict([("mark", ("mark", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction.Mark))])
self._leafs = OrderedDict([
('action_type', YLeaf(YType.enumeration, 'action-type')),
])
self.action_type = None
self.mark = YList(self)
self._segment_path = lambda: "exceed-action"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction, ['action_type'], name, value)
# Generated YDK Entity for one "mark" list entry under "exceed-action".
class Mark(Entity):
"""
Action mark
.. attribute:: mark_type
Mark type
**type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
.. attribute:: mark_value
Mark value
**type**\: int
**range:** 0..65535
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction.Mark, self).__init__()
self.yang_name = "mark"
self.yang_parent_name = "exceed-action"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('mark_type', YLeaf(YType.enumeration, 'mark-type')),
('mark_value', YLeaf(YType.uint16, 'mark-value')),
])
self.mark_type = None
self.mark_value = None
self._segment_path = lambda: "mark"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction.Mark, ['mark_type', 'mark_value'], name, value)
# Generated YDK Entity for the "violate-action" YANG container under "class".
# Holds the policer action type plus a child list of "mark" entries.
class ViolateAction(Entity):
"""
Violate action
.. attribute:: action_type
Policer action type
**type**\: :py:class:`DnxQoseaShowAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowAction>`
.. attribute:: mark
Action mark
**type**\: list of :py:class:`Mark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction.Mark>`
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction, self).__init__()
self.yang_name = "violate-action"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
# Single child list: YANG "mark" -> python attribute "mark" of nested Mark.
self._child_list_classes = OrderedDict([("mark", ("mark", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction.Mark))])
self._leafs = OrderedDict([
('action_type', YLeaf(YType.enumeration, 'action-type')),
])
self.action_type = None
self.mark = YList(self)
self._segment_path = lambda: "violate-action"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction, ['action_type'], name, value)
# Generated YDK Entity for one "mark" list entry under "violate-action".
class Mark(Entity):
"""
Action mark
.. attribute:: mark_type
Mark type
**type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
.. attribute:: mark_value
Mark value
**type**\: int
**range:** 0..65535
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction.Mark, self).__init__()
self.yang_name = "mark"
self.yang_parent_name = "violate-action"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('mark_type', YLeaf(YType.enumeration, 'mark-type')),
('mark_value', YLeaf(YType.uint16, 'mark-value')),
])
self.mark_type = None
self.mark_value = None
self._segment_path = lambda: "mark"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction.Mark, ['mark_type', 'mark_value'], name, value)
# Generated YDK Entity for one "ip-mark" list entry under "class".
# Carries a (mark_type, mark_value) leaf pair.
class IpMark(Entity):
"""
IP mark
.. attribute:: mark_type
Mark type
**type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
.. attribute:: mark_value
Mark value
**type**\: int
**range:** 0..65535
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.IpMark, self).__init__()
self.yang_name = "ip-mark"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('mark_type', YLeaf(YType.enumeration, 'mark-type')),
('mark_value', YLeaf(YType.uint16, 'mark-value')),
])
self.mark_type = None
self.mark_value = None
self._segment_path = lambda: "ip-mark"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.IpMark, ['mark_type', 'mark_value'], name, value)
# Generated YDK Entity for one "common-mark" list entry under "class".
# Carries a (mark_type, mark_value) leaf pair.
class CommonMark(Entity):
"""
Common mark
.. attribute:: mark_type
Mark type
**type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
.. attribute:: mark_value
Mark value
**type**\: int
**range:** 0..65535
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.CommonMark, self).__init__()
self.yang_name = "common-mark"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('mark_type', YLeaf(YType.enumeration, 'mark-type')),
('mark_value', YLeaf(YType.uint16, 'mark-value')),
])
self.mark_type = None
self.mark_value = None
self._segment_path = lambda: "common-mark"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.CommonMark, ['mark_type', 'mark_value'], name, value)
# Generated YDK Entity for one "mpls-mark" list entry under "class".
# Carries a (mark_type, mark_value) leaf pair.
class MplsMark(Entity):
"""
MPLS mark
.. attribute:: mark_type
Mark type
**type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
.. attribute:: mark_value
Mark value
**type**\: int
**range:** 0..65535
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.MplsMark, self).__init__()
self.yang_name = "mpls-mark"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('mark_type', YLeaf(YType.enumeration, 'mark-type')),
('mark_value', YLeaf(YType.uint16, 'mark-value')),
])
self.mark_type = None
self.mark_value = None
self._segment_path = lambda: "mpls-mark"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.MplsMark, ['mark_type', 'mark_value'], name, value)
# Generated YDK Entity for one "wred" list entry under "class".
# Aggregates WRED leaves plus three child containers:
# wred-match-value, config-min-threshold, config-max-threshold.
class Wred(Entity):
"""
WRED parameters
.. attribute:: wred_match_value
WRED match values
**type**\: :py:class:`WredMatchValue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue>`
.. attribute:: config_min_threshold
Configured minimum threshold
**type**\: :py:class:`ConfigMinThreshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMinThreshold>`
.. attribute:: config_max_threshold
Configured maximum threshold
**type**\: :py:class:`ConfigMaxThreshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMaxThreshold>`
.. attribute:: wred_match_type
WREDMatchType
**type**\: :py:class:`DnxQoseaShowWred <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowWred>`
.. attribute:: hardware_min_threshold_bytes
Hardware minimum threshold
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_max_threshold_bytes
Hardware maximum threshold
**type**\: int
**range:** 0..4294967295
.. attribute:: first_segment
First segment
**type**\: int
**range:** 0..65535
.. attribute:: segment_size
Segment size
**type**\: int
**range:** 0..4294967295
"""
# YANG module prefix and revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred, self).__init__()
self.yang_name = "wred"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Child container registry: YANG name -> (python attribute, nested class).
self._child_container_classes = OrderedDict([("wred-match-value", ("wred_match_value", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue)), ("config-min-threshold", ("config_min_threshold", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMinThreshold)), ("config-max-threshold", ("config_max_threshold", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMaxThreshold))])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('wred_match_type', YLeaf(YType.enumeration, 'wred-match-type')),
('hardware_min_threshold_bytes', YLeaf(YType.uint32, 'hardware-min-threshold-bytes')),
('hardware_max_threshold_bytes', YLeaf(YType.uint32, 'hardware-max-threshold-bytes')),
('first_segment', YLeaf(YType.uint16, 'first-segment')),
('segment_size', YLeaf(YType.uint32, 'segment-size')),
])
self.wred_match_type = None
self.hardware_min_threshold_bytes = None
self.hardware_max_threshold_bytes = None
self.first_segment = None
self.segment_size = None
# Instantiate child containers, parent them, and register their YANG names.
self.wred_match_value = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue()
self.wred_match_value.parent = self
self._children_name_map["wred_match_value"] = "wred-match-value"
self._children_yang_names.add("wred-match-value")
self.config_min_threshold = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMinThreshold()
self.config_min_threshold.parent = self
self._children_name_map["config_min_threshold"] = "config-min-threshold"
self._children_yang_names.add("config-min-threshold")
self.config_max_threshold = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMaxThreshold()
self.config_max_threshold.parent = self
self._children_name_map["config_max_threshold"] = "config-max-threshold"
self._children_yang_names.add("config-max-threshold")
self._segment_path = lambda: "wred"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred, ['wred_match_type', 'hardware_min_threshold_bytes', 'hardware_max_threshold_bytes', 'first_segment', 'segment_size'], name, value)
# Generated YDK Entity for the "wred-match-value" container; wraps the
# "dnx-qosea-show-red-match-value" child list.
class WredMatchValue(Entity):
"""
WRED match values
.. attribute:: dnx_qosea_show_red_match_value
dnx qosea show red match value
**type**\: list of :py:class:`DnxQoseaShowRedMatchValue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue, self).__init__()
self.yang_name = "wred-match-value"
self.yang_parent_name = "wred"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
# Single child list of DnxQoseaShowRedMatchValue entries.
self._child_list_classes = OrderedDict([("dnx-qosea-show-red-match-value", ("dnx_qosea_show_red_match_value", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue))])
self._leafs = OrderedDict()
self.dnx_qosea_show_red_match_value = YList(self)
self._segment_path = lambda: "wred-match-value"
# No leaves here, so the allowed-leaf list passed to the runtime is empty.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue, [], name, value)
# Generated YDK Entity for one red-match-value range (start..end) entry.
class DnxQoseaShowRedMatchValue(Entity):
"""
dnx qosea show red match value
.. attribute:: range_start
Start value of a range
**type**\: int
**range:** 0..255
.. attribute:: range_end
End value of a range
**type**\: int
**range:** 0..255
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue, self).__init__()
self.yang_name = "dnx-qosea-show-red-match-value"
self.yang_parent_name = "wred-match-value"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('range_start', YLeaf(YType.uint8, 'range-start')),
('range_end', YLeaf(YType.uint8, 'range-end')),
])
self.range_start = None
self.range_end = None
self._segment_path = lambda: "dnx-qosea-show-red-match-value"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue, ['range_start', 'range_end'], name, value)
# Generated YDK Entity for "config-min-threshold": a (value, unit) policy pair.
class ConfigMinThreshold(Entity):
"""
Configured minimum threshold
.. attribute:: policy_value
Policy value
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_unit
Policy unit
**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMinThreshold, self).__init__()
self.yang_name = "config-min-threshold"
self.yang_parent_name = "wred"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('policy_value', YLeaf(YType.uint32, 'policy-value')),
('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
])
self.policy_value = None
self.policy_unit = None
self._segment_path = lambda: "config-min-threshold"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMinThreshold, ['policy_value', 'policy_unit'], name, value)
# Generated YDK Entity for "config-max-threshold": a (value, unit) policy pair.
class ConfigMaxThreshold(Entity):
"""
Configured maximum threshold
.. attribute:: policy_value
Policy value
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_unit
Policy unit
**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMaxThreshold, self).__init__()
self.yang_name = "config-max-threshold"
self.yang_parent_name = "wred"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registry: python attribute name -> YLeaf(YANG type, YANG leaf name).
self._leafs = OrderedDict([
('policy_value', YLeaf(YType.uint32, 'policy-value')),
('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
])
self.policy_value = None
self.policy_unit = None
self._segment_path = lambda: "config-max-threshold"
# Validate attribute writes via the YDK runtime, restricted to the leaves above.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMaxThreshold, ['policy_value', 'policy_unit'], name, value)
class Classes(Entity):
"""
QoS list of class names
.. attribute:: class_
QoS policy class
**type**\: list of :py:class:`Class <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes, self).__init__()
self.yang_name = "classes"
self.yang_parent_name = "bundle-interface"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
# Single child list: YANG "class" -> python attribute "class_" (trailing
# underscore avoids the Python keyword).
self._child_list_classes = OrderedDict([("class", ("class_", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class))])
self._leafs = OrderedDict()
self.class_ = YList(self)
self._segment_path = lambda: "classes"
# No leaves on this node, so the allowed-leaf list passed to the YDK
# runtime helper is empty.
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes, [], name, value)
class Class(Entity):
"""
QoS policy class
.. attribute:: level_one_class_name (key)
QoS policy class name at level 1
**type**\: str
.. attribute:: level_two_class_name
QoS policy child class name at level 2
**type**\: str
.. attribute:: config_max_rate
Configured maximum rate
**type**\: :py:class:`ConfigMaxRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigMaxRate>`
.. attribute:: config_min_rate
Configured minimum rate
**type**\: :py:class:`ConfigMinRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigMinRate>`
.. attribute:: config_queue_limit
Configured queue limit
**type**\: :py:class:`ConfigQueueLimit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigQueueLimit>`
.. attribute:: config_policer_average_rate
Configured policer average rate
**type**\: :py:class:`ConfigPolicerAverageRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerAverageRate>`
.. attribute:: config_policer_peak_rate
Config policer peak rate
**type**\: :py:class:`ConfigPolicerPeakRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerPeakRate>`
.. attribute:: config_policer_conform_burst
Configured policer conform burst
**type**\: :py:class:`ConfigPolicerConformBurst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerConformBurst>`
.. attribute:: config_policer_excess_burst
Configured policer excess burst
**type**\: :py:class:`ConfigPolicerExcessBurst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerExcessBurst>`
.. attribute:: conform_action
Conform action
**type**\: :py:class:`ConformAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConformAction>`
.. attribute:: exceed_action
Exceed action
**type**\: :py:class:`ExceedAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ExceedAction>`
.. attribute:: violate_action
Violate action
**type**\: :py:class:`ViolateAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ViolateAction>`
.. attribute:: class_level
Class level
**type**\: :py:class:`DnxQoseaShowLevel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowLevel>`
.. attribute:: egress_queue_id
Egress Queue ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: queue_type
Queue type
**type**\: :py:class:`DnxQoseaShowQueue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowQueue>`
.. attribute:: priority_level
Priority level
**type**\: :py:class:`DnxQoseaShowHpLevel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowHpLevel>`
.. attribute:: hardware_max_rate_kbps
Hardware maximum rate in kbps
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: hardware_min_rate_kbps
Hardware minimum rate in kbps
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: config_excess_bandwidth_percent
Configured excess bandwidth percentage
**type**\: int
**range:** 0..4294967295
**units**\: percentage
.. attribute:: config_excess_bandwidth_unit
Configured excess bandwidth unit
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_excess_bandwidth_weight
Hardware excess bandwidth weight
**type**\: int
**range:** 0..4294967295
.. attribute:: network_min_bandwidth_kbps
Network minimum Bandwidth
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_queue_limit_bytes
Hardware queue limit in bytes
**type**\: int
**range:** 0..18446744073709551615
**units**\: byte
.. attribute:: hardware_queue_limit_microseconds
Hardware queue limit in microseconds
**type**\: int
**range:** 0..18446744073709551615
**units**\: microsecond
.. attribute:: policer_bucket_id
PolicerBucketID
**type**\: int
**range:** 0..4294967295
.. attribute:: policer_stats_handle
PolicerStatsHandle
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: hardware_policer_average_rate_kbps
Hardware policer average in kbps
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: hardware_policer_peak_rate_kbps
Hardware policer peak rate
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_policer_conform_burst_bytes
Hardware policer conform burst
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_policer_excess_burst_bytes
Hardware policer excess burst
**type**\: int
**range:** 0..4294967295
.. attribute:: ip_mark
IP mark
**type**\: list of :py:class:`IpMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.IpMark>`
.. attribute:: common_mark
Common mark
**type**\: list of :py:class:`CommonMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.CommonMark>`
.. attribute:: mpls_mark
MPLS mark
**type**\: list of :py:class:`MplsMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.MplsMark>`
.. attribute:: wred
WRED parameters
**type**\: list of :py:class:`Wred <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred>`
"""
# YANG module metadata used by the YDK runtime for namespace resolution.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'

def __init__(self):
    """Initialize a QoS policy class list entry (auto-generated YDK binding).

    NOTE(review): generated code — the Entity.__setattr__ override below
    intercepts every assignment here, so statement order is significant.
    """
    super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class, self).__init__()
    self.yang_name = "class"
    self.yang_parent_name = "classes"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # List key: level-one-class-name (used in _segment_path below).
    self.ylist_key_names = ['level_one_class_name']
    # Child containers: YANG name -> (Python attribute, binding class).
    self._child_container_classes = OrderedDict([("config-max-rate", ("config_max_rate", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigMaxRate)), ("config-min-rate", ("config_min_rate", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigMinRate)), ("config-queue-limit", ("config_queue_limit", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigQueueLimit)), ("config-policer-average-rate", ("config_policer_average_rate", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerAverageRate)), ("config-policer-peak-rate", ("config_policer_peak_rate", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerPeakRate)), ("config-policer-conform-burst", ("config_policer_conform_burst", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerConformBurst)), ("config-policer-excess-burst", ("config_policer_excess_burst", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerExcessBurst)), ("conform-action", ("conform_action", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConformAction)), ("exceed-action", ("exceed_action", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ExceedAction)), ("violate-action", ("violate_action", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ViolateAction))])
    # Child lists: ip-mark, common-mark, mpls-mark, wred.
    self._child_list_classes = OrderedDict([("ip-mark", ("ip_mark", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.IpMark)), ("common-mark", ("common_mark", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.CommonMark)), ("mpls-mark", ("mpls_mark", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.MplsMark)), ("wred", ("wred", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred))])
    # Leaf descriptors: Python attribute name -> YANG leaf name/type.
    self._leafs = OrderedDict([
        ('level_one_class_name', YLeaf(YType.str, 'level-one-class-name')),
        ('level_two_class_name', YLeaf(YType.str, 'level-two-class-name')),
        ('class_level', YLeaf(YType.enumeration, 'class-level')),
        ('egress_queue_id', YLeaf(YType.int32, 'egress-queue-id')),
        ('queue_type', YLeaf(YType.enumeration, 'queue-type')),
        ('priority_level', YLeaf(YType.enumeration, 'priority-level')),
        ('hardware_max_rate_kbps', YLeaf(YType.uint32, 'hardware-max-rate-kbps')),
        ('hardware_min_rate_kbps', YLeaf(YType.uint32, 'hardware-min-rate-kbps')),
        ('config_excess_bandwidth_percent', YLeaf(YType.uint32, 'config-excess-bandwidth-percent')),
        ('config_excess_bandwidth_unit', YLeaf(YType.uint32, 'config-excess-bandwidth-unit')),
        ('hardware_excess_bandwidth_weight', YLeaf(YType.uint32, 'hardware-excess-bandwidth-weight')),
        ('network_min_bandwidth_kbps', YLeaf(YType.uint32, 'network-min-bandwidth-kbps')),
        ('hardware_queue_limit_bytes', YLeaf(YType.uint64, 'hardware-queue-limit-bytes')),
        ('hardware_queue_limit_microseconds', YLeaf(YType.uint64, 'hardware-queue-limit-microseconds')),
        ('policer_bucket_id', YLeaf(YType.uint32, 'policer-bucket-id')),
        ('policer_stats_handle', YLeaf(YType.uint64, 'policer-stats-handle')),
        ('hardware_policer_average_rate_kbps', YLeaf(YType.uint32, 'hardware-policer-average-rate-kbps')),
        ('hardware_policer_peak_rate_kbps', YLeaf(YType.uint32, 'hardware-policer-peak-rate-kbps')),
        ('hardware_policer_conform_burst_bytes', YLeaf(YType.uint32, 'hardware-policer-conform-burst-bytes')),
        ('hardware_policer_excess_burst_bytes', YLeaf(YType.uint32, 'hardware-policer-excess-burst-bytes')),
    ])
    # Leaf values default to unset.
    self.level_one_class_name = None
    self.level_two_class_name = None
    self.class_level = None
    self.egress_queue_id = None
    self.queue_type = None
    self.priority_level = None
    self.hardware_max_rate_kbps = None
    self.hardware_min_rate_kbps = None
    self.config_excess_bandwidth_percent = None
    self.config_excess_bandwidth_unit = None
    self.hardware_excess_bandwidth_weight = None
    self.network_min_bandwidth_kbps = None
    self.hardware_queue_limit_bytes = None
    self.hardware_queue_limit_microseconds = None
    self.policer_bucket_id = None
    self.policer_stats_handle = None
    self.hardware_policer_average_rate_kbps = None
    self.hardware_policer_peak_rate_kbps = None
    self.hardware_policer_conform_burst_bytes = None
    self.hardware_policer_excess_burst_bytes = None
    # Instantiate each child container, parent it, and register its
    # Python-name -> YANG-name mapping.
    self.config_max_rate = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigMaxRate()
    self.config_max_rate.parent = self
    self._children_name_map["config_max_rate"] = "config-max-rate"
    self._children_yang_names.add("config-max-rate")
    self.config_min_rate = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigMinRate()
    self.config_min_rate.parent = self
    self._children_name_map["config_min_rate"] = "config-min-rate"
    self._children_yang_names.add("config-min-rate")
    self.config_queue_limit = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigQueueLimit()
    self.config_queue_limit.parent = self
    self._children_name_map["config_queue_limit"] = "config-queue-limit"
    self._children_yang_names.add("config-queue-limit")
    self.config_policer_average_rate = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerAverageRate()
    self.config_policer_average_rate.parent = self
    self._children_name_map["config_policer_average_rate"] = "config-policer-average-rate"
    self._children_yang_names.add("config-policer-average-rate")
    self.config_policer_peak_rate = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerPeakRate()
    self.config_policer_peak_rate.parent = self
    self._children_name_map["config_policer_peak_rate"] = "config-policer-peak-rate"
    self._children_yang_names.add("config-policer-peak-rate")
    self.config_policer_conform_burst = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerConformBurst()
    self.config_policer_conform_burst.parent = self
    self._children_name_map["config_policer_conform_burst"] = "config-policer-conform-burst"
    self._children_yang_names.add("config-policer-conform-burst")
    self.config_policer_excess_burst = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerExcessBurst()
    self.config_policer_excess_burst.parent = self
    self._children_name_map["config_policer_excess_burst"] = "config-policer-excess-burst"
    self._children_yang_names.add("config-policer-excess-burst")
    self.conform_action = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConformAction()
    self.conform_action.parent = self
    self._children_name_map["conform_action"] = "conform-action"
    self._children_yang_names.add("conform-action")
    self.exceed_action = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ExceedAction()
    self.exceed_action.parent = self
    self._children_name_map["exceed_action"] = "exceed-action"
    self._children_yang_names.add("exceed-action")
    self.violate_action = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ViolateAction()
    self.violate_action.parent = self
    self._children_name_map["violate_action"] = "violate-action"
    self._children_yang_names.add("violate-action")
    # Child lists start empty.
    self.ip_mark = YList(self)
    self.common_mark = YList(self)
    self.mpls_mark = YList(self)
    # List-entry path segment includes the key predicate.
    self.wred = YList(self)
    self._segment_path = lambda: "class" + "[level-one-class-name='" + str(self.level_one_class_name) + "']"
def __setattr__(self, name, value):
    # Route every attribute assignment through the YDK Entity helper,
    # passing the full list of this class's YANG leaf names.
    self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class, ['level_one_class_name', 'level_two_class_name', 'class_level', 'egress_queue_id', 'queue_type', 'priority_level', 'hardware_max_rate_kbps', 'hardware_min_rate_kbps', 'config_excess_bandwidth_percent', 'config_excess_bandwidth_unit', 'hardware_excess_bandwidth_weight', 'network_min_bandwidth_kbps', 'hardware_queue_limit_bytes', 'hardware_queue_limit_microseconds', 'policer_bucket_id', 'policer_stats_handle', 'hardware_policer_average_rate_kbps', 'hardware_policer_peak_rate_kbps', 'hardware_policer_conform_burst_bytes', 'hardware_policer_excess_burst_bytes'], name, value)
class ConfigMaxRate(Entity):
    """
    Configured maximum rate
    .. attribute:: policy_value
        Policy value
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: policy_unit
        Policy unit
        **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
    """

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigMaxRate, self).__init__()
        self.yang_name = "config-max-rate"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no child containers or child lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf descriptors: Python attribute name -> YANG leaf name/type.
        self._leafs = OrderedDict([
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ])
        self.policy_value = None
        self.policy_unit = None
        # Fixed path segment — this container has no list keys.
        self._segment_path = lambda: "config-max-rate"

    def __setattr__(self, name, value):
        # Route assignments through the YDK Entity helper for leaf tracking.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigMaxRate, ['policy_value', 'policy_unit'], name, value)
class ConfigMinRate(Entity):
    """
    Configured minimum rate
    .. attribute:: policy_value
        Policy value
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: policy_unit
        Policy unit
        **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
    """

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigMinRate, self).__init__()
        self.yang_name = "config-min-rate"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no child containers or child lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf descriptors: Python attribute name -> YANG leaf name/type.
        self._leafs = OrderedDict([
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ])
        self.policy_value = None
        self.policy_unit = None
        # Fixed path segment — this container has no list keys.
        self._segment_path = lambda: "config-min-rate"

    def __setattr__(self, name, value):
        # Route assignments through the YDK Entity helper for leaf tracking.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigMinRate, ['policy_value', 'policy_unit'], name, value)
class ConfigQueueLimit(Entity):
    """
    Configured queue limit
    .. attribute:: policy_value
        Policy value
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: policy_unit
        Policy unit
        **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
    """

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigQueueLimit, self).__init__()
        self.yang_name = "config-queue-limit"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no child containers or child lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf descriptors: Python attribute name -> YANG leaf name/type.
        self._leafs = OrderedDict([
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ])
        self.policy_value = None
        self.policy_unit = None
        # Fixed path segment — this container has no list keys.
        self._segment_path = lambda: "config-queue-limit"

    def __setattr__(self, name, value):
        # Route assignments through the YDK Entity helper for leaf tracking.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigQueueLimit, ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerAverageRate(Entity):
    """
    Configured policer average rate
    .. attribute:: policy_value
        Policy value
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: policy_unit
        Policy unit
        **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
    """

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerAverageRate, self).__init__()
        self.yang_name = "config-policer-average-rate"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no child containers or child lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf descriptors: Python attribute name -> YANG leaf name/type.
        self._leafs = OrderedDict([
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ])
        self.policy_value = None
        self.policy_unit = None
        # Fixed path segment — this container has no list keys.
        self._segment_path = lambda: "config-policer-average-rate"

    def __setattr__(self, name, value):
        # Route assignments through the YDK Entity helper for leaf tracking.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerAverageRate, ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerPeakRate(Entity):
    """
    Config policer peak rate
    .. attribute:: policy_value
        Policy value
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: policy_unit
        Policy unit
        **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
    """

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerPeakRate, self).__init__()
        self.yang_name = "config-policer-peak-rate"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no child containers or child lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf descriptors: Python attribute name -> YANG leaf name/type.
        self._leafs = OrderedDict([
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ])
        self.policy_value = None
        self.policy_unit = None
        # Fixed path segment — this container has no list keys.
        self._segment_path = lambda: "config-policer-peak-rate"

    def __setattr__(self, name, value):
        # Route assignments through the YDK Entity helper for leaf tracking.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerPeakRate, ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerConformBurst(Entity):
    """
    Configured policer conform burst
    .. attribute:: policy_value
        Policy value
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: policy_unit
        Policy unit
        **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
    """

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerConformBurst, self).__init__()
        self.yang_name = "config-policer-conform-burst"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no child containers or child lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf descriptors: Python attribute name -> YANG leaf name/type.
        self._leafs = OrderedDict([
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ])
        self.policy_value = None
        self.policy_unit = None
        # Fixed path segment — this container has no list keys.
        self._segment_path = lambda: "config-policer-conform-burst"

    def __setattr__(self, name, value):
        # Route assignments through the YDK Entity helper for leaf tracking.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerConformBurst, ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerExcessBurst(Entity):
    """
    Configured policer excess burst
    .. attribute:: policy_value
        Policy value
        **type**\: int
        **range:** 0..4294967295
    .. attribute:: policy_unit
        Policy unit
        **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
    """

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerExcessBurst, self).__init__()
        self.yang_name = "config-policer-excess-burst"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no child containers or child lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf descriptors: Python attribute name -> YANG leaf name/type.
        self._leafs = OrderedDict([
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ])
        self.policy_value = None
        self.policy_unit = None
        # Fixed path segment — this container has no list keys.
        self._segment_path = lambda: "config-policer-excess-burst"

    def __setattr__(self, name, value):
        # Route assignments through the YDK Entity helper for leaf tracking.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConfigPolicerExcessBurst, ['policy_value', 'policy_unit'], name, value)
class ConformAction(Entity):
    """
    Conform action
    .. attribute:: action_type
        Policer action type
        **type**\: :py:class:`DnxQoseaShowAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowAction>`
    .. attribute:: mark
        Action mark
        **type**\: list of :py:class:`Mark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConformAction.Mark>`
    """

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConformAction, self).__init__()
        self.yang_name = "conform-action"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # Single child list of mark entries.
        self._child_list_classes = OrderedDict([("mark", ("mark", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConformAction.Mark))])
        self._leafs = OrderedDict([
            ('action_type', YLeaf(YType.enumeration, 'action-type')),
        ])
        self.action_type = None
        self.mark = YList(self)
        self._segment_path = lambda: "conform-action"

    def __setattr__(self, name, value):
        # Route assignments through the YDK Entity helper for leaf tracking.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConformAction, ['action_type'], name, value)

    class Mark(Entity):
        """
        Action mark
        .. attribute:: mark_type
            Mark type
            **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
        .. attribute:: mark_value
            Mark value
            **type**\: int
            **range:** 0..65535
        """

        # YANG module metadata used by the YDK runtime.
        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConformAction.Mark, self).__init__()
            self.yang_name = "mark"
            self.yang_parent_name = "conform-action"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Leaf-only list entry: no child containers or child lists.
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
                ('mark_value', YLeaf(YType.uint16, 'mark-value')),
            ])
            self.mark_type = None
            self.mark_value = None
            # Keyless list: path segment carries no predicate.
            self._segment_path = lambda: "mark"

        def __setattr__(self, name, value):
            # Route assignments through the YDK Entity helper for leaf tracking.
            self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ConformAction.Mark, ['mark_type', 'mark_value'], name, value)
class ExceedAction(Entity):
    """
    Exceed action
    .. attribute:: action_type
        Policer action type
        **type**\: :py:class:`DnxQoseaShowAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowAction>`
    .. attribute:: mark
        Action mark
        **type**\: list of :py:class:`Mark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ExceedAction.Mark>`
    """

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ExceedAction, self).__init__()
        self.yang_name = "exceed-action"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # Single child list of mark entries.
        self._child_list_classes = OrderedDict([("mark", ("mark", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ExceedAction.Mark))])
        self._leafs = OrderedDict([
            ('action_type', YLeaf(YType.enumeration, 'action-type')),
        ])
        self.action_type = None
        self.mark = YList(self)
        self._segment_path = lambda: "exceed-action"

    def __setattr__(self, name, value):
        # Route assignments through the YDK Entity helper for leaf tracking.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ExceedAction, ['action_type'], name, value)

    class Mark(Entity):
        """
        Action mark
        .. attribute:: mark_type
            Mark type
            **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
        .. attribute:: mark_value
            Mark value
            **type**\: int
            **range:** 0..65535
        """

        # YANG module metadata used by the YDK runtime.
        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ExceedAction.Mark, self).__init__()
            self.yang_name = "mark"
            self.yang_parent_name = "exceed-action"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Leaf-only list entry: no child containers or child lists.
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
                ('mark_value', YLeaf(YType.uint16, 'mark-value')),
            ])
            self.mark_type = None
            self.mark_value = None
            # Keyless list: path segment carries no predicate.
            self._segment_path = lambda: "mark"

        def __setattr__(self, name, value):
            # Route assignments through the YDK Entity helper for leaf tracking.
            self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ExceedAction.Mark, ['mark_type', 'mark_value'], name, value)
class ViolateAction(Entity):
    """
    Violate action
    .. attribute:: action_type
        Policer action type
        **type**\: :py:class:`DnxQoseaShowAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowAction>`
    .. attribute:: mark
        Action mark
        **type**\: list of :py:class:`Mark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ViolateAction.Mark>`
    """

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ViolateAction, self).__init__()
        self.yang_name = "violate-action"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # Single child list of mark entries.
        self._child_list_classes = OrderedDict([("mark", ("mark", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ViolateAction.Mark))])
        self._leafs = OrderedDict([
            ('action_type', YLeaf(YType.enumeration, 'action-type')),
        ])
        self.action_type = None
        self.mark = YList(self)
        self._segment_path = lambda: "violate-action"

    def __setattr__(self, name, value):
        # Route assignments through the YDK Entity helper for leaf tracking.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ViolateAction, ['action_type'], name, value)

    class Mark(Entity):
        """
        Action mark
        .. attribute:: mark_type
            Mark type
            **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
        .. attribute:: mark_value
            Mark value
            **type**\: int
            **range:** 0..65535
        """

        # YANG module metadata used by the YDK runtime.
        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ViolateAction.Mark, self).__init__()
            self.yang_name = "mark"
            self.yang_parent_name = "violate-action"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Leaf-only list entry: no child containers or child lists.
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
                ('mark_value', YLeaf(YType.uint16, 'mark-value')),
            ])
            self.mark_type = None
            self.mark_value = None
            # Keyless list: path segment carries no predicate.
            self._segment_path = lambda: "mark"

        def __setattr__(self, name, value):
            # Route assignments through the YDK Entity helper for leaf tracking.
            self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.ViolateAction.Mark, ['mark_type', 'mark_value'], name, value)
class IpMark(Entity):
    """
    IP mark
    .. attribute:: mark_type
        Mark type
        **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
    .. attribute:: mark_value
        Mark value
        **type**\: int
        **range:** 0..65535
    """

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.IpMark, self).__init__()
        self.yang_name = "ip-mark"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only list entry: no child containers or child lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
            ('mark_value', YLeaf(YType.uint16, 'mark-value')),
        ])
        self.mark_type = None
        self.mark_value = None
        # Keyless list: path segment carries no predicate.
        self._segment_path = lambda: "ip-mark"

    def __setattr__(self, name, value):
        # Route assignments through the YDK Entity helper for leaf tracking.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.IpMark, ['mark_type', 'mark_value'], name, value)
class CommonMark(Entity):
    """
    Common mark
    .. attribute:: mark_type
        Mark type
        **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
    .. attribute:: mark_value
        Mark value
        **type**\: int
        **range:** 0..65535
    """

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.CommonMark, self).__init__()
        self.yang_name = "common-mark"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only list entry: no child containers or child lists.
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
            ('mark_value', YLeaf(YType.uint16, 'mark-value')),
        ])
        self.mark_type = None
        self.mark_value = None
        # Keyless list: path segment carries no predicate.
        self._segment_path = lambda: "common-mark"

    def __setattr__(self, name, value):
        # Route assignments through the YDK Entity helper for leaf tracking.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.CommonMark, ['mark_type', 'mark_value'], name, value)
class MplsMark(Entity):
"""
MPLS mark
.. attribute:: mark_type
Mark type
**type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
.. attribute:: mark_value
Mark value
**type**\: int
**range:** 0..65535
"""
# YANG module prefix/revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.MplsMark, self).__init__()
# Schema identity: container "mpls-mark" under parent "class".
self.yang_name = "mpls-mark"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Leaf-only container: no child containers or child lists.
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registration precedes the leaf assignments below (writes go
# through the overridden __setattr__) — keep this order.
self._leafs = OrderedDict([
('mark_type', YLeaf(YType.enumeration, 'mark-type')),
('mark_value', YLeaf(YType.uint16, 'mark-value')),
])
self.mark_type = None
self.mark_value = None
# Relative XPath segment of this container in the data tree.
self._segment_path = lambda: "mpls-mark"
def __setattr__(self, name, value):
# Route attribute writes through the generated-framework hook for the listed leaf names.
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.MplsMark, ['mark_type', 'mark_value'], name, value)
class Wred(Entity):
"""
WRED parameters
.. attribute:: wred_match_value
WRED match values
**type**\: :py:class:`WredMatchValue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.WredMatchValue>`
.. attribute:: config_min_threshold
Configured minimum threshold
**type**\: :py:class:`ConfigMinThreshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.ConfigMinThreshold>`
.. attribute:: config_max_threshold
Configured maximum threshold
**type**\: :py:class:`ConfigMaxThreshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.ConfigMaxThreshold>`
.. attribute:: wred_match_type
WREDMatchType
**type**\: :py:class:`DnxQoseaShowWred <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowWred>`
.. attribute:: hardware_min_threshold_bytes
Hardware minimum threshold
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_max_threshold_bytes
Hardware maximum threshold
**type**\: int
**range:** 0..4294967295
.. attribute:: first_segment
First segment
**type**\: int
**range:** 0..65535
.. attribute:: segment_size
Segment size
**type**\: int
**range:** 0..4294967295
"""
# YANG module prefix/revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred, self).__init__()
# Schema identity: container "wred" under parent "class".
self.yang_name = "wred"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# YANG child-container name -> (python attribute, generated class).
self._child_container_classes = OrderedDict([("wred-match-value", ("wred_match_value", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.WredMatchValue)), ("config-min-threshold", ("config_min_threshold", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.ConfigMinThreshold)), ("config-max-threshold", ("config_max_threshold", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.ConfigMaxThreshold))])
self._child_list_classes = OrderedDict([])
# Leaf registration precedes the leaf assignments below (writes go
# through the overridden __setattr__) — keep this order.
self._leafs = OrderedDict([
('wred_match_type', YLeaf(YType.enumeration, 'wred-match-type')),
('hardware_min_threshold_bytes', YLeaf(YType.uint32, 'hardware-min-threshold-bytes')),
('hardware_max_threshold_bytes', YLeaf(YType.uint32, 'hardware-max-threshold-bytes')),
('first_segment', YLeaf(YType.uint16, 'first-segment')),
('segment_size', YLeaf(YType.uint32, 'segment-size')),
])
self.wred_match_type = None
self.hardware_min_threshold_bytes = None
self.hardware_max_threshold_bytes = None
self.first_segment = None
self.segment_size = None
# Eagerly instantiate the three child containers and parent-link them.
self.wred_match_value = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.WredMatchValue()
self.wred_match_value.parent = self
self._children_name_map["wred_match_value"] = "wred-match-value"
self._children_yang_names.add("wred-match-value")
self.config_min_threshold = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.ConfigMinThreshold()
self.config_min_threshold.parent = self
self._children_name_map["config_min_threshold"] = "config-min-threshold"
self._children_yang_names.add("config-min-threshold")
self.config_max_threshold = PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.ConfigMaxThreshold()
self.config_max_threshold.parent = self
self._children_name_map["config_max_threshold"] = "config-max-threshold"
self._children_yang_names.add("config-max-threshold")
# Relative XPath segment of this container in the data tree.
self._segment_path = lambda: "wred"
def __setattr__(self, name, value):
# Route attribute writes through the generated-framework hook for the listed leaf names.
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred, ['wred_match_type', 'hardware_min_threshold_bytes', 'hardware_max_threshold_bytes', 'first_segment', 'segment_size'], name, value)
class WredMatchValue(Entity):
"""
WRED match values
.. attribute:: dnx_qosea_show_red_match_value
dnx qosea show red match value
**type**\: list of :py:class:`DnxQoseaShowRedMatchValue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue>`
"""
# YANG module prefix/revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.WredMatchValue, self).__init__()
# Schema identity: container "wred-match-value" under parent "wred".
self.yang_name = "wred-match-value"
self.yang_parent_name = "wred"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
# Single YANG child list of match-value ranges; no leaves of its own.
self._child_list_classes = OrderedDict([("dnx-qosea-show-red-match-value", ("dnx_qosea_show_red_match_value", PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue))])
self._leafs = OrderedDict()
self.dnx_qosea_show_red_match_value = YList(self)
# Relative XPath segment of this container in the data tree.
self._segment_path = lambda: "wred-match-value"
def __setattr__(self, name, value):
# No leaves here, so the tracked-leaf list is empty.
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.WredMatchValue, [], name, value)
class DnxQoseaShowRedMatchValue(Entity):
"""
dnx qosea show red match value
.. attribute:: range_start
Start value of a range
**type**\: int
**range:** 0..255
.. attribute:: range_end
End value of a range
**type**\: int
**range:** 0..255
"""
# YANG module prefix/revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue, self).__init__()
# Schema identity: keyless list entry under "wred-match-value".
self.yang_name = "dnx-qosea-show-red-match-value"
self.yang_parent_name = "wred-match-value"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registration precedes the leaf assignments below (writes go
# through the overridden __setattr__) — keep this order.
self._leafs = OrderedDict([
('range_start', YLeaf(YType.uint8, 'range-start')),
('range_end', YLeaf(YType.uint8, 'range-end')),
])
self.range_start = None
self.range_end = None
# Relative XPath segment of this list entry in the data tree.
self._segment_path = lambda: "dnx-qosea-show-red-match-value"
def __setattr__(self, name, value):
# Route attribute writes through the generated-framework hook for the listed leaf names.
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue, ['range_start', 'range_end'], name, value)
class ConfigMinThreshold(Entity):
"""
Configured minimum threshold
.. attribute:: policy_value
Policy value
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_unit
Policy unit
**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
"""
# YANG module prefix/revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.ConfigMinThreshold, self).__init__()
# Schema identity: container "config-min-threshold" under parent "wred".
self.yang_name = "config-min-threshold"
self.yang_parent_name = "wred"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Leaf-only (value, unit) pair container.
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('policy_value', YLeaf(YType.uint32, 'policy-value')),
('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
])
self.policy_value = None
self.policy_unit = None
# Relative XPath segment of this container in the data tree.
self._segment_path = lambda: "config-min-threshold"
def __setattr__(self, name, value):
# Route attribute writes through the generated-framework hook for the listed leaf names.
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.ConfigMinThreshold, ['policy_value', 'policy_unit'], name, value)
class ConfigMaxThreshold(Entity):
"""
Configured maximum threshold
.. attribute:: policy_value
Policy value
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_unit
Policy unit
**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
"""
# YANG module prefix/revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.ConfigMaxThreshold, self).__init__()
# Schema identity: container "config-max-threshold" under parent "wred".
self.yang_name = "config-max-threshold"
self.yang_parent_name = "wred"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Leaf-only (value, unit) pair container.
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('policy_value', YLeaf(YType.uint32, 'policy-value')),
('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
])
self.policy_value = None
self.policy_unit = None
# Relative XPath segment of this container in the data tree.
self._segment_path = lambda: "config-max-threshold"
def __setattr__(self, name, value):
# Route attribute writes through the generated-framework hook for the listed leaf names.
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaces.BundleInterface.Classes.Class.Wred.ConfigMaxThreshold, ['policy_value', 'policy_unit'], name, value)
class Interfaces(Entity):
"""
QoS list of interfaces
.. attribute:: interface
QoS interface names
**type**\: list of :py:class:`Interface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface>`
"""
# YANG module prefix/revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.Interfaces, self).__init__()
# Schema identity: container "interfaces" under parent "node".
self.yang_name = "interfaces"
self.yang_parent_name = "node"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
# Single YANG child list "interface"; this container has no leaves.
self._child_list_classes = OrderedDict([("interface", ("interface", PlatformQos.Nodes.Node.Interfaces.Interface))])
self._leafs = OrderedDict()
self.interface = YList(self)
# Relative XPath segment of this container in the data tree.
self._segment_path = lambda: "interfaces"
def __setattr__(self, name, value):
# No leaves here, so the tracked-leaf list is empty.
self._perform_setattr(PlatformQos.Nodes.Node.Interfaces, [], name, value)
class Interface(Entity):
"""
QoS interface names
.. attribute:: interface_name (key)
The name of the interface
**type**\: str
**pattern:** [a\-zA\-Z0\-9./\-]+
.. attribute:: qos_direction
The interface direction on which QoS is applied to
**type**\: str
.. attribute:: policy_details
Policy Details
**type**\: :py:class:`PolicyDetails <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.PolicyDetails>`
.. attribute:: classes
QoS list of class names
**type**\: :py:class:`Classes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes>`
"""
# YANG module prefix/revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.Interfaces.Interface, self).__init__()
# Schema identity: list "interface" keyed by interface-name.
self.yang_name = "interface"
self.yang_parent_name = "interfaces"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['interface_name']
# YANG child-container name -> (python attribute, generated class).
self._child_container_classes = OrderedDict([("policy-details", ("policy_details", PlatformQos.Nodes.Node.Interfaces.Interface.PolicyDetails)), ("classes", ("classes", PlatformQos.Nodes.Node.Interfaces.Interface.Classes))])
self._child_list_classes = OrderedDict([])
# Leaf registration precedes the leaf assignments below (writes go
# through the overridden __setattr__) — keep this order.
self._leafs = OrderedDict([
('interface_name', YLeaf(YType.str, 'interface-name')),
('qos_direction', YLeaf(YType.str, 'qos-direction')),
])
self.interface_name = None
self.qos_direction = None
# Eagerly instantiate the two child containers and parent-link them.
self.policy_details = PlatformQos.Nodes.Node.Interfaces.Interface.PolicyDetails()
self.policy_details.parent = self
self._children_name_map["policy_details"] = "policy-details"
self._children_yang_names.add("policy-details")
self.classes = PlatformQos.Nodes.Node.Interfaces.Interface.Classes()
self.classes.parent = self
self._children_name_map["classes"] = "classes"
self._children_yang_names.add("classes")
# Path segment embeds the list-key predicate, e.g. interface[interface-name='...'].
self._segment_path = lambda: "interface" + "[interface-name='" + str(self.interface_name) + "']"
def __setattr__(self, name, value):
# Route attribute writes through the generated-framework hook for the listed leaf names.
self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface, ['interface_name', 'qos_direction'], name, value)
class PolicyDetails(Entity):
"""
Policy Details
.. attribute:: npu_id
NPU ID
**type**\: int
**range:** 0..4294967295
.. attribute:: interface_handle
InterfaceHandle
**type**\: int
**range:** 0..4294967295
.. attribute:: interface_bandwidth_kbps
Interface Bandwidth (in kbps)
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: policy_name
Policy name
**type**\: str
**length:** 0..64
.. attribute:: total_number_of_classes
Number of Classes
**type**\: int
**range:** 0..65535
.. attribute:: voq_base_address
VOQ base address
**type**\: int
**range:** 0..4294967295
.. attribute:: voq_stats_handle
VOQ stats handle
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: stats_accounting_type
QoS Statistics Accounting Type
**type**\: :py:class:`QosPolicyAccountEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.QosPolicyAccountEnum>`
.. attribute:: policy_status
Policy Status
**type**\: :py:class:`DnxQoseaShowPolicyStatus <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowPolicyStatus>`
.. attribute:: interface_status
Interface Status
**type**\: :py:class:`DnxQoseaShowIntfStatus <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowIntfStatus>`
"""
# YANG module prefix/revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.Interfaces.Interface.PolicyDetails, self).__init__()
# Schema identity: container "policy-details" under parent "interface".
self.yang_name = "policy-details"
self.yang_parent_name = "interface"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Leaf-only container: no child containers or child lists.
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
# Leaf registration precedes the leaf assignments below (writes go
# through the overridden __setattr__) — keep this order.
self._leafs = OrderedDict([
('npu_id', YLeaf(YType.uint32, 'npu-id')),
('interface_handle', YLeaf(YType.uint32, 'interface-handle')),
('interface_bandwidth_kbps', YLeaf(YType.uint32, 'interface-bandwidth-kbps')),
('policy_name', YLeaf(YType.str, 'policy-name')),
('total_number_of_classes', YLeaf(YType.uint16, 'total-number-of-classes')),
('voq_base_address', YLeaf(YType.uint32, 'voq-base-address')),
('voq_stats_handle', YLeaf(YType.uint64, 'voq-stats-handle')),
('stats_accounting_type', YLeaf(YType.enumeration, 'stats-accounting-type')),
('policy_status', YLeaf(YType.enumeration, 'policy-status')),
('interface_status', YLeaf(YType.enumeration, 'interface-status')),
])
self.npu_id = None
self.interface_handle = None
self.interface_bandwidth_kbps = None
self.policy_name = None
self.total_number_of_classes = None
self.voq_base_address = None
self.voq_stats_handle = None
self.stats_accounting_type = None
self.policy_status = None
self.interface_status = None
# Relative XPath segment of this container in the data tree.
self._segment_path = lambda: "policy-details"
def __setattr__(self, name, value):
# Route attribute writes through the generated-framework hook for the listed leaf names.
self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.PolicyDetails, ['npu_id', 'interface_handle', 'interface_bandwidth_kbps', 'policy_name', 'total_number_of_classes', 'voq_base_address', 'voq_stats_handle', 'stats_accounting_type', 'policy_status', 'interface_status'], name, value)
class Classes(Entity):
"""
QoS list of class names
.. attribute:: class_
QoS policy class
**type**\: list of :py:class:`Class <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class>`
"""
# YANG module prefix/revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes, self).__init__()
# Schema identity: container "classes" under parent "interface".
self.yang_name = "classes"
self.yang_parent_name = "interface"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
# YANG list "class" maps to python attribute "class_" ("class" is a keyword).
self._child_list_classes = OrderedDict([("class", ("class_", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class))])
self._leafs = OrderedDict()
self.class_ = YList(self)
# Relative XPath segment of this container in the data tree.
self._segment_path = lambda: "classes"
def __setattr__(self, name, value):
# No leaves here, so the tracked-leaf list is empty.
self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes, [], name, value)
class Class(Entity):
"""
QoS policy class
.. attribute:: level_one_class_name (key)
QoS policy class name at level 1
**type**\: str
.. attribute:: level_two_class_name
QoS policy child class name at level 2
**type**\: str
.. attribute:: config_max_rate
Configured maximum rate
**type**\: :py:class:`ConfigMaxRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigMaxRate>`
.. attribute:: config_min_rate
Configured minimum rate
**type**\: :py:class:`ConfigMinRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigMinRate>`
.. attribute:: config_queue_limit
Configured queue limit
**type**\: :py:class:`ConfigQueueLimit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigQueueLimit>`
.. attribute:: config_policer_average_rate
Configured policer average rate
**type**\: :py:class:`ConfigPolicerAverageRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerAverageRate>`
.. attribute:: config_policer_peak_rate
Config policer peak rate
**type**\: :py:class:`ConfigPolicerPeakRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerPeakRate>`
.. attribute:: config_policer_conform_burst
Configured policer conform burst
**type**\: :py:class:`ConfigPolicerConformBurst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerConformBurst>`
.. attribute:: config_policer_excess_burst
Configured policer excess burst
**type**\: :py:class:`ConfigPolicerExcessBurst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerExcessBurst>`
.. attribute:: conform_action
Conform action
**type**\: :py:class:`ConformAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConformAction>`
.. attribute:: exceed_action
Exceed action
**type**\: :py:class:`ExceedAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ExceedAction>`
.. attribute:: violate_action
Violate action
**type**\: :py:class:`ViolateAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ViolateAction>`
.. attribute:: class_level
Class level
**type**\: :py:class:`DnxQoseaShowLevel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowLevel>`
.. attribute:: egress_queue_id
Egress Queue ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: queue_type
Queue type
**type**\: :py:class:`DnxQoseaShowQueue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowQueue>`
.. attribute:: priority_level
Priority level
**type**\: :py:class:`DnxQoseaShowHpLevel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowHpLevel>`
.. attribute:: hardware_max_rate_kbps
Hardware maximum rate in kbps
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: hardware_min_rate_kbps
Hardware minimum rate in kbps
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: config_excess_bandwidth_percent
Configured excess bandwidth percentage
**type**\: int
**range:** 0..4294967295
**units**\: percentage
.. attribute:: config_excess_bandwidth_unit
Configured excess bandwidth unit
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_excess_bandwidth_weight
Hardware excess bandwidth weight
**type**\: int
**range:** 0..4294967295
.. attribute:: network_min_bandwidth_kbps
Network minimum Bandwith
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_queue_limit_bytes
Hardware queue limit in bytes
**type**\: int
**range:** 0..18446744073709551615
**units**\: byte
.. attribute:: hardware_queue_limit_microseconds
Hardware queue limit in microseconds
**type**\: int
**range:** 0..18446744073709551615
**units**\: microsecond
.. attribute:: policer_bucket_id
PolicerBucketID
**type**\: int
**range:** 0..4294967295
.. attribute:: policer_stats_handle
PolicerStatsHandle
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: hardware_policer_average_rate_kbps
Hardware policer average in kbps
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: hardware_policer_peak_rate_kbps
Hardware policer peak rate
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_policer_conform_burst_bytes
Hardware policer conform burst
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_policer_excess_burst_bytes
Hardware policer excess burst
**type**\: int
**range:** 0..4294967295
.. attribute:: ip_mark
IP mark
**type**\: list of :py:class:`IpMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.IpMark>`
.. attribute:: common_mark
Common mark
**type**\: list of :py:class:`CommonMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.CommonMark>`
.. attribute:: mpls_mark
MPLS mark
**type**\: list of :py:class:`MplsMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.MplsMark>`
.. attribute:: wred
WRED parameters
**type**\: list of :py:class:`Wred <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred>`
"""
# YANG module prefix/revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class, self).__init__()
# Schema identity: list "class" keyed by level-one-class-name.
self.yang_name = "class"
self.yang_parent_name = "classes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['level_one_class_name']
# YANG child-container name -> (python attribute, generated class).
self._child_container_classes = OrderedDict([("config-max-rate", ("config_max_rate", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigMaxRate)), ("config-min-rate", ("config_min_rate", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigMinRate)), ("config-queue-limit", ("config_queue_limit", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigQueueLimit)), ("config-policer-average-rate", ("config_policer_average_rate", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerAverageRate)), ("config-policer-peak-rate", ("config_policer_peak_rate", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerPeakRate)), ("config-policer-conform-burst", ("config_policer_conform_burst", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerConformBurst)), ("config-policer-excess-burst", ("config_policer_excess_burst", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerExcessBurst)), ("conform-action", ("conform_action", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConformAction)), ("exceed-action", ("exceed_action", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ExceedAction)), ("violate-action", ("violate_action", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ViolateAction))])
# YANG child-list name -> (python attribute, generated class).
self._child_list_classes = OrderedDict([("ip-mark", ("ip_mark", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.IpMark)), ("common-mark", ("common_mark", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.CommonMark)), ("mpls-mark", ("mpls_mark", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.MplsMark)), ("wred", ("wred", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred))])
# Leaf registration precedes the leaf assignments below (writes go
# through the overridden __setattr__) — keep this order.
self._leafs = OrderedDict([
('level_one_class_name', YLeaf(YType.str, 'level-one-class-name')),
('level_two_class_name', YLeaf(YType.str, 'level-two-class-name')),
('class_level', YLeaf(YType.enumeration, 'class-level')),
('egress_queue_id', YLeaf(YType.int32, 'egress-queue-id')),
('queue_type', YLeaf(YType.enumeration, 'queue-type')),
('priority_level', YLeaf(YType.enumeration, 'priority-level')),
('hardware_max_rate_kbps', YLeaf(YType.uint32, 'hardware-max-rate-kbps')),
('hardware_min_rate_kbps', YLeaf(YType.uint32, 'hardware-min-rate-kbps')),
('config_excess_bandwidth_percent', YLeaf(YType.uint32, 'config-excess-bandwidth-percent')),
('config_excess_bandwidth_unit', YLeaf(YType.uint32, 'config-excess-bandwidth-unit')),
('hardware_excess_bandwidth_weight', YLeaf(YType.uint32, 'hardware-excess-bandwidth-weight')),
('network_min_bandwidth_kbps', YLeaf(YType.uint32, 'network-min-bandwidth-kbps')),
('hardware_queue_limit_bytes', YLeaf(YType.uint64, 'hardware-queue-limit-bytes')),
('hardware_queue_limit_microseconds', YLeaf(YType.uint64, 'hardware-queue-limit-microseconds')),
('policer_bucket_id', YLeaf(YType.uint32, 'policer-bucket-id')),
('policer_stats_handle', YLeaf(YType.uint64, 'policer-stats-handle')),
('hardware_policer_average_rate_kbps', YLeaf(YType.uint32, 'hardware-policer-average-rate-kbps')),
('hardware_policer_peak_rate_kbps', YLeaf(YType.uint32, 'hardware-policer-peak-rate-kbps')),
('hardware_policer_conform_burst_bytes', YLeaf(YType.uint32, 'hardware-policer-conform-burst-bytes')),
('hardware_policer_excess_burst_bytes', YLeaf(YType.uint32, 'hardware-policer-excess-burst-bytes')),
])
self.level_one_class_name = None
self.level_two_class_name = None
self.class_level = None
self.egress_queue_id = None
self.queue_type = None
self.priority_level = None
self.hardware_max_rate_kbps = None
self.hardware_min_rate_kbps = None
self.config_excess_bandwidth_percent = None
self.config_excess_bandwidth_unit = None
self.hardware_excess_bandwidth_weight = None
self.network_min_bandwidth_kbps = None
self.hardware_queue_limit_bytes = None
self.hardware_queue_limit_microseconds = None
self.policer_bucket_id = None
self.policer_stats_handle = None
self.hardware_policer_average_rate_kbps = None
self.hardware_policer_peak_rate_kbps = None
self.hardware_policer_conform_burst_bytes = None
self.hardware_policer_excess_burst_bytes = None
# Eagerly instantiate each child container and parent-link it.
self.config_max_rate = PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigMaxRate()
self.config_max_rate.parent = self
self._children_name_map["config_max_rate"] = "config-max-rate"
self._children_yang_names.add("config-max-rate")
self.config_min_rate = PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigMinRate()
self.config_min_rate.parent = self
self._children_name_map["config_min_rate"] = "config-min-rate"
self._children_yang_names.add("config-min-rate")
self.config_queue_limit = PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigQueueLimit()
self.config_queue_limit.parent = self
self._children_name_map["config_queue_limit"] = "config-queue-limit"
self._children_yang_names.add("config-queue-limit")
self.config_policer_average_rate = PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerAverageRate()
self.config_policer_average_rate.parent = self
self._children_name_map["config_policer_average_rate"] = "config-policer-average-rate"
self._children_yang_names.add("config-policer-average-rate")
self.config_policer_peak_rate = PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerPeakRate()
self.config_policer_peak_rate.parent = self
self._children_name_map["config_policer_peak_rate"] = "config-policer-peak-rate"
self._children_yang_names.add("config-policer-peak-rate")
self.config_policer_conform_burst = PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerConformBurst()
self.config_policer_conform_burst.parent = self
self._children_name_map["config_policer_conform_burst"] = "config-policer-conform-burst"
self._children_yang_names.add("config-policer-conform-burst")
self.config_policer_excess_burst = PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerExcessBurst()
self.config_policer_excess_burst.parent = self
self._children_name_map["config_policer_excess_burst"] = "config-policer-excess-burst"
self._children_yang_names.add("config-policer-excess-burst")
self.conform_action = PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConformAction()
self.conform_action.parent = self
self._children_name_map["conform_action"] = "conform-action"
self._children_yang_names.add("conform-action")
self.exceed_action = PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ExceedAction()
self.exceed_action.parent = self
self._children_name_map["exceed_action"] = "exceed-action"
self._children_yang_names.add("exceed-action")
self.violate_action = PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ViolateAction()
self.violate_action.parent = self
self._children_name_map["violate_action"] = "violate-action"
self._children_yang_names.add("violate-action")
# Child YANG lists start out empty.
self.ip_mark = YList(self)
self.common_mark = YList(self)
self.mpls_mark = YList(self)
self.wred = YList(self)
# Path segment embeds the list-key predicate, e.g. class[level-one-class-name='...'].
self._segment_path = lambda: "class" + "[level-one-class-name='" + str(self.level_one_class_name) + "']"
def __setattr__(self, name, value):
# Route attribute writes through the generated-framework hook for the listed leaf names.
self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class, ['level_one_class_name', 'level_two_class_name', 'class_level', 'egress_queue_id', 'queue_type', 'priority_level', 'hardware_max_rate_kbps', 'hardware_min_rate_kbps', 'config_excess_bandwidth_percent', 'config_excess_bandwidth_unit', 'hardware_excess_bandwidth_weight', 'network_min_bandwidth_kbps', 'hardware_queue_limit_bytes', 'hardware_queue_limit_microseconds', 'policer_bucket_id', 'policer_stats_handle', 'hardware_policer_average_rate_kbps', 'hardware_policer_peak_rate_kbps', 'hardware_policer_conform_burst_bytes', 'hardware_policer_excess_burst_bytes'], name, value)
class ConfigMaxRate(Entity):
"""
Configured maximum rate
.. attribute:: policy_value
Policy value
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_unit
Policy unit
**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
"""
# YANG module prefix/revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigMaxRate, self).__init__()
# Schema identity: container "config-max-rate" under parent "class".
self.yang_name = "config-max-rate"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Leaf-only (value, unit) pair container.
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('policy_value', YLeaf(YType.uint32, 'policy-value')),
('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
])
self.policy_value = None
self.policy_unit = None
# Relative XPath segment of this container in the data tree.
self._segment_path = lambda: "config-max-rate"
def __setattr__(self, name, value):
# Route attribute writes through the generated-framework hook for the listed leaf names.
self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigMaxRate, ['policy_value', 'policy_unit'], name, value)
class ConfigMinRate(Entity):
"""
Configured minimum rate
.. attribute:: policy_value
Policy value
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_unit
Policy unit
**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
"""
# YANG module prefix/revision this binding was generated from.
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigMinRate, self).__init__()
# Schema identity: container "config-min-rate" under parent "class".
self.yang_name = "config-min-rate"
self.yang_parent_name = "class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Leaf-only (value, unit) pair container.
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('policy_value', YLeaf(YType.uint32, 'policy-value')),
('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
])
self.policy_value = None
self.policy_unit = None
# Relative XPath segment of this container in the data tree.
self._segment_path = lambda: "config-min-rate"
def __setattr__(self, name, value):
# Route attribute writes through the generated-framework hook for the listed leaf names.
self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigMinRate, ['policy_value', 'policy_unit'], name, value)
class ConfigQueueLimit(Entity):
    """
    Configured queue limit
    .. attribute:: policy_value
    Policy value
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: policy_unit
    Policy unit
    **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
    """

    # YANG module prefix/revision for this generated binding.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigQueueLimit, self).__init__()

        # YDK metadata: node name and parent container in the YANG tree.
        self.yang_name = "config-queue-limit"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf registry (ordered) mapping python names to YANG leaves.
        self._leafs = OrderedDict([
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ])
        self.policy_value = None
        self.policy_unit = None
        self._segment_path = lambda: "config-queue-limit"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK leaf validation.
        self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigQueueLimit, ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerAverageRate(Entity):
    """
    Configured policer average rate
    .. attribute:: policy_value
    Policy value
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: policy_unit
    Policy unit
    **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
    """

    # YANG module prefix/revision for this generated binding.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerAverageRate, self).__init__()

        # YDK metadata: node name and parent container in the YANG tree.
        self.yang_name = "config-policer-average-rate"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf registry (ordered) mapping python names to YANG leaves.
        self._leafs = OrderedDict([
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ])
        self.policy_value = None
        self.policy_unit = None
        self._segment_path = lambda: "config-policer-average-rate"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK leaf validation.
        self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerAverageRate, ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerPeakRate(Entity):
    """
    Config policer peak rate
    .. attribute:: policy_value
    Policy value
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: policy_unit
    Policy unit
    **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
    """

    # YANG module prefix/revision for this generated binding.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerPeakRate, self).__init__()

        # YDK metadata: node name and parent container in the YANG tree.
        self.yang_name = "config-policer-peak-rate"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf registry (ordered) mapping python names to YANG leaves.
        self._leafs = OrderedDict([
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ])
        self.policy_value = None
        self.policy_unit = None
        self._segment_path = lambda: "config-policer-peak-rate"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK leaf validation.
        self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerPeakRate, ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerConformBurst(Entity):
    """
    Configured policer conform burst
    .. attribute:: policy_value
    Policy value
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: policy_unit
    Policy unit
    **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
    """

    # YANG module prefix/revision for this generated binding.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerConformBurst, self).__init__()

        # YDK metadata: node name and parent container in the YANG tree.
        self.yang_name = "config-policer-conform-burst"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf registry (ordered) mapping python names to YANG leaves.
        self._leafs = OrderedDict([
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ])
        self.policy_value = None
        self.policy_unit = None
        self._segment_path = lambda: "config-policer-conform-burst"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK leaf validation.
        self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerConformBurst, ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerExcessBurst(Entity):
    """
    Configured policer excess burst
    .. attribute:: policy_value
    Policy value
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: policy_unit
    Policy unit
    **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
    """

    # YANG module prefix/revision for this generated binding.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerExcessBurst, self).__init__()

        # YDK metadata: node name and parent container in the YANG tree.
        self.yang_name = "config-policer-excess-burst"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf registry (ordered) mapping python names to YANG leaves.
        self._leafs = OrderedDict([
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ])
        self.policy_value = None
        self.policy_unit = None
        self._segment_path = lambda: "config-policer-excess-burst"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK leaf validation.
        self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConfigPolicerExcessBurst, ['policy_value', 'policy_unit'], name, value)
class ConformAction(Entity):
    """
    Conform action
    .. attribute:: action_type
    Policer action type
    **type**\: :py:class:`DnxQoseaShowAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowAction>`
    .. attribute:: mark
    Action mark
    **type**\: list of :py:class:`Mark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConformAction.Mark>`
    """

    # YANG module prefix/revision for this generated binding.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConformAction, self).__init__()

        # YDK metadata: node name and parent container in the YANG tree.
        self.yang_name = "conform-action"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # "mark" is the only child list registered under this container.
        self._child_list_classes = OrderedDict([("mark", ("mark", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConformAction.Mark))])
        self._leafs = OrderedDict([
            ('action_type', YLeaf(YType.enumeration, 'action-type')),
        ])
        self.action_type = None

        self.mark = YList(self)  # list of Mark entries, parented to self
        self._segment_path = lambda: "conform-action"

    def __setattr__(self, name, value):
        # Only 'action_type' is a settable leaf; 'mark' is managed as a YList.
        self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConformAction, ['action_type'], name, value)

    class Mark(Entity):
        """
        Action mark
        .. attribute:: mark_type
        Mark type
        **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
        .. attribute:: mark_value
        Mark value
        **type**\: int
        **range:** 0..65535
        """

        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConformAction.Mark, self).__init__()

            # YDK metadata: list entry under the conform-action container.
            self.yang_name = "mark"
            self.yang_parent_name = "conform-action"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []  # NOTE(review): list without declared keys
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
                ('mark_value', YLeaf(YType.uint16, 'mark-value')),
            ])
            self.mark_type = None
            self.mark_value = None
            self._segment_path = lambda: "mark"

        def __setattr__(self, name, value):
            # Route attribute writes through YDK leaf validation.
            self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ConformAction.Mark, ['mark_type', 'mark_value'], name, value)
class ExceedAction(Entity):
    """
    Exceed action
    .. attribute:: action_type
    Policer action type
    **type**\: :py:class:`DnxQoseaShowAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowAction>`
    .. attribute:: mark
    Action mark
    **type**\: list of :py:class:`Mark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ExceedAction.Mark>`
    """

    # YANG module prefix/revision for this generated binding.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ExceedAction, self).__init__()

        # YDK metadata: node name and parent container in the YANG tree.
        self.yang_name = "exceed-action"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # "mark" is the only child list registered under this container.
        self._child_list_classes = OrderedDict([("mark", ("mark", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ExceedAction.Mark))])
        self._leafs = OrderedDict([
            ('action_type', YLeaf(YType.enumeration, 'action-type')),
        ])
        self.action_type = None

        self.mark = YList(self)  # list of Mark entries, parented to self
        self._segment_path = lambda: "exceed-action"

    def __setattr__(self, name, value):
        # Only 'action_type' is a settable leaf; 'mark' is managed as a YList.
        self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ExceedAction, ['action_type'], name, value)

    class Mark(Entity):
        """
        Action mark
        .. attribute:: mark_type
        Mark type
        **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
        .. attribute:: mark_value
        Mark value
        **type**\: int
        **range:** 0..65535
        """

        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ExceedAction.Mark, self).__init__()

            # YDK metadata: list entry under the exceed-action container.
            self.yang_name = "mark"
            self.yang_parent_name = "exceed-action"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []  # NOTE(review): list without declared keys
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
                ('mark_value', YLeaf(YType.uint16, 'mark-value')),
            ])
            self.mark_type = None
            self.mark_value = None
            self._segment_path = lambda: "mark"

        def __setattr__(self, name, value):
            # Route attribute writes through YDK leaf validation.
            self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ExceedAction.Mark, ['mark_type', 'mark_value'], name, value)
class ViolateAction(Entity):
    """
    Violate action
    .. attribute:: action_type
    Policer action type
    **type**\: :py:class:`DnxQoseaShowAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowAction>`
    .. attribute:: mark
    Action mark
    **type**\: list of :py:class:`Mark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ViolateAction.Mark>`
    """

    # YANG module prefix/revision for this generated binding.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ViolateAction, self).__init__()

        # YDK metadata: node name and parent container in the YANG tree.
        self.yang_name = "violate-action"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        # "mark" is the only child list registered under this container.
        self._child_list_classes = OrderedDict([("mark", ("mark", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ViolateAction.Mark))])
        self._leafs = OrderedDict([
            ('action_type', YLeaf(YType.enumeration, 'action-type')),
        ])
        self.action_type = None

        self.mark = YList(self)  # list of Mark entries, parented to self
        self._segment_path = lambda: "violate-action"

    def __setattr__(self, name, value):
        # Only 'action_type' is a settable leaf; 'mark' is managed as a YList.
        self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ViolateAction, ['action_type'], name, value)

    class Mark(Entity):
        """
        Action mark
        .. attribute:: mark_type
        Mark type
        **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
        .. attribute:: mark_value
        Mark value
        **type**\: int
        **range:** 0..65535
        """

        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ViolateAction.Mark, self).__init__()

            # YDK metadata: list entry under the violate-action container.
            self.yang_name = "mark"
            self.yang_parent_name = "violate-action"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []  # NOTE(review): list without declared keys
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
                ('mark_value', YLeaf(YType.uint16, 'mark-value')),
            ])
            self.mark_type = None
            self.mark_value = None
            self._segment_path = lambda: "mark"

        def __setattr__(self, name, value):
            # Route attribute writes through YDK leaf validation.
            self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.ViolateAction.Mark, ['mark_type', 'mark_value'], name, value)
class IpMark(Entity):
    """
    IP mark
    .. attribute:: mark_type
    Mark type
    **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
    .. attribute:: mark_value
    Mark value
    **type**\: int
    **range:** 0..65535
    """

    # YANG module prefix/revision for this generated binding.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.IpMark, self).__init__()

        # YDK metadata: node name and parent container in the YANG tree.
        self.yang_name = "ip-mark"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf registry (ordered) mapping python names to YANG leaves.
        self._leafs = OrderedDict([
            ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
            ('mark_value', YLeaf(YType.uint16, 'mark-value')),
        ])
        self.mark_type = None
        self.mark_value = None
        self._segment_path = lambda: "ip-mark"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK leaf validation.
        self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.IpMark, ['mark_type', 'mark_value'], name, value)
class CommonMark(Entity):
    """
    Common mark
    .. attribute:: mark_type
    Mark type
    **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
    .. attribute:: mark_value
    Mark value
    **type**\: int
    **range:** 0..65535
    """

    # YANG module prefix/revision for this generated binding.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.CommonMark, self).__init__()

        # YDK metadata: node name and parent container in the YANG tree.
        self.yang_name = "common-mark"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf registry (ordered) mapping python names to YANG leaves.
        self._leafs = OrderedDict([
            ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
            ('mark_value', YLeaf(YType.uint16, 'mark-value')),
        ])
        self.mark_type = None
        self.mark_value = None
        self._segment_path = lambda: "common-mark"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK leaf validation.
        self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.CommonMark, ['mark_type', 'mark_value'], name, value)
class MplsMark(Entity):
    """
    MPLS mark
    .. attribute:: mark_type
    Mark type
    **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
    .. attribute:: mark_value
    Mark value
    **type**\: int
    **range:** 0..65535
    """

    # YANG module prefix/revision for this generated binding.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.MplsMark, self).__init__()

        # YDK metadata: node name and parent container in the YANG tree.
        self.yang_name = "mpls-mark"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf registry (ordered) mapping python names to YANG leaves.
        self._leafs = OrderedDict([
            ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
            ('mark_value', YLeaf(YType.uint16, 'mark-value')),
        ])
        self.mark_type = None
        self.mark_value = None
        self._segment_path = lambda: "mpls-mark"

    def __setattr__(self, name, value):
        # Route attribute writes through YDK leaf validation.
        self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.MplsMark, ['mark_type', 'mark_value'], name, value)
class Wred(Entity):
    """
    WRED parameters
    .. attribute:: wred_match_value
    WRED match values
    **type**\: :py:class:`WredMatchValue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.WredMatchValue>`
    .. attribute:: config_min_threshold
    Configured minimum threshold
    **type**\: :py:class:`ConfigMinThreshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.ConfigMinThreshold>`
    .. attribute:: config_max_threshold
    Configured maximum threshold
    **type**\: :py:class:`ConfigMaxThreshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.ConfigMaxThreshold>`
    .. attribute:: wred_match_type
    WREDMatchType
    **type**\: :py:class:`DnxQoseaShowWred <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowWred>`
    .. attribute:: hardware_min_threshold_bytes
    Hardware minimum threshold
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: hardware_max_threshold_bytes
    Hardware maximum threshold
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: first_segment
    First segment
    **type**\: int
    **range:** 0..65535
    .. attribute:: segment_size
    Segment size
    **type**\: int
    **range:** 0..4294967295
    """

    # YANG module prefix/revision for this generated binding.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred, self).__init__()

        # YDK metadata: node name and parent container in the YANG tree.
        self.yang_name = "wred"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True  # an ancestor node is a keyed YANG list
        self.ylist_key_names = []
        # Three child containers hang off "wred"; registration order matters.
        self._child_container_classes = OrderedDict([("wred-match-value", ("wred_match_value", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.WredMatchValue)), ("config-min-threshold", ("config_min_threshold", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.ConfigMinThreshold)), ("config-max-threshold", ("config_max_threshold", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.ConfigMaxThreshold))])
        self._child_list_classes = OrderedDict([])
        # Leaf registry (ordered) mapping python names to YANG leaves.
        self._leafs = OrderedDict([
            ('wred_match_type', YLeaf(YType.enumeration, 'wred-match-type')),
            ('hardware_min_threshold_bytes', YLeaf(YType.uint32, 'hardware-min-threshold-bytes')),
            ('hardware_max_threshold_bytes', YLeaf(YType.uint32, 'hardware-max-threshold-bytes')),
            ('first_segment', YLeaf(YType.uint16, 'first-segment')),
            ('segment_size', YLeaf(YType.uint32, 'segment-size')),
        ])
        self.wred_match_type = None
        self.hardware_min_threshold_bytes = None
        self.hardware_max_threshold_bytes = None
        self.first_segment = None
        self.segment_size = None

        # Eagerly instantiate each child container and wire its parent link
        # plus the python-name <-> yang-name mapping used for serialization.
        self.wred_match_value = PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.WredMatchValue()
        self.wred_match_value.parent = self
        self._children_name_map["wred_match_value"] = "wred-match-value"
        self._children_yang_names.add("wred-match-value")

        self.config_min_threshold = PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.ConfigMinThreshold()
        self.config_min_threshold.parent = self
        self._children_name_map["config_min_threshold"] = "config-min-threshold"
        self._children_yang_names.add("config-min-threshold")

        self.config_max_threshold = PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.ConfigMaxThreshold()
        self.config_max_threshold.parent = self
        self._children_name_map["config_max_threshold"] = "config-max-threshold"
        self._children_yang_names.add("config-max-threshold")

        self._segment_path = lambda: "wred"

    def __setattr__(self, name, value):
        # Route writes to the five leaf attributes through YDK validation.
        self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred, ['wred_match_type', 'hardware_min_threshold_bytes', 'hardware_max_threshold_bytes', 'first_segment', 'segment_size'], name, value)

    class WredMatchValue(Entity):
        """
        WRED match values
        .. attribute:: dnx_qosea_show_red_match_value
        dnx qosea show red match value
        **type**\: list of :py:class:`DnxQoseaShowRedMatchValue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue>`
        """

        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.WredMatchValue, self).__init__()

            # Pure container: no leaves, one child list of match-value entries.
            self.yang_name = "wred-match-value"
            self.yang_parent_name = "wred"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([("dnx-qosea-show-red-match-value", ("dnx_qosea_show_red_match_value", PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue))])
            self._leafs = OrderedDict()

            self.dnx_qosea_show_red_match_value = YList(self)
            self._segment_path = lambda: "wred-match-value"

        def __setattr__(self, name, value):
            # No settable leaves; empty leaf-name list.
            self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.WredMatchValue, [], name, value)

        class DnxQoseaShowRedMatchValue(Entity):
            """
            dnx qosea show red match value
            .. attribute:: range_start
            Start value of a range
            **type**\: int
            **range:** 0..255
            .. attribute:: range_end
            End value of a range
            **type**\: int
            **range:** 0..255
            """

            _prefix = 'ncs5500-qos-oper'
            _revision = '2015-11-09'

            def __init__(self):
                super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue, self).__init__()

                # List entry holding one [range_start, range_end] match range.
                self.yang_name = "dnx-qosea-show-red-match-value"
                self.yang_parent_name = "wred-match-value"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []  # NOTE(review): list without declared keys
                self._child_container_classes = OrderedDict([])
                self._child_list_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('range_start', YLeaf(YType.uint8, 'range-start')),
                    ('range_end', YLeaf(YType.uint8, 'range-end')),
                ])
                self.range_start = None
                self.range_end = None
                self._segment_path = lambda: "dnx-qosea-show-red-match-value"

            def __setattr__(self, name, value):
                # Route attribute writes through YDK leaf validation.
                self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue, ['range_start', 'range_end'], name, value)

    class ConfigMinThreshold(Entity):
        """
        Configured minimum threshold
        .. attribute:: policy_value
        Policy value
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: policy_unit
        Policy unit
        **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
        """

        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.ConfigMinThreshold, self).__init__()

            # Value/unit pair container, same shape as the other policy params.
            self.yang_name = "config-min-threshold"
            self.yang_parent_name = "wred"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('policy_value', YLeaf(YType.uint32, 'policy-value')),
                ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
            ])
            self.policy_value = None
            self.policy_unit = None
            self._segment_path = lambda: "config-min-threshold"

        def __setattr__(self, name, value):
            # Route attribute writes through YDK leaf validation.
            self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.ConfigMinThreshold, ['policy_value', 'policy_unit'], name, value)

    class ConfigMaxThreshold(Entity):
        """
        Configured maximum threshold
        .. attribute:: policy_value
        Policy value
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: policy_unit
        Policy unit
        **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
        """

        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            super(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.ConfigMaxThreshold, self).__init__()

            # Value/unit pair container, same shape as the other policy params.
            self.yang_name = "config-max-threshold"
            self.yang_parent_name = "wred"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])
            self._child_list_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('policy_value', YLeaf(YType.uint32, 'policy-value')),
                ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
            ])
            self.policy_value = None
            self.policy_unit = None
            self._segment_path = lambda: "config-max-threshold"

        def __setattr__(self, name, value):
            # Route attribute writes through YDK leaf validation.
            self._perform_setattr(PlatformQos.Nodes.Node.Interfaces.Interface.Classes.Class.Wred.ConfigMaxThreshold, ['policy_value', 'policy_unit'], name, value)
class BundleInterfaceSingles(Entity):
"""
QoS list of bundle interfaces
.. attribute:: bundle_interface_single
QoS interface names
**type**\: list of :py:class:`BundleInterfaceSingle <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    super(PlatformQos.Nodes.Node.BundleInterfaceSingles, self).__init__()

    # YDK metadata: container of per-bundle-interface QoS entries.
    self.yang_name = "bundle-interface-singles"
    self.yang_parent_name = "node"
    self.is_top_level_class = False
    self.has_list_ancestor = True  # ancestor "node" is a keyed YANG list
    self.ylist_key_names = []
    self._child_container_classes = OrderedDict([])
    # One child list: the bundle-interface-single entries.
    self._child_list_classes = OrderedDict([("bundle-interface-single", ("bundle_interface_single", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle))])
    self._leafs = OrderedDict()  # pure container: no leaves

    self.bundle_interface_single = YList(self)
    self._segment_path = lambda: "bundle-interface-singles"

def __setattr__(self, name, value):
    # No settable leaves; empty leaf-name list.
    self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles, [], name, value)
class BundleInterfaceSingle(Entity):
"""
QoS interface names
.. attribute:: interface_name (key)
Bundle interface name
**type**\: str
**pattern:** [a\-zA\-Z0\-9./\-]+
.. attribute:: policy_details
Policy Details
**type**\: :py:class:`PolicyDetails <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.PolicyDetails>`
.. attribute:: member_interfaces
QoS list of member interfaces
**type**\: :py:class:`MemberInterfaces <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces>`
.. attribute:: classes
QoS list of class names
**type**\: :py:class:`Classes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle, self).__init__()

    # YDK metadata: keyed list entry, keyed by the bundle interface name.
    self.yang_name = "bundle-interface-single"
    self.yang_parent_name = "bundle-interface-singles"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['interface_name']  # YANG list key
    # Three child containers; registration order matters for serialization.
    self._child_container_classes = OrderedDict([("policy-details", ("policy_details", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.PolicyDetails)), ("member-interfaces", ("member_interfaces", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces)), ("classes", ("classes", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes))])
    self._child_list_classes = OrderedDict([])
    self._leafs = OrderedDict([
        ('interface_name', YLeaf(YType.str, 'interface-name')),
    ])
    self.interface_name = None

    # Eagerly instantiate child containers and wire parent links plus the
    # python-name <-> yang-name mapping used during serialization.
    self.policy_details = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.PolicyDetails()
    self.policy_details.parent = self
    self._children_name_map["policy_details"] = "policy-details"
    self._children_yang_names.add("policy-details")

    self.member_interfaces = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces()
    self.member_interfaces.parent = self
    self._children_name_map["member_interfaces"] = "member-interfaces"
    self._children_yang_names.add("member-interfaces")

    self.classes = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes()
    self.classes.parent = self
    self._children_name_map["classes"] = "classes"
    self._children_yang_names.add("classes")

    # Path segment embeds the list key as an XPath predicate.
    self._segment_path = lambda: "bundle-interface-single" + "[interface-name='" + str(self.interface_name) + "']"

def __setattr__(self, name, value):
    # Only the key leaf 'interface_name' is settable via YDK validation.
    self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle, ['interface_name'], name, value)
class PolicyDetails(Entity):
    """
    Policy Details
    .. attribute:: npu_id
    NPU ID
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: interface_handle
    InterfaceHandle
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: interface_bandwidth_kbps
    Interface Bandwidth (in kbps)
    **type**\: int
    **range:** 0..4294967295
    **units**\: kbit/s
    .. attribute:: policy_name
    Policy name
    **type**\: str
    **length:** 0..64
    .. attribute:: total_number_of_classes
    Number of Classes
    **type**\: int
    **range:** 0..65535
    .. attribute:: voq_base_address
    VOQ base address
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: voq_stats_handle
    VOQ stats handle
    **type**\: int
    **range:** 0..18446744073709551615
    .. attribute:: stats_accounting_type
    QoS Statistics Accounting Type
    **type**\: :py:class:`QosPolicyAccountEnum <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.QosPolicyAccountEnum>`
    .. attribute:: policy_status
    Policy Status
    **type**\: :py:class:`DnxQoseaShowPolicyStatus <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowPolicyStatus>`
    .. attribute:: interface_status
    Interface Status
    **type**\: :py:class:`DnxQoseaShowIntfStatus <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowIntfStatus>`
    """

    # YANG module prefix/revision for this generated binding.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.PolicyDetails, self).__init__()

        # YDK metadata: leaf-only container of operational policy details.
        self.yang_name = "policy-details"
        self.yang_parent_name = "bundle-interface-single"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict([])
        self._child_list_classes = OrderedDict([])
        # Leaf registry (ordered) mapping python names to YANG leaves.
        self._leafs = OrderedDict([
            ('npu_id', YLeaf(YType.uint32, 'npu-id')),
            ('interface_handle', YLeaf(YType.uint32, 'interface-handle')),
            ('interface_bandwidth_kbps', YLeaf(YType.uint32, 'interface-bandwidth-kbps')),
            ('policy_name', YLeaf(YType.str, 'policy-name')),
            ('total_number_of_classes', YLeaf(YType.uint16, 'total-number-of-classes')),
            ('voq_base_address', YLeaf(YType.uint32, 'voq-base-address')),
            ('voq_stats_handle', YLeaf(YType.uint64, 'voq-stats-handle')),
            ('stats_accounting_type', YLeaf(YType.enumeration, 'stats-accounting-type')),
            ('policy_status', YLeaf(YType.enumeration, 'policy-status')),
            ('interface_status', YLeaf(YType.enumeration, 'interface-status')),
        ])
        self.npu_id = None
        self.interface_handle = None
        self.interface_bandwidth_kbps = None
        self.policy_name = None
        self.total_number_of_classes = None
        self.voq_base_address = None
        self.voq_stats_handle = None
        self.stats_accounting_type = None
        self.policy_status = None
        self.interface_status = None
        self._segment_path = lambda: "policy-details"

    def __setattr__(self, name, value):
        # Route writes to all ten leaf attributes through YDK validation.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.PolicyDetails, ['npu_id', 'interface_handle', 'interface_bandwidth_kbps', 'policy_name', 'total_number_of_classes', 'voq_base_address', 'voq_stats_handle', 'stats_accounting_type', 'policy_status', 'interface_status'], name, value)
class MemberInterfaces(Entity):
"""
QoS list of member interfaces
.. attribute:: member_interface
QoS interface names
**type**\: list of :py:class:`MemberInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    """Set up YDK metadata for the member-interfaces list container."""
    klass = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces
    super(klass, self).__init__()
    self.yang_name = "member-interfaces"
    self.yang_parent_name = "bundle-interface-single"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # The only child is the keyed "member-interface" list; no leaves here.
    self._child_container_classes = OrderedDict()
    self._child_list_classes = OrderedDict(
        [("member-interface", ("member_interface", klass.MemberInterface))])
    self._leafs = OrderedDict()
    self.member_interface = YList(self)
    self._segment_path = lambda: "member-interfaces"
def __setattr__(self, name, value):
    # No data leaves on this container; writes still pass through YDK's hook.
    self._perform_setattr(
        PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces,
        [], name, value)
class MemberInterface(Entity):
"""
QoS interface names
.. attribute:: interface_name (key)
Member interface
**type**\: str
**pattern:** [a\-zA\-Z0\-9./\-]+
.. attribute:: policy_details
Policy Details
**type**\: :py:class:`PolicyDetails <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.PolicyDetails>`
.. attribute:: classes
QoS list of class names
**type**\: :py:class:`Classes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    """Set up YDK metadata, the key leaf and child containers for member-interface."""
    klass = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface
    super(klass, self).__init__()
    self.yang_name = "member-interface"
    self.yang_parent_name = "member-interfaces"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['interface_name']
    # Two child containers hang off each list entry.
    self._child_container_classes = OrderedDict([
        ("policy-details", ("policy_details", klass.PolicyDetails)),
        ("classes", ("classes", klass.Classes)),
    ])
    self._child_list_classes = OrderedDict()
    # The list key is the only leaf.
    self._leafs = OrderedDict([
        ('interface_name', YLeaf(YType.str, 'interface-name')),
    ])
    self.interface_name = None
    # Instantiate and wire up the child containers.
    self.policy_details = klass.PolicyDetails()
    self.policy_details.parent = self
    self._children_name_map["policy_details"] = "policy-details"
    self._children_yang_names.add("policy-details")
    self.classes = klass.Classes()
    self.classes.parent = self
    self._children_name_map["classes"] = "classes"
    self._children_yang_names.add("classes")
    # The path segment carries the interface-name key predicate.
    self._segment_path = lambda: "member-interface" + "[interface-name='" + str(self.interface_name) + "']"
def __setattr__(self, name, value):
    # Only the key leaf is a settable data leaf; YDK validates via this hook.
    self._perform_setattr(
        PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface,
        ['interface_name'], name, value)
class PolicyDetails(Entity):
    """
    Policy details for a bundle member interface.

    All leaves are read-only operational data:
      npu_id, interface_handle, interface_bandwidth_kbps (uint32, kbit/s),
      policy_name (str, length 0..64), total_number_of_classes (uint16),
      voq_base_address (uint32), voq_stats_handle (uint64),
      stats_accounting_type (QosPolicyAccountEnum),
      policy_status (DnxQoseaShowPolicyStatus),
      interface_status (DnxQoseaShowIntfStatus).
    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        klass = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.PolicyDetails
        super(klass, self).__init__()
        self.yang_name = "policy-details"
        self.yang_parent_name = "member-interface"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # No child containers or lists under this node.
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()
        # Leaf descriptors, keyed by Python attribute name.
        leaf_specs = [
            ('npu_id', YLeaf(YType.uint32, 'npu-id')),
            ('interface_handle', YLeaf(YType.uint32, 'interface-handle')),
            ('interface_bandwidth_kbps', YLeaf(YType.uint32, 'interface-bandwidth-kbps')),
            ('policy_name', YLeaf(YType.str, 'policy-name')),
            ('total_number_of_classes', YLeaf(YType.uint16, 'total-number-of-classes')),
            ('voq_base_address', YLeaf(YType.uint32, 'voq-base-address')),
            ('voq_stats_handle', YLeaf(YType.uint64, 'voq-stats-handle')),
            ('stats_accounting_type', YLeaf(YType.enumeration, 'stats-accounting-type')),
            ('policy_status', YLeaf(YType.enumeration, 'policy-status')),
            ('interface_status', YLeaf(YType.enumeration, 'interface-status')),
        ]
        self._leafs = OrderedDict(leaf_specs)
        # Leaves start unset; assignment goes through __setattr__.
        for attr_name, _ in leaf_specs:
            setattr(self, attr_name, None)
        self._segment_path = lambda: "policy-details"

    def __setattr__(self, name, value):
        # Route all writes through YDK's validation hook.
        self._perform_setattr(
            PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.PolicyDetails,
            ['npu_id', 'interface_handle', 'interface_bandwidth_kbps', 'policy_name',
             'total_number_of_classes', 'voq_base_address', 'voq_stats_handle',
             'stats_accounting_type', 'policy_status', 'interface_status'],
            name, value)
class Classes(Entity):
"""
QoS list of class names
.. attribute:: class_
QoS policy class
**type**\: list of :py:class:`Class <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    """Set up YDK metadata for the classes list container."""
    klass = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes
    super(klass, self).__init__()
    self.yang_name = "classes"
    self.yang_parent_name = "member-interface"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # The only child is the keyed "class" list; no leaves here.
    self._child_container_classes = OrderedDict()
    self._child_list_classes = OrderedDict([("class", ("class_", klass.Class))])
    self._leafs = OrderedDict()
    self.class_ = YList(self)
    self._segment_path = lambda: "classes"
def __setattr__(self, name, value):
    # No data leaves; writes still pass through YDK's validation hook.
    self._perform_setattr(
        PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes,
        [], name, value)
class Class(Entity):
"""
QoS policy class
.. attribute:: level_one_class_name (key)
QoS policy class name at level 1
**type**\: str
.. attribute:: level_two_class_name
QoS policy child class name at level 2
**type**\: str
.. attribute:: config_max_rate
Configured maximum rate
**type**\: :py:class:`ConfigMaxRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigMaxRate>`
.. attribute:: config_min_rate
Configured minimum rate
**type**\: :py:class:`ConfigMinRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigMinRate>`
.. attribute:: config_queue_limit
Configured queue limit
**type**\: :py:class:`ConfigQueueLimit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigQueueLimit>`
.. attribute:: config_policer_average_rate
Configured policer average rate
**type**\: :py:class:`ConfigPolicerAverageRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerAverageRate>`
.. attribute:: config_policer_peak_rate
Config policer peak rate
**type**\: :py:class:`ConfigPolicerPeakRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerPeakRate>`
.. attribute:: config_policer_conform_burst
Configured policer conform burst
**type**\: :py:class:`ConfigPolicerConformBurst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerConformBurst>`
.. attribute:: config_policer_excess_burst
Configured policer excess burst
**type**\: :py:class:`ConfigPolicerExcessBurst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerExcessBurst>`
.. attribute:: conform_action
Conform action
**type**\: :py:class:`ConformAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConformAction>`
.. attribute:: exceed_action
Exceed action
**type**\: :py:class:`ExceedAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction>`
.. attribute:: violate_action
Violate action
**type**\: :py:class:`ViolateAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction>`
.. attribute:: class_level
Class level
**type**\: :py:class:`DnxQoseaShowLevel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowLevel>`
.. attribute:: egress_queue_id
Egress Queue ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: queue_type
Queue type
**type**\: :py:class:`DnxQoseaShowQueue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowQueue>`
.. attribute:: priority_level
Priority level
**type**\: :py:class:`DnxQoseaShowHpLevel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowHpLevel>`
.. attribute:: hardware_max_rate_kbps
Hardware maximum rate in kbps
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: hardware_min_rate_kbps
Hardware minimum rate in kbps
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: config_excess_bandwidth_percent
Configured excess bandwidth percentage
**type**\: int
**range:** 0..4294967295
**units**\: percentage
.. attribute:: config_excess_bandwidth_unit
Configured excess bandwidth unit
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_excess_bandwidth_weight
Hardware excess bandwidth weight
**type**\: int
**range:** 0..4294967295
.. attribute:: network_min_bandwidth_kbps
Network minimum Bandwidth
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_queue_limit_bytes
Hardware queue limit in bytes
**type**\: int
**range:** 0..18446744073709551615
**units**\: byte
.. attribute:: hardware_queue_limit_microseconds
Hardware queue limit in microseconds
**type**\: int
**range:** 0..18446744073709551615
**units**\: microsecond
.. attribute:: policer_bucket_id
PolicerBucketID
**type**\: int
**range:** 0..4294967295
.. attribute:: policer_stats_handle
PolicerStatsHandle
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: hardware_policer_average_rate_kbps
Hardware policer average in kbps
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: hardware_policer_peak_rate_kbps
Hardware policer peak rate
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_policer_conform_burst_bytes
Hardware policer conform burst
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_policer_excess_burst_bytes
Hardware policer excess burst
**type**\: int
**range:** 0..4294967295
.. attribute:: ip_mark
IP mark
**type**\: list of :py:class:`IpMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.IpMark>`
.. attribute:: common_mark
Common mark
**type**\: list of :py:class:`CommonMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.CommonMark>`
.. attribute:: mpls_mark
MPLS mark
**type**\: list of :py:class:`MplsMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.MplsMark>`
.. attribute:: wred
WRED parameters
**type**\: list of :py:class:`Wred <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build YDK runtime metadata, leaves and child nodes for one "class" list entry."""
    super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class, self).__init__()
    # Identity of this node in the YANG schema; list keyed by level-one-class-name.
    self.yang_name = "class"
    self.yang_parent_name = "classes"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['level_one_class_name']
    # Child containers: configured rates/limits/bursts and policer actions (yang-name -> (attr, class)).
    self._child_container_classes = OrderedDict([("config-max-rate", ("config_max_rate", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigMaxRate)), ("config-min-rate", ("config_min_rate", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigMinRate)), ("config-queue-limit", ("config_queue_limit", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigQueueLimit)), ("config-policer-average-rate", ("config_policer_average_rate", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerAverageRate)), ("config-policer-peak-rate", ("config_policer_peak_rate", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerPeakRate)), ("config-policer-conform-burst", ("config_policer_conform_burst", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerConformBurst)), ("config-policer-excess-burst", ("config_policer_excess_burst", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerExcessBurst)), ("conform-action", ("conform_action", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConformAction)), ("exceed-action", ("exceed_action", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction)), ("violate-action", ("violate_action", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction))])
    # Child lists: per-class marking entries and WRED parameters.
    self._child_list_classes = OrderedDict([("ip-mark", ("ip_mark", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.IpMark)), ("common-mark", ("common_mark", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.CommonMark)), ("mpls-mark", ("mpls_mark", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.MplsMark)), ("wred", ("wred", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred))])
    # Leaf descriptors (python attribute name -> YLeaf typed by the YANG model).
    self._leafs = OrderedDict([
        ('level_one_class_name', YLeaf(YType.str, 'level-one-class-name')),
        ('level_two_class_name', YLeaf(YType.str, 'level-two-class-name')),
        ('class_level', YLeaf(YType.enumeration, 'class-level')),
        ('egress_queue_id', YLeaf(YType.int32, 'egress-queue-id')),
        ('queue_type', YLeaf(YType.enumeration, 'queue-type')),
        ('priority_level', YLeaf(YType.enumeration, 'priority-level')),
        ('hardware_max_rate_kbps', YLeaf(YType.uint32, 'hardware-max-rate-kbps')),
        ('hardware_min_rate_kbps', YLeaf(YType.uint32, 'hardware-min-rate-kbps')),
        ('config_excess_bandwidth_percent', YLeaf(YType.uint32, 'config-excess-bandwidth-percent')),
        ('config_excess_bandwidth_unit', YLeaf(YType.uint32, 'config-excess-bandwidth-unit')),
        ('hardware_excess_bandwidth_weight', YLeaf(YType.uint32, 'hardware-excess-bandwidth-weight')),
        ('network_min_bandwidth_kbps', YLeaf(YType.uint32, 'network-min-bandwidth-kbps')),
        ('hardware_queue_limit_bytes', YLeaf(YType.uint64, 'hardware-queue-limit-bytes')),
        ('hardware_queue_limit_microseconds', YLeaf(YType.uint64, 'hardware-queue-limit-microseconds')),
        ('policer_bucket_id', YLeaf(YType.uint32, 'policer-bucket-id')),
        ('policer_stats_handle', YLeaf(YType.uint64, 'policer-stats-handle')),
        ('hardware_policer_average_rate_kbps', YLeaf(YType.uint32, 'hardware-policer-average-rate-kbps')),
        ('hardware_policer_peak_rate_kbps', YLeaf(YType.uint32, 'hardware-policer-peak-rate-kbps')),
        ('hardware_policer_conform_burst_bytes', YLeaf(YType.uint32, 'hardware-policer-conform-burst-bytes')),
        ('hardware_policer_excess_burst_bytes', YLeaf(YType.uint32, 'hardware-policer-excess-burst-bytes')),
    ])
    # All leaves start unset; each assignment is routed through __setattr__.
    self.level_one_class_name = None
    self.level_two_class_name = None
    self.class_level = None
    self.egress_queue_id = None
    self.queue_type = None
    self.priority_level = None
    self.hardware_max_rate_kbps = None
    self.hardware_min_rate_kbps = None
    self.config_excess_bandwidth_percent = None
    self.config_excess_bandwidth_unit = None
    self.hardware_excess_bandwidth_weight = None
    self.network_min_bandwidth_kbps = None
    self.hardware_queue_limit_bytes = None
    self.hardware_queue_limit_microseconds = None
    self.policer_bucket_id = None
    self.policer_stats_handle = None
    self.hardware_policer_average_rate_kbps = None
    self.hardware_policer_peak_rate_kbps = None
    self.hardware_policer_conform_burst_bytes = None
    self.hardware_policer_excess_burst_bytes = None
    # Instantiate child containers, set their parent back-pointer, and register
    # the python-name -> yang-name mapping each one answers to.
    self.config_max_rate = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigMaxRate()
    self.config_max_rate.parent = self
    self._children_name_map["config_max_rate"] = "config-max-rate"
    self._children_yang_names.add("config-max-rate")
    self.config_min_rate = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigMinRate()
    self.config_min_rate.parent = self
    self._children_name_map["config_min_rate"] = "config-min-rate"
    self._children_yang_names.add("config-min-rate")
    self.config_queue_limit = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigQueueLimit()
    self.config_queue_limit.parent = self
    self._children_name_map["config_queue_limit"] = "config-queue-limit"
    self._children_yang_names.add("config-queue-limit")
    self.config_policer_average_rate = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerAverageRate()
    self.config_policer_average_rate.parent = self
    self._children_name_map["config_policer_average_rate"] = "config-policer-average-rate"
    self._children_yang_names.add("config-policer-average-rate")
    self.config_policer_peak_rate = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerPeakRate()
    self.config_policer_peak_rate.parent = self
    self._children_name_map["config_policer_peak_rate"] = "config-policer-peak-rate"
    self._children_yang_names.add("config-policer-peak-rate")
    self.config_policer_conform_burst = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerConformBurst()
    self.config_policer_conform_burst.parent = self
    self._children_name_map["config_policer_conform_burst"] = "config-policer-conform-burst"
    self._children_yang_names.add("config-policer-conform-burst")
    self.config_policer_excess_burst = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerExcessBurst()
    self.config_policer_excess_burst.parent = self
    self._children_name_map["config_policer_excess_burst"] = "config-policer-excess-burst"
    self._children_yang_names.add("config-policer-excess-burst")
    self.conform_action = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConformAction()
    self.conform_action.parent = self
    self._children_name_map["conform_action"] = "conform-action"
    self._children_yang_names.add("conform-action")
    self.exceed_action = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction()
    self.exceed_action.parent = self
    self._children_name_map["exceed_action"] = "exceed-action"
    self._children_yang_names.add("exceed-action")
    self.violate_action = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction()
    self.violate_action.parent = self
    self._children_name_map["violate_action"] = "violate-action"
    self._children_yang_names.add("violate-action")
    # Child lists start empty.
    self.ip_mark = YList(self)
    self.common_mark = YList(self)
    self.mpls_mark = YList(self)
    self.wred = YList(self)
    # The path segment carries the level-one-class-name key predicate.
    self._segment_path = lambda: "class" + "[level-one-class-name='" + str(self.level_one_class_name) + "']"
def __setattr__(self, name, value):
    # Every write is validated by YDK; only these names are settable data leaves.
    leaf_names = ['level_one_class_name', 'level_two_class_name', 'class_level',
                  'egress_queue_id', 'queue_type', 'priority_level',
                  'hardware_max_rate_kbps', 'hardware_min_rate_kbps',
                  'config_excess_bandwidth_percent', 'config_excess_bandwidth_unit',
                  'hardware_excess_bandwidth_weight', 'network_min_bandwidth_kbps',
                  'hardware_queue_limit_bytes', 'hardware_queue_limit_microseconds',
                  'policer_bucket_id', 'policer_stats_handle',
                  'hardware_policer_average_rate_kbps', 'hardware_policer_peak_rate_kbps',
                  'hardware_policer_conform_burst_bytes', 'hardware_policer_excess_burst_bytes']
    self._perform_setattr(
        PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class,
        leaf_names, name, value)
class ConfigMaxRate(Entity):
    """
    Configured maximum rate.

    Leaves: policy_value (uint32, 0..4294967295) and policy_unit
    (PolicyParamUnit enumeration).
    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        klass = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigMaxRate
        super(klass, self).__init__()
        self.yang_name = "config-max-rate"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()
        # A (value, unit) pair describing the configured rate.
        leaf_specs = [
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ]
        self._leafs = OrderedDict(leaf_specs)
        for attr_name, _ in leaf_specs:
            setattr(self, attr_name, None)
        self._segment_path = lambda: "config-max-rate"

    def __setattr__(self, name, value):
        self._perform_setattr(
            PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigMaxRate,
            ['policy_value', 'policy_unit'], name, value)
class ConfigMinRate(Entity):
    """
    Configured minimum rate.

    Leaves: policy_value (uint32, 0..4294967295) and policy_unit
    (PolicyParamUnit enumeration).
    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        klass = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigMinRate
        super(klass, self).__init__()
        self.yang_name = "config-min-rate"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()
        # A (value, unit) pair describing the configured rate.
        leaf_specs = [
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ]
        self._leafs = OrderedDict(leaf_specs)
        for attr_name, _ in leaf_specs:
            setattr(self, attr_name, None)
        self._segment_path = lambda: "config-min-rate"

    def __setattr__(self, name, value):
        self._perform_setattr(
            PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigMinRate,
            ['policy_value', 'policy_unit'], name, value)
class ConfigQueueLimit(Entity):
    """
    Configured queue limit.

    Leaves: policy_value (uint32, 0..4294967295) and policy_unit
    (PolicyParamUnit enumeration).
    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        klass = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigQueueLimit
        super(klass, self).__init__()
        self.yang_name = "config-queue-limit"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()
        # A (value, unit) pair describing the configured limit.
        leaf_specs = [
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ]
        self._leafs = OrderedDict(leaf_specs)
        for attr_name, _ in leaf_specs:
            setattr(self, attr_name, None)
        self._segment_path = lambda: "config-queue-limit"

    def __setattr__(self, name, value):
        self._perform_setattr(
            PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigQueueLimit,
            ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerAverageRate(Entity):
    """
    Configured policer average rate.

    Leaves: policy_value (uint32, 0..4294967295) and policy_unit
    (PolicyParamUnit enumeration).
    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        klass = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerAverageRate
        super(klass, self).__init__()
        self.yang_name = "config-policer-average-rate"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()
        # A (value, unit) pair describing the configured rate.
        leaf_specs = [
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ]
        self._leafs = OrderedDict(leaf_specs)
        for attr_name, _ in leaf_specs:
            setattr(self, attr_name, None)
        self._segment_path = lambda: "config-policer-average-rate"

    def __setattr__(self, name, value):
        self._perform_setattr(
            PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerAverageRate,
            ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerPeakRate(Entity):
    """
    Configured policer peak rate.

    Leaves: policy_value (uint32, 0..4294967295) and policy_unit
    (PolicyParamUnit enumeration).
    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        klass = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerPeakRate
        super(klass, self).__init__()
        self.yang_name = "config-policer-peak-rate"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()
        # A (value, unit) pair describing the configured rate.
        leaf_specs = [
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ]
        self._leafs = OrderedDict(leaf_specs)
        for attr_name, _ in leaf_specs:
            setattr(self, attr_name, None)
        self._segment_path = lambda: "config-policer-peak-rate"

    def __setattr__(self, name, value):
        self._perform_setattr(
            PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerPeakRate,
            ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerConformBurst(Entity):
    """
    Configured policer conform burst.

    Leaves: policy_value (uint32, 0..4294967295) and policy_unit
    (PolicyParamUnit enumeration).
    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        klass = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerConformBurst
        super(klass, self).__init__()
        self.yang_name = "config-policer-conform-burst"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()
        # A (value, unit) pair describing the configured burst.
        leaf_specs = [
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ]
        self._leafs = OrderedDict(leaf_specs)
        for attr_name, _ in leaf_specs:
            setattr(self, attr_name, None)
        self._segment_path = lambda: "config-policer-conform-burst"

    def __setattr__(self, name, value):
        self._perform_setattr(
            PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerConformBurst,
            ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerExcessBurst(Entity):
    """
    Configured policer excess burst.

    Leaves: policy_value (uint32, 0..4294967295) and policy_unit
    (PolicyParamUnit enumeration).
    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        klass = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerExcessBurst
        super(klass, self).__init__()
        self.yang_name = "config-policer-excess-burst"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()
        # A (value, unit) pair describing the configured burst.
        leaf_specs = [
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ]
        self._leafs = OrderedDict(leaf_specs)
        for attr_name, _ in leaf_specs:
            setattr(self, attr_name, None)
        self._segment_path = lambda: "config-policer-excess-burst"

    def __setattr__(self, name, value):
        self._perform_setattr(
            PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConfigPolicerExcessBurst,
            ['policy_value', 'policy_unit'], name, value)
class ConformAction(Entity):
    """
    Conform action
    .. attribute:: action_type
    Policer action type
    **type**\: :py:class:`DnxQoseaShowAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowAction>`
    .. attribute:: mark
    Action mark
    **type**\: list of :py:class:`Mark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConformAction.Mark>`
    """

    # YANG module prefix and revision this generated binding was built from.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # NOTE: assignments below are intercepted by the overridden
        # __setattr__ (-> Entity._perform_setattr); keep the order so the
        # YDK bookkeeping containers exist before leaf values are assigned.
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConformAction, self).__init__()

        self.yang_name = "conform-action"
        self.yang_parent_name = "class"      # name of the enclosing YANG node
        self.is_top_level_class = False
        self.has_list_ancestor = True        # an ancestor node is a YANG list
        self.ylist_key_names = []            # container node: no list keys
        self._child_container_classes = OrderedDict([])  # no child containers
        # YANG child list "mark" -> (python attribute, binding class).
        self._child_list_classes = OrderedDict([("mark", ("mark", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConformAction.Mark))])
        # Python leaf name -> YLeaf(YANG type, YANG leaf name).
        self._leafs = OrderedDict([
            ('action_type', YLeaf(YType.enumeration, 'action-type')),
        ])
        self.action_type = None
        self.mark = YList(self)              # list of Mark entries
        # Relative schema path segment for this node.
        self._segment_path = lambda: "conform-action"

    def __setattr__(self, name, value):
        # Delegate to YDK's _perform_setattr with this node's leaf names.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConformAction, ['action_type'], name, value)

    class Mark(Entity):
        """
        Action mark
        .. attribute:: mark_type
        Mark type
        **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
        .. attribute:: mark_value
        Mark value
        **type**\: int
        **range:** 0..65535
        """

        # YANG module prefix and revision this generated binding was built from.
        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Same ordering caveat as the parent: assignments go through the
            # overridden __setattr__.
            super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConformAction.Mark, self).__init__()

            self.yang_name = "mark"
            self.yang_parent_name = "conform-action"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []        # keyless YANG list entry
            self._child_container_classes = OrderedDict([])  # no child containers
            self._child_list_classes = OrderedDict([])       # no child lists
            # Python leaf name -> YLeaf(YANG type, YANG leaf name).
            self._leafs = OrderedDict([
                ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
                ('mark_value', YLeaf(YType.uint16, 'mark-value')),
            ])
            self.mark_type = None
            self.mark_value = None
            # Relative schema path segment for this node.
            self._segment_path = lambda: "mark"

        def __setattr__(self, name, value):
            # Delegate to YDK's _perform_setattr with this node's leaf names.
            self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ConformAction.Mark, ['mark_type', 'mark_value'], name, value)
class ExceedAction(Entity):
    """
    Exceed action
    .. attribute:: action_type
    Policer action type
    **type**\: :py:class:`DnxQoseaShowAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowAction>`
    .. attribute:: mark
    Action mark
    **type**\: list of :py:class:`Mark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction.Mark>`
    """

    # YANG module prefix and revision this generated binding was built from.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # NOTE: assignments below are intercepted by the overridden
        # __setattr__ (-> Entity._perform_setattr); keep the order so the
        # YDK bookkeeping containers exist before leaf values are assigned.
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction, self).__init__()

        self.yang_name = "exceed-action"
        self.yang_parent_name = "class"      # name of the enclosing YANG node
        self.is_top_level_class = False
        self.has_list_ancestor = True        # an ancestor node is a YANG list
        self.ylist_key_names = []            # container node: no list keys
        self._child_container_classes = OrderedDict([])  # no child containers
        # YANG child list "mark" -> (python attribute, binding class).
        self._child_list_classes = OrderedDict([("mark", ("mark", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction.Mark))])
        # Python leaf name -> YLeaf(YANG type, YANG leaf name).
        self._leafs = OrderedDict([
            ('action_type', YLeaf(YType.enumeration, 'action-type')),
        ])
        self.action_type = None
        self.mark = YList(self)              # list of Mark entries
        # Relative schema path segment for this node.
        self._segment_path = lambda: "exceed-action"

    def __setattr__(self, name, value):
        # Delegate to YDK's _perform_setattr with this node's leaf names.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction, ['action_type'], name, value)

    class Mark(Entity):
        """
        Action mark
        .. attribute:: mark_type
        Mark type
        **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
        .. attribute:: mark_value
        Mark value
        **type**\: int
        **range:** 0..65535
        """

        # YANG module prefix and revision this generated binding was built from.
        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Same ordering caveat as the parent: assignments go through the
            # overridden __setattr__.
            super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction.Mark, self).__init__()

            self.yang_name = "mark"
            self.yang_parent_name = "exceed-action"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []        # keyless YANG list entry
            self._child_container_classes = OrderedDict([])  # no child containers
            self._child_list_classes = OrderedDict([])       # no child lists
            # Python leaf name -> YLeaf(YANG type, YANG leaf name).
            self._leafs = OrderedDict([
                ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
                ('mark_value', YLeaf(YType.uint16, 'mark-value')),
            ])
            self.mark_type = None
            self.mark_value = None
            # Relative schema path segment for this node.
            self._segment_path = lambda: "mark"

        def __setattr__(self, name, value):
            # Delegate to YDK's _perform_setattr with this node's leaf names.
            self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ExceedAction.Mark, ['mark_type', 'mark_value'], name, value)
class ViolateAction(Entity):
    """
    Violate action
    .. attribute:: action_type
    Policer action type
    **type**\: :py:class:`DnxQoseaShowAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowAction>`
    .. attribute:: mark
    Action mark
    **type**\: list of :py:class:`Mark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction.Mark>`
    """

    # YANG module prefix and revision this generated binding was built from.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # NOTE: assignments below are intercepted by the overridden
        # __setattr__ (-> Entity._perform_setattr); keep the order so the
        # YDK bookkeeping containers exist before leaf values are assigned.
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction, self).__init__()

        self.yang_name = "violate-action"
        self.yang_parent_name = "class"      # name of the enclosing YANG node
        self.is_top_level_class = False
        self.has_list_ancestor = True        # an ancestor node is a YANG list
        self.ylist_key_names = []            # container node: no list keys
        self._child_container_classes = OrderedDict([])  # no child containers
        # YANG child list "mark" -> (python attribute, binding class).
        self._child_list_classes = OrderedDict([("mark", ("mark", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction.Mark))])
        # Python leaf name -> YLeaf(YANG type, YANG leaf name).
        self._leafs = OrderedDict([
            ('action_type', YLeaf(YType.enumeration, 'action-type')),
        ])
        self.action_type = None
        self.mark = YList(self)              # list of Mark entries
        # Relative schema path segment for this node.
        self._segment_path = lambda: "violate-action"

    def __setattr__(self, name, value):
        # Delegate to YDK's _perform_setattr with this node's leaf names.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction, ['action_type'], name, value)

    class Mark(Entity):
        """
        Action mark
        .. attribute:: mark_type
        Mark type
        **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
        .. attribute:: mark_value
        Mark value
        **type**\: int
        **range:** 0..65535
        """

        # YANG module prefix and revision this generated binding was built from.
        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Same ordering caveat as the parent: assignments go through the
            # overridden __setattr__.
            super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction.Mark, self).__init__()

            self.yang_name = "mark"
            self.yang_parent_name = "violate-action"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []        # keyless YANG list entry
            self._child_container_classes = OrderedDict([])  # no child containers
            self._child_list_classes = OrderedDict([])       # no child lists
            # Python leaf name -> YLeaf(YANG type, YANG leaf name).
            self._leafs = OrderedDict([
                ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
                ('mark_value', YLeaf(YType.uint16, 'mark-value')),
            ])
            self.mark_type = None
            self.mark_value = None
            # Relative schema path segment for this node.
            self._segment_path = lambda: "mark"

        def __setattr__(self, name, value):
            # Delegate to YDK's _perform_setattr with this node's leaf names.
            self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.ViolateAction.Mark, ['mark_type', 'mark_value'], name, value)
class IpMark(Entity):
    """
    IP mark
    .. attribute:: mark_type
    Mark type
    **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
    .. attribute:: mark_value
    Mark value
    **type**\: int
    **range:** 0..65535
    """

    # YANG module prefix and revision this generated binding was built from.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # NOTE: assignments below are intercepted by the overridden
        # __setattr__ (-> Entity._perform_setattr); keep the order so the
        # YDK bookkeeping containers exist before leaf values are assigned.
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.IpMark, self).__init__()

        self.yang_name = "ip-mark"
        self.yang_parent_name = "class"      # name of the enclosing YANG node
        self.is_top_level_class = False
        self.has_list_ancestor = True        # an ancestor node is a YANG list
        self.ylist_key_names = []            # keyless YANG list entry
        self._child_container_classes = OrderedDict([])  # no child containers
        self._child_list_classes = OrderedDict([])       # no child lists
        # Python leaf name -> YLeaf(YANG type, YANG leaf name).
        self._leafs = OrderedDict([
            ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
            ('mark_value', YLeaf(YType.uint16, 'mark-value')),
        ])
        self.mark_type = None
        self.mark_value = None
        # Relative schema path segment for this node.
        self._segment_path = lambda: "ip-mark"

    def __setattr__(self, name, value):
        # Delegate to YDK's _perform_setattr with this node's leaf names.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.IpMark, ['mark_type', 'mark_value'], name, value)
class CommonMark(Entity):
    """
    Common mark
    .. attribute:: mark_type
    Mark type
    **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
    .. attribute:: mark_value
    Mark value
    **type**\: int
    **range:** 0..65535
    """

    # YANG module prefix and revision this generated binding was built from.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # NOTE: assignments below are intercepted by the overridden
        # __setattr__ (-> Entity._perform_setattr); keep the order so the
        # YDK bookkeeping containers exist before leaf values are assigned.
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.CommonMark, self).__init__()

        self.yang_name = "common-mark"
        self.yang_parent_name = "class"      # name of the enclosing YANG node
        self.is_top_level_class = False
        self.has_list_ancestor = True        # an ancestor node is a YANG list
        self.ylist_key_names = []            # keyless YANG list entry
        self._child_container_classes = OrderedDict([])  # no child containers
        self._child_list_classes = OrderedDict([])       # no child lists
        # Python leaf name -> YLeaf(YANG type, YANG leaf name).
        self._leafs = OrderedDict([
            ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
            ('mark_value', YLeaf(YType.uint16, 'mark-value')),
        ])
        self.mark_type = None
        self.mark_value = None
        # Relative schema path segment for this node.
        self._segment_path = lambda: "common-mark"

    def __setattr__(self, name, value):
        # Delegate to YDK's _perform_setattr with this node's leaf names.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.CommonMark, ['mark_type', 'mark_value'], name, value)
class MplsMark(Entity):
    """
    MPLS mark
    .. attribute:: mark_type
    Mark type
    **type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`
    .. attribute:: mark_value
    Mark value
    **type**\: int
    **range:** 0..65535
    """

    # YANG module prefix and revision this generated binding was built from.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # NOTE: assignments below are intercepted by the overridden
        # __setattr__ (-> Entity._perform_setattr); keep the order so the
        # YDK bookkeeping containers exist before leaf values are assigned.
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.MplsMark, self).__init__()

        self.yang_name = "mpls-mark"
        self.yang_parent_name = "class"      # name of the enclosing YANG node
        self.is_top_level_class = False
        self.has_list_ancestor = True        # an ancestor node is a YANG list
        self.ylist_key_names = []            # keyless YANG list entry
        self._child_container_classes = OrderedDict([])  # no child containers
        self._child_list_classes = OrderedDict([])       # no child lists
        # Python leaf name -> YLeaf(YANG type, YANG leaf name).
        self._leafs = OrderedDict([
            ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
            ('mark_value', YLeaf(YType.uint16, 'mark-value')),
        ])
        self.mark_type = None
        self.mark_value = None
        # Relative schema path segment for this node.
        self._segment_path = lambda: "mpls-mark"

    def __setattr__(self, name, value):
        # Delegate to YDK's _perform_setattr with this node's leaf names.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.MplsMark, ['mark_type', 'mark_value'], name, value)
class Wred(Entity):
    """
    WRED parameters
    .. attribute:: wred_match_value
    WRED match values
    **type**\: :py:class:`WredMatchValue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue>`
    .. attribute:: config_min_threshold
    Configured minimum threshold
    **type**\: :py:class:`ConfigMinThreshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMinThreshold>`
    .. attribute:: config_max_threshold
    Configured maximum threshold
    **type**\: :py:class:`ConfigMaxThreshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMaxThreshold>`
    .. attribute:: wred_match_type
    WREDMatchType
    **type**\: :py:class:`DnxQoseaShowWred <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowWred>`
    .. attribute:: hardware_min_threshold_bytes
    Hardware minimum threshold
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: hardware_max_threshold_bytes
    Hardware maximum threshold
    **type**\: int
    **range:** 0..4294967295
    .. attribute:: first_segment
    First segment
    **type**\: int
    **range:** 0..65535
    .. attribute:: segment_size
    Segment size
    **type**\: int
    **range:** 0..4294967295
    """

    # YANG module prefix and revision this generated binding was built from.
    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # NOTE: assignments below are intercepted by the overridden
        # __setattr__ (-> Entity._perform_setattr); keep the order so the
        # YDK bookkeeping containers exist before leaf/child values are
        # assigned.
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred, self).__init__()

        self.yang_name = "wred"
        self.yang_parent_name = "class"      # name of the enclosing YANG node
        self.is_top_level_class = False
        self.has_list_ancestor = True        # an ancestor node is a YANG list
        self.ylist_key_names = []            # keyless YANG list entry
        # YANG child container name -> (python attribute, binding class).
        self._child_container_classes = OrderedDict([("wred-match-value", ("wred_match_value", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue)), ("config-min-threshold", ("config_min_threshold", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMinThreshold)), ("config-max-threshold", ("config_max_threshold", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMaxThreshold))])
        self._child_list_classes = OrderedDict([])       # no child lists
        # Python leaf name -> YLeaf(YANG type, YANG leaf name).
        self._leafs = OrderedDict([
            ('wred_match_type', YLeaf(YType.enumeration, 'wred-match-type')),
            ('hardware_min_threshold_bytes', YLeaf(YType.uint32, 'hardware-min-threshold-bytes')),
            ('hardware_max_threshold_bytes', YLeaf(YType.uint32, 'hardware-max-threshold-bytes')),
            ('first_segment', YLeaf(YType.uint16, 'first-segment')),
            ('segment_size', YLeaf(YType.uint32, 'segment-size')),
        ])
        self.wred_match_type = None
        self.hardware_min_threshold_bytes = None
        self.hardware_max_threshold_bytes = None
        self.first_segment = None
        self.segment_size = None

        # Instantiate child containers, set their parent back-reference, and
        # register the python-name <-> YANG-name mapping for each.
        self.wred_match_value = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue()
        self.wred_match_value.parent = self
        self._children_name_map["wred_match_value"] = "wred-match-value"
        self._children_yang_names.add("wred-match-value")

        self.config_min_threshold = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMinThreshold()
        self.config_min_threshold.parent = self
        self._children_name_map["config_min_threshold"] = "config-min-threshold"
        self._children_yang_names.add("config-min-threshold")

        self.config_max_threshold = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMaxThreshold()
        self.config_max_threshold.parent = self
        self._children_name_map["config_max_threshold"] = "config-max-threshold"
        self._children_yang_names.add("config-max-threshold")

        # Relative schema path segment for this node.
        self._segment_path = lambda: "wred"

    def __setattr__(self, name, value):
        # Delegate to YDK's _perform_setattr with this node's leaf names.
        self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred, ['wred_match_type', 'hardware_min_threshold_bytes', 'hardware_max_threshold_bytes', 'first_segment', 'segment_size'], name, value)

    class WredMatchValue(Entity):
        """
        WRED match values
        .. attribute:: dnx_qosea_show_red_match_value
        dnx qosea show red match value
        **type**\: list of :py:class:`DnxQoseaShowRedMatchValue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue>`
        """

        # YANG module prefix and revision this generated binding was built from.
        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Same ordering caveat as the parent: assignments go through the
            # overridden __setattr__.
            super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue, self).__init__()

            self.yang_name = "wred-match-value"
            self.yang_parent_name = "wred"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict([])  # no child containers
            # YANG child list -> (python attribute, binding class).
            self._child_list_classes = OrderedDict([("dnx-qosea-show-red-match-value", ("dnx_qosea_show_red_match_value", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue))])
            self._leafs = OrderedDict()      # this container has no leaves
            self.dnx_qosea_show_red_match_value = YList(self)
            # Relative schema path segment for this node.
            self._segment_path = lambda: "wred-match-value"

        def __setattr__(self, name, value):
            # No leaves on this node, hence the empty leaf-name list.
            self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue, [], name, value)

        class DnxQoseaShowRedMatchValue(Entity):
            """
            dnx qosea show red match value
            .. attribute:: range_start
            Start value of a range
            **type**\: int
            **range:** 0..255
            .. attribute:: range_end
            End value of a range
            **type**\: int
            **range:** 0..255
            """

            # YANG module prefix and revision this generated binding was built from.
            _prefix = 'ncs5500-qos-oper'
            _revision = '2015-11-09'

            def __init__(self):
                # Same ordering caveat as the parent: assignments go through
                # the overridden __setattr__.
                super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue, self).__init__()

                self.yang_name = "dnx-qosea-show-red-match-value"
                self.yang_parent_name = "wred-match-value"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []    # keyless YANG list entry
                self._child_container_classes = OrderedDict([])  # no child containers
                self._child_list_classes = OrderedDict([])       # no child lists
                # Python leaf name -> YLeaf(YANG type, YANG leaf name).
                self._leafs = OrderedDict([
                    ('range_start', YLeaf(YType.uint8, 'range-start')),
                    ('range_end', YLeaf(YType.uint8, 'range-end')),
                ])
                self.range_start = None
                self.range_end = None
                # Relative schema path segment for this node.
                self._segment_path = lambda: "dnx-qosea-show-red-match-value"

            def __setattr__(self, name, value):
                # Delegate to YDK's _perform_setattr with this node's leaf names.
                self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue, ['range_start', 'range_end'], name, value)

    class ConfigMinThreshold(Entity):
        """
        Configured minimum threshold
        .. attribute:: policy_value
        Policy value
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: policy_unit
        Policy unit
        **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
        """

        # YANG module prefix and revision this generated binding was built from.
        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Same ordering caveat as the parent: assignments go through the
            # overridden __setattr__.
            super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMinThreshold, self).__init__()

            self.yang_name = "config-min-threshold"
            self.yang_parent_name = "wred"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []        # container node: no list keys
            self._child_container_classes = OrderedDict([])  # no child containers
            self._child_list_classes = OrderedDict([])       # no child lists
            # Python leaf name -> YLeaf(YANG type, YANG leaf name).
            self._leafs = OrderedDict([
                ('policy_value', YLeaf(YType.uint32, 'policy-value')),
                ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
            ])
            self.policy_value = None
            self.policy_unit = None
            # Relative schema path segment for this node.
            self._segment_path = lambda: "config-min-threshold"

        def __setattr__(self, name, value):
            # Delegate to YDK's _perform_setattr with this node's leaf names.
            self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMinThreshold, ['policy_value', 'policy_unit'], name, value)

    class ConfigMaxThreshold(Entity):
        """
        Configured maximum threshold
        .. attribute:: policy_value
        Policy value
        **type**\: int
        **range:** 0..4294967295
        .. attribute:: policy_unit
        Policy unit
        **type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
        """

        # YANG module prefix and revision this generated binding was built from.
        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Same ordering caveat as the parent: assignments go through the
            # overridden __setattr__.
            super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMaxThreshold, self).__init__()

            self.yang_name = "config-max-threshold"
            self.yang_parent_name = "wred"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []        # container node: no list keys
            self._child_container_classes = OrderedDict([])  # no child containers
            self._child_list_classes = OrderedDict([])       # no child lists
            # Python leaf name -> YLeaf(YANG type, YANG leaf name).
            self._leafs = OrderedDict([
                ('policy_value', YLeaf(YType.uint32, 'policy-value')),
                ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
            ])
            self.policy_value = None
            self.policy_unit = None
            # Relative schema path segment for this node.
            self._segment_path = lambda: "config-max-threshold"

        def __setattr__(self, name, value):
            # Delegate to YDK's _perform_setattr with this node's leaf names.
            self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.MemberInterfaces.MemberInterface.Classes.Class.Wred.ConfigMaxThreshold, ['policy_value', 'policy_unit'], name, value)
class Classes(Entity):
"""
QoS list of class names
.. attribute:: class_
QoS policy class
**type**\: list of :py:class:`Class <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes, self).__init__()
self.yang_name = "classes"
self.yang_parent_name = "bundle-interface-single"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("class", ("class_", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class))])
self._leafs = OrderedDict()
self.class_ = YList(self)
self._segment_path = lambda: "classes"
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes, [], name, value)
class Class(Entity):
"""
QoS policy class
.. attribute:: level_one_class_name (key)
QoS policy class name at level 1
**type**\: str
.. attribute:: level_two_class_name
QoS policy child class name at level 2
**type**\: str
.. attribute:: config_max_rate
Configured maximum rate
**type**\: :py:class:`ConfigMaxRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigMaxRate>`
.. attribute:: config_min_rate
Configured minimum rate
**type**\: :py:class:`ConfigMinRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigMinRate>`
.. attribute:: config_queue_limit
Configured queue limit
**type**\: :py:class:`ConfigQueueLimit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigQueueLimit>`
.. attribute:: config_policer_average_rate
Configured policer average rate
**type**\: :py:class:`ConfigPolicerAverageRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigPolicerAverageRate>`
.. attribute:: config_policer_peak_rate
Config policer peak rate
**type**\: :py:class:`ConfigPolicerPeakRate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigPolicerPeakRate>`
.. attribute:: config_policer_conform_burst
Configured policer conform burst
**type**\: :py:class:`ConfigPolicerConformBurst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigPolicerConformBurst>`
.. attribute:: config_policer_excess_burst
Configured policer excess burst
**type**\: :py:class:`ConfigPolicerExcessBurst <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigPolicerExcessBurst>`
.. attribute:: conform_action
Conform action
**type**\: :py:class:`ConformAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConformAction>`
.. attribute:: exceed_action
Exceed action
**type**\: :py:class:`ExceedAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ExceedAction>`
.. attribute:: violate_action
Violate action
**type**\: :py:class:`ViolateAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ViolateAction>`
.. attribute:: class_level
Class level
**type**\: :py:class:`DnxQoseaShowLevel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowLevel>`
.. attribute:: egress_queue_id
Egress Queue ID
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: queue_type
Queue type
**type**\: :py:class:`DnxQoseaShowQueue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowQueue>`
.. attribute:: priority_level
Priority level
**type**\: :py:class:`DnxQoseaShowHpLevel <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowHpLevel>`
.. attribute:: hardware_max_rate_kbps
Hardware maximum rate in kbps
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: hardware_min_rate_kbps
Hardware minimum rate in kbps
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: config_excess_bandwidth_percent
Configured excess bandwidth percentage
**type**\: int
**range:** 0..4294967295
**units**\: percentage
.. attribute:: config_excess_bandwidth_unit
Configured excess bandwidth unit
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_excess_bandwidth_weight
Hardware excess bandwidth weight
**type**\: int
**range:** 0..4294967295
.. attribute:: network_min_bandwidth_kbps
Network minimum Bandwith
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_queue_limit_bytes
Hardware queue limit in bytes
**type**\: int
**range:** 0..18446744073709551615
**units**\: byte
.. attribute:: hardware_queue_limit_microseconds
Hardware queue limit in microseconds
**type**\: int
**range:** 0..18446744073709551615
**units**\: microsecond
.. attribute:: policer_bucket_id
PolicerBucketID
**type**\: int
**range:** 0..4294967295
.. attribute:: policer_stats_handle
PolicerStatsHandle
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: hardware_policer_average_rate_kbps
Hardware policer average in kbps
**type**\: int
**range:** 0..4294967295
**units**\: kbit/s
.. attribute:: hardware_policer_peak_rate_kbps
Hardware policer peak rate
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_policer_conform_burst_bytes
Hardware policer conform burst
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_policer_excess_burst_bytes
Hardware policer excess burst
**type**\: int
**range:** 0..4294967295
.. attribute:: ip_mark
IP mark
**type**\: list of :py:class:`IpMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.IpMark>`
.. attribute:: common_mark
Common mark
**type**\: list of :py:class:`CommonMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.CommonMark>`
.. attribute:: mpls_mark
MPLS mark
**type**\: list of :py:class:`MplsMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.MplsMark>`
.. attribute:: wred
WRED parameters
**type**\: list of :py:class:`Wred <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize schema metadata, leaf descriptors, child containers and child lists for one 'class' list entry."""
    _clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class
    super(_clazz, self).__init__()

    # Position of this node within the YANG schema tree.
    self.yang_name = "class"
    self.yang_parent_name = "classes"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = ['level_one_class_name']

    # (yang container name, python attribute name, child class) for every
    # singleton child container of this node; used both for the schema map
    # below and for instantiating the children further down.
    _containers = [
        ("config-max-rate", "config_max_rate", _clazz.ConfigMaxRate),
        ("config-min-rate", "config_min_rate", _clazz.ConfigMinRate),
        ("config-queue-limit", "config_queue_limit", _clazz.ConfigQueueLimit),
        ("config-policer-average-rate", "config_policer_average_rate", _clazz.ConfigPolicerAverageRate),
        ("config-policer-peak-rate", "config_policer_peak_rate", _clazz.ConfigPolicerPeakRate),
        ("config-policer-conform-burst", "config_policer_conform_burst", _clazz.ConfigPolicerConformBurst),
        ("config-policer-excess-burst", "config_policer_excess_burst", _clazz.ConfigPolicerExcessBurst),
        ("conform-action", "conform_action", _clazz.ConformAction),
        ("exceed-action", "exceed_action", _clazz.ExceedAction),
        ("violate-action", "violate_action", _clazz.ViolateAction),
    ]
    self._child_container_classes = OrderedDict(
        (yname, (pyname, cls)) for yname, pyname, cls in _containers)
    self._child_list_classes = OrderedDict([
        ("ip-mark", ("ip_mark", _clazz.IpMark)),
        ("common-mark", ("common_mark", _clazz.CommonMark)),
        ("mpls-mark", ("mpls_mark", _clazz.MplsMark)),
        ("wred", ("wred", _clazz.Wred)),
    ])

    # Leaf descriptors, keyed by python attribute name.
    self._leafs = OrderedDict([
        ('level_one_class_name', YLeaf(YType.str, 'level-one-class-name')),
        ('level_two_class_name', YLeaf(YType.str, 'level-two-class-name')),
        ('class_level', YLeaf(YType.enumeration, 'class-level')),
        ('egress_queue_id', YLeaf(YType.int32, 'egress-queue-id')),
        ('queue_type', YLeaf(YType.enumeration, 'queue-type')),
        ('priority_level', YLeaf(YType.enumeration, 'priority-level')),
        ('hardware_max_rate_kbps', YLeaf(YType.uint32, 'hardware-max-rate-kbps')),
        ('hardware_min_rate_kbps', YLeaf(YType.uint32, 'hardware-min-rate-kbps')),
        ('config_excess_bandwidth_percent', YLeaf(YType.uint32, 'config-excess-bandwidth-percent')),
        ('config_excess_bandwidth_unit', YLeaf(YType.uint32, 'config-excess-bandwidth-unit')),
        ('hardware_excess_bandwidth_weight', YLeaf(YType.uint32, 'hardware-excess-bandwidth-weight')),
        ('network_min_bandwidth_kbps', YLeaf(YType.uint32, 'network-min-bandwidth-kbps')),
        ('hardware_queue_limit_bytes', YLeaf(YType.uint64, 'hardware-queue-limit-bytes')),
        ('hardware_queue_limit_microseconds', YLeaf(YType.uint64, 'hardware-queue-limit-microseconds')),
        ('policer_bucket_id', YLeaf(YType.uint32, 'policer-bucket-id')),
        ('policer_stats_handle', YLeaf(YType.uint64, 'policer-stats-handle')),
        ('hardware_policer_average_rate_kbps', YLeaf(YType.uint32, 'hardware-policer-average-rate-kbps')),
        ('hardware_policer_peak_rate_kbps', YLeaf(YType.uint32, 'hardware-policer-peak-rate-kbps')),
        ('hardware_policer_conform_burst_bytes', YLeaf(YType.uint32, 'hardware-policer-conform-burst-bytes')),
        ('hardware_policer_excess_burst_bytes', YLeaf(YType.uint32, 'hardware-policer-excess-burst-bytes')),
    ])
    # Every leaf attribute starts out unset (None), in _leafs order.
    for leaf_name in list(self._leafs):
        setattr(self, leaf_name, None)

    # Instantiate and attach each singleton child container, mirroring the
    # original per-child sequence: assign, re-parent, then register names.
    for yname, pyname, cls in _containers:
        setattr(self, pyname, cls())
        getattr(self, pyname).parent = self
        self._children_name_map[pyname] = yname
        self._children_yang_names.add(yname)

    # Keyless child lists.
    self.ip_mark = YList(self)
    self.common_mark = YList(self)
    self.mpls_mark = YList(self)
    self.wred = YList(self)

    # XPath-style segment for this list entry, keyed by the level-one class name.
    self._segment_path = lambda: (
        "class[level-one-class-name='%s']" % self.level_one_class_name)
def __setattr__(self, name, value):
    # Route every attribute write through YDK's machinery so leaf
    # assignments stay synchronized with their YLeaf descriptors.
    leaf_names = [
        'level_one_class_name', 'level_two_class_name', 'class_level',
        'egress_queue_id', 'queue_type', 'priority_level',
        'hardware_max_rate_kbps', 'hardware_min_rate_kbps',
        'config_excess_bandwidth_percent', 'config_excess_bandwidth_unit',
        'hardware_excess_bandwidth_weight', 'network_min_bandwidth_kbps',
        'hardware_queue_limit_bytes', 'hardware_queue_limit_microseconds',
        'policer_bucket_id', 'policer_stats_handle',
        'hardware_policer_average_rate_kbps', 'hardware_policer_peak_rate_kbps',
        'hardware_policer_conform_burst_bytes', 'hardware_policer_excess_burst_bytes',
    ]
    self._perform_setattr(
        PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class,
        leaf_names, name, value)
class ConfigMaxRate(Entity):
    """
    Configured maximum rate.

    .. attribute:: policy_value

    	Policy value
    	**type**\: int

    	**range:** 0..4294967295

    .. attribute:: policy_unit

    	Policy unit
    	**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`

    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigMaxRate, self).__init__()

        # Position of this container within the YANG schema tree.
        self.yang_name = "config-max-rate"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []

        # No child containers or child lists under this node.
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()

        # Leaf descriptors, keyed by python attribute name.
        self._leafs = OrderedDict((
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ))
        self.policy_value = None
        self.policy_unit = None

        self._segment_path = lambda: "config-max-rate"

    def __setattr__(self, name, value):
        # Keep leaf writes routed through YDK's attribute machinery.
        clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigMaxRate
        self._perform_setattr(clazz, ['policy_value', 'policy_unit'], name, value)
class ConfigMinRate(Entity):
    """
    Configured minimum rate.

    .. attribute:: policy_value

    	Policy value
    	**type**\: int

    	**range:** 0..4294967295

    .. attribute:: policy_unit

    	Policy unit
    	**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`

    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigMinRate, self).__init__()

        # Position of this container within the YANG schema tree.
        self.yang_name = "config-min-rate"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []

        # No child containers or child lists under this node.
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()

        # Leaf descriptors, keyed by python attribute name.
        self._leafs = OrderedDict((
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ))
        self.policy_value = None
        self.policy_unit = None

        self._segment_path = lambda: "config-min-rate"

    def __setattr__(self, name, value):
        # Keep leaf writes routed through YDK's attribute machinery.
        clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigMinRate
        self._perform_setattr(clazz, ['policy_value', 'policy_unit'], name, value)
class ConfigQueueLimit(Entity):
    """
    Configured queue limit.

    .. attribute:: policy_value

    	Policy value
    	**type**\: int

    	**range:** 0..4294967295

    .. attribute:: policy_unit

    	Policy unit
    	**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`

    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigQueueLimit, self).__init__()

        # Position of this container within the YANG schema tree.
        self.yang_name = "config-queue-limit"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []

        # No child containers or child lists under this node.
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()

        # Leaf descriptors, keyed by python attribute name.
        self._leafs = OrderedDict((
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ))
        self.policy_value = None
        self.policy_unit = None

        self._segment_path = lambda: "config-queue-limit"

    def __setattr__(self, name, value):
        # Keep leaf writes routed through YDK's attribute machinery.
        clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigQueueLimit
        self._perform_setattr(clazz, ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerAverageRate(Entity):
    """
    Configured policer average rate.

    .. attribute:: policy_value

    	Policy value
    	**type**\: int

    	**range:** 0..4294967295

    .. attribute:: policy_unit

    	Policy unit
    	**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`

    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigPolicerAverageRate, self).__init__()

        # Position of this container within the YANG schema tree.
        self.yang_name = "config-policer-average-rate"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []

        # No child containers or child lists under this node.
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()

        # Leaf descriptors, keyed by python attribute name.
        self._leafs = OrderedDict((
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ))
        self.policy_value = None
        self.policy_unit = None

        self._segment_path = lambda: "config-policer-average-rate"

    def __setattr__(self, name, value):
        # Keep leaf writes routed through YDK's attribute machinery.
        clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigPolicerAverageRate
        self._perform_setattr(clazz, ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerPeakRate(Entity):
    """
    Config policer peak rate.

    .. attribute:: policy_value

    	Policy value
    	**type**\: int

    	**range:** 0..4294967295

    .. attribute:: policy_unit

    	Policy unit
    	**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`

    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigPolicerPeakRate, self).__init__()

        # Position of this container within the YANG schema tree.
        self.yang_name = "config-policer-peak-rate"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []

        # No child containers or child lists under this node.
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()

        # Leaf descriptors, keyed by python attribute name.
        self._leafs = OrderedDict((
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ))
        self.policy_value = None
        self.policy_unit = None

        self._segment_path = lambda: "config-policer-peak-rate"

    def __setattr__(self, name, value):
        # Keep leaf writes routed through YDK's attribute machinery.
        clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigPolicerPeakRate
        self._perform_setattr(clazz, ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerConformBurst(Entity):
    """
    Configured policer conform burst.

    .. attribute:: policy_value

    	Policy value
    	**type**\: int

    	**range:** 0..4294967295

    .. attribute:: policy_unit

    	Policy unit
    	**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`

    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigPolicerConformBurst, self).__init__()

        # Position of this container within the YANG schema tree.
        self.yang_name = "config-policer-conform-burst"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []

        # No child containers or child lists under this node.
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()

        # Leaf descriptors, keyed by python attribute name.
        self._leafs = OrderedDict((
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ))
        self.policy_value = None
        self.policy_unit = None

        self._segment_path = lambda: "config-policer-conform-burst"

    def __setattr__(self, name, value):
        # Keep leaf writes routed through YDK's attribute machinery.
        clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigPolicerConformBurst
        self._perform_setattr(clazz, ['policy_value', 'policy_unit'], name, value)
class ConfigPolicerExcessBurst(Entity):
    """
    Configured policer excess burst.

    .. attribute:: policy_value

    	Policy value
    	**type**\: int

    	**range:** 0..4294967295

    .. attribute:: policy_unit

    	Policy unit
    	**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`

    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigPolicerExcessBurst, self).__init__()

        # Position of this container within the YANG schema tree.
        self.yang_name = "config-policer-excess-burst"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []

        # No child containers or child lists under this node.
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()

        # Leaf descriptors, keyed by python attribute name.
        self._leafs = OrderedDict((
            ('policy_value', YLeaf(YType.uint32, 'policy-value')),
            ('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
        ))
        self.policy_value = None
        self.policy_unit = None

        self._segment_path = lambda: "config-policer-excess-burst"

    def __setattr__(self, name, value):
        # Keep leaf writes routed through YDK's attribute machinery.
        clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConfigPolicerExcessBurst
        self._perform_setattr(clazz, ['policy_value', 'policy_unit'], name, value)
class ConformAction(Entity):
    """
    Conform action.

    .. attribute:: action_type

    	Policer action type
    	**type**\: :py:class:`DnxQoseaShowAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowAction>`

    .. attribute:: mark

    	Action mark
    	**type**\: list of :py:class:`Mark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConformAction.Mark>`

    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConformAction, self).__init__()

        # Position of this container within the YANG schema tree.
        self.yang_name = "conform-action"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict()

        # One keyless child list holding the action marks.
        self._child_list_classes = OrderedDict([
            ("mark", ("mark", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConformAction.Mark)),
        ])

        # Single enumeration leaf for the policer action type.
        self._leafs = OrderedDict((
            ('action_type', YLeaf(YType.enumeration, 'action-type')),
        ))
        self.action_type = None
        self.mark = YList(self)

        self._segment_path = lambda: "conform-action"

    def __setattr__(self, name, value):
        # Keep leaf writes routed through YDK's attribute machinery.
        clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConformAction
        self._perform_setattr(clazz, ['action_type'], name, value)

    class Mark(Entity):
        """
        Action mark.

        .. attribute:: mark_type

        	Mark type
        	**type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`

        .. attribute:: mark_value

        	Mark value
        	**type**\: int

        	**range:** 0..65535

        """

        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConformAction.Mark, self).__init__()

            # Position of this list entry within the YANG schema tree.
            self.yang_name = "mark"
            self.yang_parent_name = "conform-action"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict()
            self._child_list_classes = OrderedDict()

            # Leaf descriptors, keyed by python attribute name.
            self._leafs = OrderedDict((
                ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
                ('mark_value', YLeaf(YType.uint16, 'mark-value')),
            ))
            self.mark_type = None
            self.mark_value = None

            self._segment_path = lambda: "mark"

        def __setattr__(self, name, value):
            # Keep leaf writes routed through YDK's attribute machinery.
            clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ConformAction.Mark
            self._perform_setattr(clazz, ['mark_type', 'mark_value'], name, value)
class ExceedAction(Entity):
    """
    Exceed action.

    .. attribute:: action_type

    	Policer action type
    	**type**\: :py:class:`DnxQoseaShowAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowAction>`

    .. attribute:: mark

    	Action mark
    	**type**\: list of :py:class:`Mark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ExceedAction.Mark>`

    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ExceedAction, self).__init__()

        # Position of this container within the YANG schema tree.
        self.yang_name = "exceed-action"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict()

        # One keyless child list holding the action marks.
        self._child_list_classes = OrderedDict([
            ("mark", ("mark", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ExceedAction.Mark)),
        ])

        # Single enumeration leaf for the policer action type.
        self._leafs = OrderedDict((
            ('action_type', YLeaf(YType.enumeration, 'action-type')),
        ))
        self.action_type = None
        self.mark = YList(self)

        self._segment_path = lambda: "exceed-action"

    def __setattr__(self, name, value):
        # Keep leaf writes routed through YDK's attribute machinery.
        clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ExceedAction
        self._perform_setattr(clazz, ['action_type'], name, value)

    class Mark(Entity):
        """
        Action mark.

        .. attribute:: mark_type

        	Mark type
        	**type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`

        .. attribute:: mark_value

        	Mark value
        	**type**\: int

        	**range:** 0..65535

        """

        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ExceedAction.Mark, self).__init__()

            # Position of this list entry within the YANG schema tree.
            self.yang_name = "mark"
            self.yang_parent_name = "exceed-action"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict()
            self._child_list_classes = OrderedDict()

            # Leaf descriptors, keyed by python attribute name.
            self._leafs = OrderedDict((
                ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
                ('mark_value', YLeaf(YType.uint16, 'mark-value')),
            ))
            self.mark_type = None
            self.mark_value = None

            self._segment_path = lambda: "mark"

        def __setattr__(self, name, value):
            # Keep leaf writes routed through YDK's attribute machinery.
            clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ExceedAction.Mark
            self._perform_setattr(clazz, ['mark_type', 'mark_value'], name, value)
class ViolateAction(Entity):
    """
    Violate action.

    .. attribute:: action_type

    	Policer action type
    	**type**\: :py:class:`DnxQoseaShowAction <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowAction>`

    .. attribute:: mark

    	Action mark
    	**type**\: list of :py:class:`Mark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ViolateAction.Mark>`

    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ViolateAction, self).__init__()

        # Position of this container within the YANG schema tree.
        self.yang_name = "violate-action"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict()

        # One keyless child list holding the action marks.
        self._child_list_classes = OrderedDict([
            ("mark", ("mark", PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ViolateAction.Mark)),
        ])

        # Single enumeration leaf for the policer action type.
        self._leafs = OrderedDict((
            ('action_type', YLeaf(YType.enumeration, 'action-type')),
        ))
        self.action_type = None
        self.mark = YList(self)

        self._segment_path = lambda: "violate-action"

    def __setattr__(self, name, value):
        # Keep leaf writes routed through YDK's attribute machinery.
        clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ViolateAction
        self._perform_setattr(clazz, ['action_type'], name, value)

    class Mark(Entity):
        """
        Action mark.

        .. attribute:: mark_type

        	Mark type
        	**type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`

        .. attribute:: mark_value

        	Mark value
        	**type**\: int

        	**range:** 0..65535

        """

        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ViolateAction.Mark, self).__init__()

            # Position of this list entry within the YANG schema tree.
            self.yang_name = "mark"
            self.yang_parent_name = "violate-action"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict()
            self._child_list_classes = OrderedDict()

            # Leaf descriptors, keyed by python attribute name.
            self._leafs = OrderedDict((
                ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
                ('mark_value', YLeaf(YType.uint16, 'mark-value')),
            ))
            self.mark_type = None
            self.mark_value = None

            self._segment_path = lambda: "mark"

        def __setattr__(self, name, value):
            # Keep leaf writes routed through YDK's attribute machinery.
            clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.ViolateAction.Mark
            self._perform_setattr(clazz, ['mark_type', 'mark_value'], name, value)
class IpMark(Entity):
    """
    IP mark.

    .. attribute:: mark_type

    	Mark type
    	**type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`

    .. attribute:: mark_value

    	Mark value
    	**type**\: int

    	**range:** 0..65535

    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.IpMark, self).__init__()

        # Position of this list entry within the YANG schema tree.
        self.yang_name = "ip-mark"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()

        # Leaf descriptors, keyed by python attribute name.
        self._leafs = OrderedDict((
            ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
            ('mark_value', YLeaf(YType.uint16, 'mark-value')),
        ))
        self.mark_type = None
        self.mark_value = None

        self._segment_path = lambda: "ip-mark"

    def __setattr__(self, name, value):
        # Keep leaf writes routed through YDK's attribute machinery.
        clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.IpMark
        self._perform_setattr(clazz, ['mark_type', 'mark_value'], name, value)
class CommonMark(Entity):
    """
    Common mark.

    .. attribute:: mark_type

    	Mark type
    	**type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`

    .. attribute:: mark_value

    	Mark value
    	**type**\: int

    	**range:** 0..65535

    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.CommonMark, self).__init__()

        # Position of this list entry within the YANG schema tree.
        self.yang_name = "common-mark"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()

        # Leaf descriptors, keyed by python attribute name.
        self._leafs = OrderedDict((
            ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
            ('mark_value', YLeaf(YType.uint16, 'mark-value')),
        ))
        self.mark_type = None
        self.mark_value = None

        self._segment_path = lambda: "common-mark"

    def __setattr__(self, name, value):
        # Keep leaf writes routed through YDK's attribute machinery.
        clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.CommonMark
        self._perform_setattr(clazz, ['mark_type', 'mark_value'], name, value)
class MplsMark(Entity):
    """
    MPLS mark.

    .. attribute:: mark_type

    	Mark type
    	**type**\: :py:class:`DnxQoseaShowMark <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowMark>`

    .. attribute:: mark_value

    	Mark value
    	**type**\: int

    	**range:** 0..65535

    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.MplsMark, self).__init__()

        # Position of this list entry within the YANG schema tree.
        self.yang_name = "mpls-mark"
        self.yang_parent_name = "class"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict()
        self._child_list_classes = OrderedDict()

        # Leaf descriptors, keyed by python attribute name.
        self._leafs = OrderedDict((
            ('mark_type', YLeaf(YType.enumeration, 'mark-type')),
            ('mark_value', YLeaf(YType.uint16, 'mark-value')),
        ))
        self.mark_type = None
        self.mark_value = None

        self._segment_path = lambda: "mpls-mark"

    def __setattr__(self, name, value):
        # Keep leaf writes routed through YDK's attribute machinery.
        clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.MplsMark
        self._perform_setattr(clazz, ['mark_type', 'mark_value'], name, value)
class Wred(Entity):
"""
WRED parameters
.. attribute:: wred_match_value
WRED match values
**type**\: :py:class:`WredMatchValue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred.WredMatchValue>`
.. attribute:: config_min_threshold
Configured minimum threshold
**type**\: :py:class:`ConfigMinThreshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred.ConfigMinThreshold>`
.. attribute:: config_max_threshold
Configured maximum threshold
**type**\: :py:class:`ConfigMaxThreshold <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred.ConfigMaxThreshold>`
.. attribute:: wred_match_type
WREDMatchType
**type**\: :py:class:`DnxQoseaShowWred <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.DnxQoseaShowWred>`
.. attribute:: hardware_min_threshold_bytes
Hardware minimum threshold
**type**\: int
**range:** 0..4294967295
.. attribute:: hardware_max_threshold_bytes
Hardware maximum threshold
**type**\: int
**range:** 0..4294967295
.. attribute:: first_segment
First segment
**type**\: int
**range:** 0..65535
.. attribute:: segment_size
Segment size
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
    """Initialize schema metadata, WRED leaf descriptors and the three threshold/match child containers."""
    _wred = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred
    super(_wred, self).__init__()

    # Position of this list entry within the YANG schema tree.
    self.yang_name = "wred"
    self.yang_parent_name = "class"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []

    # (yang container name, python attribute name, child class) for every
    # singleton child container of this node.
    _containers = [
        ("wred-match-value", "wred_match_value", _wred.WredMatchValue),
        ("config-min-threshold", "config_min_threshold", _wred.ConfigMinThreshold),
        ("config-max-threshold", "config_max_threshold", _wred.ConfigMaxThreshold),
    ]
    self._child_container_classes = OrderedDict(
        (yname, (pyname, cls)) for yname, pyname, cls in _containers)
    self._child_list_classes = OrderedDict()

    # Leaf descriptors, keyed by python attribute name.
    self._leafs = OrderedDict([
        ('wred_match_type', YLeaf(YType.enumeration, 'wred-match-type')),
        ('hardware_min_threshold_bytes', YLeaf(YType.uint32, 'hardware-min-threshold-bytes')),
        ('hardware_max_threshold_bytes', YLeaf(YType.uint32, 'hardware-max-threshold-bytes')),
        ('first_segment', YLeaf(YType.uint16, 'first-segment')),
        ('segment_size', YLeaf(YType.uint32, 'segment-size')),
    ])
    # Every leaf attribute starts out unset (None), in _leafs order.
    for leaf_name in list(self._leafs):
        setattr(self, leaf_name, None)

    # Instantiate and attach each singleton child container, mirroring the
    # original per-child sequence: assign, re-parent, then register names.
    for yname, pyname, cls in _containers:
        setattr(self, pyname, cls())
        getattr(self, pyname).parent = self
        self._children_name_map[pyname] = yname
        self._children_yang_names.add(yname)

    self._segment_path = lambda: "wred"
def __setattr__(self, name, value):
    # Route attribute writes through YDK's machinery so leaf assignments
    # stay synchronized with their YLeaf descriptors.
    leaf_names = ['wred_match_type', 'hardware_min_threshold_bytes',
                  'hardware_max_threshold_bytes', 'first_segment', 'segment_size']
    self._perform_setattr(
        PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred,
        leaf_names, name, value)
class WredMatchValue(Entity):
    """
    WRED match values.

    .. attribute:: dnx_qosea_show_red_match_value

    	dnx qosea show red match value
    	**type**\: list of :py:class:`DnxQoseaShowRedMatchValue <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue>`

    """

    _prefix = 'ncs5500-qos-oper'
    _revision = '2015-11-09'

    def __init__(self):
        super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred.WredMatchValue, self).__init__()

        # Position of this container within the YANG schema tree.
        self.yang_name = "wred-match-value"
        self.yang_parent_name = "wred"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_container_classes = OrderedDict()

        # The match values themselves live in one keyless child list.
        self._child_list_classes = OrderedDict([
            ("dnx-qosea-show-red-match-value",
             ("dnx_qosea_show_red_match_value",
              PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue)),
        ])

        # This container carries no leaves of its own.
        self._leafs = OrderedDict()
        self.dnx_qosea_show_red_match_value = YList(self)

        self._segment_path = lambda: "wred-match-value"

    def __setattr__(self, name, value):
        # No leaves here; still route writes through YDK's machinery.
        clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred.WredMatchValue
        self._perform_setattr(clazz, [], name, value)

    class DnxQoseaShowRedMatchValue(Entity):
        """
        dnx qosea show red match value.

        .. attribute:: range_start

        	Start value of a range
        	**type**\: int

        	**range:** 0..255

        .. attribute:: range_end

        	End value of a range
        	**type**\: int

        	**range:** 0..255

        """

        _prefix = 'ncs5500-qos-oper'
        _revision = '2015-11-09'

        def __init__(self):
            super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue, self).__init__()

            # Position of this list entry within the YANG schema tree.
            self.yang_name = "dnx-qosea-show-red-match-value"
            self.yang_parent_name = "wred-match-value"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_container_classes = OrderedDict()
            self._child_list_classes = OrderedDict()

            # Leaf descriptors, keyed by python attribute name.
            self._leafs = OrderedDict((
                ('range_start', YLeaf(YType.uint8, 'range-start')),
                ('range_end', YLeaf(YType.uint8, 'range-end')),
            ))
            self.range_start = None
            self.range_end = None

            self._segment_path = lambda: "dnx-qosea-show-red-match-value"

        def __setattr__(self, name, value):
            # Keep leaf writes routed through YDK's attribute machinery.
            clazz = PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred.WredMatchValue.DnxQoseaShowRedMatchValue
            self._perform_setattr(clazz, ['range_start', 'range_end'], name, value)
class ConfigMinThreshold(Entity):
"""
Configured minimum threshold
.. attribute:: policy_value
Policy value
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_unit
Policy unit
**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred.ConfigMinThreshold, self).__init__()
self.yang_name = "config-min-threshold"
self.yang_parent_name = "wred"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('policy_value', YLeaf(YType.uint32, 'policy-value')),
('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
])
self.policy_value = None
self.policy_unit = None
self._segment_path = lambda: "config-min-threshold"
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred.ConfigMinThreshold, ['policy_value', 'policy_unit'], name, value)
class ConfigMaxThreshold(Entity):
"""
Configured maximum threshold
.. attribute:: policy_value
Policy value
**type**\: int
**range:** 0..4294967295
.. attribute:: policy_unit
Policy unit
**type**\: :py:class:`PolicyParamUnit <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PolicyParamUnit>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred.ConfigMaxThreshold, self).__init__()
self.yang_name = "config-max-threshold"
self.yang_parent_name = "wred"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('policy_value', YLeaf(YType.uint32, 'policy-value')),
('policy_unit', YLeaf(YType.enumeration, 'policy-unit')),
])
self.policy_value = None
self.policy_unit = None
self._segment_path = lambda: "config-max-threshold"
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.BundleInterfaceSingles.BundleInterfaceSingle.Classes.Class.Wred.ConfigMaxThreshold, ['policy_value', 'policy_unit'], name, value)
class RemoteInterfaces(Entity):
"""
QoS list of remote interfaces
.. attribute:: remote_interface
QoS remote interface names
**type**\: list of :py:class:`RemoteInterface <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.RemoteInterfaces, self).__init__()
self.yang_name = "remote-interfaces"
self.yang_parent_name = "node"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("remote-interface", ("remote_interface", PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface))])
self._leafs = OrderedDict()
self.remote_interface = YList(self)
self._segment_path = lambda: "remote-interfaces"
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.RemoteInterfaces, [], name, value)
class RemoteInterface(Entity):
"""
QoS remote interface names
.. attribute:: interface_name (key)
The name of the remote interface
**type**\: str
**pattern:** [a\-zA\-Z0\-9./\-]+
.. attribute:: policy_name
Policy Name
**type**\: str
**length:** 0..64
.. attribute:: virtual_output_queue_statistics_handle
Virtual output queue statistics handle
**type**\: int
**range:** 0..18446744073709551615
.. attribute:: interface_handle
Interface Handle
**type**\: int
**range:** 0..4294967295
.. attribute:: number_of_virtual_output_queues
Number of Virtual Output Queues
**type**\: int
**range:** 0..4294967295
.. attribute:: number_of_classes
Number of Classes
**type**\: int
**range:** 0..4294967295
.. attribute:: remote_class
Remote Class array
**type**\: list of :py:class:`RemoteClass <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface.RemoteClass>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface, self).__init__()
self.yang_name = "remote-interface"
self.yang_parent_name = "remote-interfaces"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = ['interface_name']
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("remote-class", ("remote_class", PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface.RemoteClass))])
self._leafs = OrderedDict([
('interface_name', YLeaf(YType.str, 'interface-name')),
('policy_name', YLeaf(YType.str, 'policy-name')),
('virtual_output_queue_statistics_handle', YLeaf(YType.uint64, 'virtual-output-queue-statistics-handle')),
('interface_handle', YLeaf(YType.uint32, 'interface-handle')),
('number_of_virtual_output_queues', YLeaf(YType.uint32, 'number-of-virtual-output-queues')),
('number_of_classes', YLeaf(YType.uint32, 'number-of-classes')),
])
self.interface_name = None
self.policy_name = None
self.virtual_output_queue_statistics_handle = None
self.interface_handle = None
self.number_of_virtual_output_queues = None
self.number_of_classes = None
self.remote_class = YList(self)
self._segment_path = lambda: "remote-interface" + "[interface-name='" + str(self.interface_name) + "']"
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface, ['interface_name', 'policy_name', 'virtual_output_queue_statistics_handle', 'interface_handle', 'number_of_virtual_output_queues', 'number_of_classes'], name, value)
class RemoteClass(Entity):
"""
Remote Class array
.. attribute:: class_name
Class Name
**type**\: str
**length:** 0..64
.. attribute:: class_id
Class ID
**type**\: int
**range:** 0..4294967295
.. attribute:: cos_q
Class of Service Queue
**type**\: int
**range:** 0..4294967295
.. attribute:: queue_limit
Default/Configured queue limit in bytes
**type**\: int
**range:** 0..4294967295
**units**\: byte
.. attribute:: hardware_queue_limit
Hardware queue limit in bytes
**type**\: int
**range:** 0..4294967295
**units**\: byte
.. attribute:: wred
Default/Configured WRED profiles
**type**\: list of :py:class:`Wred <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface.RemoteClass.Wred>`
.. attribute:: hw_wred
Hardware WRED profiles
**type**\: list of :py:class:`HwWred <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ncs5500_qos_oper.PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface.RemoteClass.HwWred>`
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface.RemoteClass, self).__init__()
self.yang_name = "remote-class"
self.yang_parent_name = "remote-interface"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([("wred", ("wred", PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface.RemoteClass.Wred)), ("hw-wred", ("hw_wred", PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface.RemoteClass.HwWred))])
self._leafs = OrderedDict([
('class_name', YLeaf(YType.str, 'class-name')),
('class_id', YLeaf(YType.uint32, 'class-id')),
('cos_q', YLeaf(YType.uint32, 'cos-q')),
('queue_limit', YLeaf(YType.uint32, 'queue-limit')),
('hardware_queue_limit', YLeaf(YType.uint32, 'hardware-queue-limit')),
])
self.class_name = None
self.class_id = None
self.cos_q = None
self.queue_limit = None
self.hardware_queue_limit = None
self.wred = YList(self)
self.hw_wred = YList(self)
self._segment_path = lambda: "remote-class"
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface.RemoteClass, ['class_name', 'class_id', 'cos_q', 'queue_limit', 'hardware_queue_limit'], name, value)
class Wred(Entity):
"""
Default/Configured WRED profiles
.. attribute:: min_threshold
Minimum Threshold
**type**\: int
**range:** 0..4294967295
.. attribute:: max_threshold
Maximum Threshold
**type**\: int
**range:** 0..4294967295
.. attribute:: drop_probability
Drop Probability
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface.RemoteClass.Wred, self).__init__()
self.yang_name = "wred"
self.yang_parent_name = "remote-class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('min_threshold', YLeaf(YType.uint32, 'min-threshold')),
('max_threshold', YLeaf(YType.uint32, 'max-threshold')),
('drop_probability', YLeaf(YType.uint32, 'drop-probability')),
])
self.min_threshold = None
self.max_threshold = None
self.drop_probability = None
self._segment_path = lambda: "wred"
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface.RemoteClass.Wred, ['min_threshold', 'max_threshold', 'drop_probability'], name, value)
class HwWred(Entity):
"""
Hardware WRED profiles
.. attribute:: min_threshold
Minimum Threshold
**type**\: int
**range:** 0..4294967295
.. attribute:: max_threshold
Maximum Threshold
**type**\: int
**range:** 0..4294967295
.. attribute:: drop_probability
Drop Probability
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ncs5500-qos-oper'
_revision = '2015-11-09'
def __init__(self):
super(PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface.RemoteClass.HwWred, self).__init__()
self.yang_name = "hw-wred"
self.yang_parent_name = "remote-class"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_container_classes = OrderedDict([])
self._child_list_classes = OrderedDict([])
self._leafs = OrderedDict([
('min_threshold', YLeaf(YType.uint32, 'min-threshold')),
('max_threshold', YLeaf(YType.uint32, 'max-threshold')),
('drop_probability', YLeaf(YType.uint32, 'drop-probability')),
])
self.min_threshold = None
self.max_threshold = None
self.drop_probability = None
self._segment_path = lambda: "hw-wred"
def __setattr__(self, name, value):
self._perform_setattr(PlatformQos.Nodes.Node.RemoteInterfaces.RemoteInterface.RemoteClass.HwWred, ['min_threshold', 'max_threshold', 'drop_probability'], name, value)
def clone_ptr(self):
self._top_entity = PlatformQos()
return self._top_entity
| 59.66316
| 1,939
| 0.41548
| 35,236
| 533,150
| 5.977466
| 0.010075
| 0.047858
| 0.059348
| 0.021631
| 0.966267
| 0.955185
| 0.948581
| 0.940286
| 0.932125
| 0.930729
| 0
| 0.022926
| 0.512385
| 533,150
| 8,935
| 1,940
| 59.669838
| 0.787246
| 0.179145
| 0
| 0.831132
| 0
| 0
| 0.113637
| 0.040398
| 0
| 0
| 0
| 0
| 0
| 1
| 0.07755
| false
| 0
| 0.001405
| 0
| 0.143018
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a28729200ab9630bcacebe8c3c784fbaa0fbee89
| 86
|
py
|
Python
|
dev/get_data.py
|
patarapolw/ttslib
|
4b7be8067899aa2a3514b9f75fa39f4160da4972
|
[
"MIT"
] | 2
|
2018-12-08T22:49:10.000Z
|
2019-02-20T14:02:03.000Z
|
dev/get_data.py
|
patarapolw/ttslib
|
4b7be8067899aa2a3514b9f75fa39f4160da4972
|
[
"MIT"
] | null | null | null |
dev/get_data.py
|
patarapolw/ttslib
|
4b7be8067899aa2a3514b9f75fa39f4160da4972
|
[
"MIT"
] | null | null | null |
import os
print(os.listdir('/Users/patarapolw/PycharmProjects/ttslib/ttslib/data'))
| 17.2
| 73
| 0.790698
| 11
| 86
| 6.181818
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.05814
| 86
| 4
| 74
| 21.5
| 0.839506
| 0
| 0
| 0
| 0
| 0
| 0.604651
| 0.604651
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
a2a00971aa9ae256047352fab59aad4cbd5f7d45
| 22,973
|
py
|
Python
|
fhir/resources/DSTU2/tests/test_encounter.py
|
cstoltze/fhir.resources
|
52f99738935b7313089d89daf94d73ce7d167c9d
|
[
"BSD-3-Clause"
] | 144
|
2019-05-08T14:24:43.000Z
|
2022-03-30T02:37:11.000Z
|
fhir/resources/DSTU2/tests/test_encounter.py
|
cstoltze/fhir.resources
|
52f99738935b7313089d89daf94d73ce7d167c9d
|
[
"BSD-3-Clause"
] | 82
|
2019-05-13T17:43:13.000Z
|
2022-03-30T16:45:17.000Z
|
fhir/resources/DSTU2/tests/test_encounter.py
|
cstoltze/fhir.resources
|
52f99738935b7313089d89daf94d73ce7d167c9d
|
[
"BSD-3-Clause"
] | 48
|
2019-04-04T14:14:53.000Z
|
2022-03-30T06:07:31.000Z
|
# -*- coding: utf-8 -*-
from datetime import datetime, timezone
from decimal import Decimal
from .. import fhirtypes # noqa: F401
from .. import encounter
def test_Encounter_1(base_settings):
filename = (
base_settings["unittest_data_dir"]
/ "encounter-example-f001-heart.canonical.json"
)
inst = encounter.Encounter.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "Encounter" == inst.resource_type
impl_Encounter_1(inst)
# testing reverse by generating data from itself and create again.
data = inst.dict()
assert "Encounter" == data["resourceType"]
inst2 = encounter.Encounter(**data)
impl_Encounter_1(inst2)
def impl_Encounter_1(inst):
assert inst.hospitalization.admitSource.coding[0].code == "305956004"
assert inst.hospitalization.admitSource.coding[0].display == "Referral by physician"
assert inst.hospitalization.admitSource.coding[0].system == "http://snomed.info/sct"
assert inst.hospitalization.dischargeDisposition.coding[0].code == "306689006"
assert (
inst.hospitalization.dischargeDisposition.coding[0].display
== "Discharge to home"
)
assert (
inst.hospitalization.dischargeDisposition.coding[0].system
== "http://snomed.info/sct"
)
assert (
inst.hospitalization.preAdmissionIdentifier.system
== "http://www.amc.nl/zorgportal/identifiers/pre-admissions"
)
assert inst.hospitalization.preAdmissionIdentifier.use == "official"
assert inst.hospitalization.preAdmissionIdentifier.value == "93042"
assert inst.id == "f001"
assert (
inst.identifier[0].system == "http://www.amc.nl/zorgportal/identifiers/visits"
)
assert inst.identifier[0].use == "official"
assert inst.identifier[0].value == "v1451"
assert inst.length.code == "min"
assert inst.length.system == "http://unitsofmeasure.org"
assert inst.length.unit == "min"
assert inst.length.value == Decimal("140")
assert inst.participant[0].individual.display == "P. Voigt"
assert inst.participant[0].individual.reference == "Practitioner/f002"
assert inst.patient.display == "P. van de Heuvel"
assert inst.patient.reference == "Patient/f001"
assert inst.priority.coding[0].code == "310361003"
assert inst.priority.coding[0].display == "Non-urgent cardiological admission"
assert inst.priority.coding[0].system == "http://snomed.info/sct"
assert inst.reason[0].coding[0].code == "34068001"
assert inst.reason[0].coding[0].display == "Heart valve replacement"
assert inst.reason[0].coding[0].system == "http://snomed.info/sct"
assert inst.serviceProvider.display == "Burgers University Medical Center"
assert inst.serviceProvider.reference == "Organization/f001"
assert inst.status == "finished"
assert (
inst.text.div
== "<div><p><b>Generated Narrative with Details</b></p><p><b>id</b>: f001</p><p><b>identifier</b>: v1451 (OFFICIAL)</p><p><b>status</b>: finished</p><p><b>class</b>: outpatient</p><p><b>type</b>: Patient-initiated encounter <span>(Details : {SNOMED CT code '270427003' = '270427003', given as 'Patient-initiated encounter'})</span></p><p><b>priority</b>: Non-urgent cardiological admission <span>(Details : {SNOMED CT code '310361003' = '310361003', given as 'Non-urgent cardiological admission'})</span></p><p><b>patient</b>: <a>P. van de Heuvel</a></p><h3>Participants</h3><table><tr><td>-</td><td><b>Individual</b></td></tr><tr><td>*</td><td><a>P. Voigt</a></td></tr></table><p><b>length</b>: 140 min<span> (Details: http://unitsofmeasure.org code min = '??')</span></p><p><b>reason</b>: Heart valve replacement <span>(Details : {SNOMED CT code '34068001' = '34068001', given as 'Heart valve replacement'})</span></p><h3>Hospitalizations</h3><table><tr><td>-</td><td><b>PreAdmissionIdentifier</b></td><td><b>AdmitSource</b></td><td><b>DischargeDisposition</b></td></tr><tr><td>*</td><td>93042 (OFFICIAL)</td><td>Referral by physician <span>(Details : {SNOMED CT code '305956004' = '305956004', given as 'Referral by physician'})</span></td><td>Discharge to home <span>(Details : {SNOMED CT code '306689006' = '306689006', given as 'Discharge to home'})</span></td></tr></table><p><b>serviceProvider</b>: <a>Burgers University Medical Center</a></p></div>"
)
assert inst.text.status == "generated"
assert inst.type[0].coding[0].code == "270427003"
assert inst.type[0].coding[0].display == "Patient-initiated encounter"
assert inst.type[0].coding[0].system == "http://snomed.info/sct"
def test_Encounter_2(base_settings):
filename = (
base_settings["unittest_data_dir"]
/ "encounter-example-f002-lung.canonical.json"
)
inst = encounter.Encounter.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "Encounter" == inst.resource_type
impl_Encounter_2(inst)
# testing reverse by generating data from itself and create again.
data = inst.dict()
assert "Encounter" == data["resourceType"]
inst2 = encounter.Encounter(**data)
impl_Encounter_2(inst2)
def impl_Encounter_2(inst):
assert inst.hospitalization.admitSource.coding[0].code == "305997006"
assert (
inst.hospitalization.admitSource.coding[0].display == "Referral by radiologist"
)
assert inst.hospitalization.admitSource.coding[0].system == "http://snomed.info/sct"
assert inst.hospitalization.dischargeDisposition.coding[0].code == "306689006"
assert (
inst.hospitalization.dischargeDisposition.coding[0].display
== "Discharge to home"
)
assert (
inst.hospitalization.dischargeDisposition.coding[0].system
== "http://snomed.info/sct"
)
assert (
inst.hospitalization.preAdmissionIdentifier.system
== "http://www.bmc.nl/zorgportal/identifiers/pre-admissions"
)
assert inst.hospitalization.preAdmissionIdentifier.use == "official"
assert inst.hospitalization.preAdmissionIdentifier.value == "98682"
assert inst.id == "f002"
assert (
inst.identifier[0].system
== "http://www.bmc.nl/zorgportal/identifiers/encounters"
)
assert inst.identifier[0].use == "official"
assert inst.identifier[0].value == "v3251"
assert inst.length.code == "min"
assert inst.length.system == "http://unitsofmeasure.org"
assert inst.length.unit == "min"
assert inst.length.value == Decimal("140")
assert inst.participant[0].individual.display == "M.I.M Versteegh"
assert inst.participant[0].individual.reference == "Practitioner/f003"
assert inst.patient.display == "P. van de Heuvel"
assert inst.patient.reference == "Patient/f001"
assert inst.priority.coding[0].code == "103391001"
assert inst.priority.coding[0].display == "Urgent"
assert inst.priority.coding[0].system == "http://snomed.info/sct"
assert inst.reason[0].coding[0].code == "34068001"
assert inst.reason[0].coding[0].display == "Partial lobectomy of lung"
assert inst.reason[0].coding[0].system == "http://snomed.info/sct"
assert inst.serviceProvider.display == "BMC"
assert inst.serviceProvider.reference == "Organization/f001"
assert inst.status == "finished"
assert (
inst.text.div
== "<div><p><b>Generated Narrative with Details</b></p><p><b>id</b>: f002</p><p><b>identifier</b>: v3251 (OFFICIAL)</p><p><b>status</b>: finished</p><p><b>class</b>: outpatient</p><p><b>type</b>: Patient-initiated encounter <span>(Details : {SNOMED CT code '270427003' = '270427003', given as 'Patient-initiated encounter'})</span></p><p><b>priority</b>: Urgent <span>(Details : {SNOMED CT code '103391001' = '103391001', given as 'Urgent'})</span></p><p><b>patient</b>: <a>P. van de Heuvel</a></p><h3>Participants</h3><table><tr><td>-</td><td><b>Individual</b></td></tr><tr><td>*</td><td><a>M.I.M Versteegh</a></td></tr></table><p><b>length</b>: 140 min<span> (Details: http://unitsofmeasure.org code min = '??')</span></p><p><b>reason</b>: Partial lobectomy of lung <span>(Details : {SNOMED CT code '34068001' = '34068001', given as 'Partial lobectomy of lung'})</span></p><h3>Hospitalizations</h3><table><tr><td>-</td><td><b>PreAdmissionIdentifier</b></td><td><b>AdmitSource</b></td><td><b>DischargeDisposition</b></td></tr><tr><td>*</td><td>98682 (OFFICIAL)</td><td>Referral by radiologist <span>(Details : {SNOMED CT code '305997006' = '305997006', given as 'Referral by radiologist'})</span></td><td>Discharge to home <span>(Details : {SNOMED CT code '306689006' = '306689006', given as 'Discharge to home'})</span></td></tr></table><p><b>serviceProvider</b>: <a>BMC</a></p></div>"
)
assert inst.text.status == "generated"
assert inst.type[0].coding[0].code == "270427003"
assert inst.type[0].coding[0].display == "Patient-initiated encounter"
assert inst.type[0].coding[0].system == "http://snomed.info/sct"
def test_Encounter_3(base_settings):
filename = (
base_settings["unittest_data_dir"]
/ "encounter-example-f003-abscess.canonical.json"
)
inst = encounter.Encounter.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "Encounter" == inst.resource_type
impl_Encounter_3(inst)
# testing reverse by generating data from itself and create again.
data = inst.dict()
assert "Encounter" == data["resourceType"]
inst2 = encounter.Encounter(**data)
impl_Encounter_3(inst2)
def impl_Encounter_3(inst):
assert inst.hospitalization.admitSource.coding[0].code == "305956004"
assert inst.hospitalization.admitSource.coding[0].display == "Referral by physician"
assert inst.hospitalization.admitSource.coding[0].system == "http://snomed.info/sct"
assert inst.hospitalization.dischargeDisposition.coding[0].code == "306689006"
assert (
inst.hospitalization.dischargeDisposition.coding[0].display
== "Discharge to home"
)
assert (
inst.hospitalization.dischargeDisposition.coding[0].system
== "http://snomed.info/sct"
)
assert (
inst.hospitalization.preAdmissionIdentifier.system
== "http://www.bmc.nl/zorgportal/identifiers/pre-admissions"
)
assert inst.hospitalization.preAdmissionIdentifier.use == "official"
assert inst.hospitalization.preAdmissionIdentifier.value == "93042"
assert inst.id == "f003"
assert (
inst.identifier[0].system
== "http://www.bmc.nl/zorgportal/identifiers/encounters"
)
assert inst.identifier[0].use == "official"
assert inst.identifier[0].value == "v6751"
assert inst.length.code == "min"
assert inst.length.system == "http://unitsofmeasure.org"
assert inst.length.unit == "min"
assert inst.length.value == Decimal("90")
assert inst.participant[0].individual.display == "E.M. van den Broek"
assert inst.participant[0].individual.reference == "Practitioner/f001"
assert inst.patient.display == "P. van de Heuvel"
assert inst.patient.reference == "Patient/f001"
assert inst.priority.coding[0].code == "103391001"
assert (
inst.priority.coding[0].display == "Non-urgent ear, nose and throat admission"
)
assert inst.priority.coding[0].system == "http://snomed.info/sct"
assert inst.reason[0].coding[0].code == "18099001"
assert inst.reason[0].coding[0].display == "Retropharyngeal abscess"
assert inst.reason[0].coding[0].system == "http://snomed.info/sct"
assert (
inst.reason[0].extension[0].url
== "http://hl7.org/fhir/StructureDefinition/encounter-primaryDiagnosis"
)
assert inst.reason[0].extension[0].valueInteger == 1
assert inst.serviceProvider.reference == "Organization/f001"
assert inst.status == "finished"
assert (
inst.text.div
== "<div><p><b>Generated Narrative with Details</b></p><p><b>id</b>: f003</p><p><b>identifier</b>: v6751 (OFFICIAL)</p><p><b>status</b>: finished</p><p><b>class</b>: outpatient</p><p><b>type</b>: Patient-initiated encounter <span>(Details : {SNOMED CT code '270427003' = '270427003', given as 'Patient-initiated encounter'})</span></p><p><b>priority</b>: Non-urgent ear, nose and throat admission <span>(Details : {SNOMED CT code '103391001' = '103391001', given as 'Non-urgent ear, nose and throat admission'})</span></p><p><b>patient</b>: <a>P. van de Heuvel</a></p><h3>Participants</h3><table><tr><td>-</td><td><b>Individual</b></td></tr><tr><td>*</td><td><a>E.M. van den Broek</a></td></tr></table><p><b>length</b>: 90 min<span> (Details: http://unitsofmeasure.org code min = '??')</span></p><p><b>reason</b>: Retropharyngeal abscess <span>(Details : {SNOMED CT code '18099001' = '18099001', given as 'Retropharyngeal abscess'})</span></p><h3>Hospitalizations</h3><table><tr><td>-</td><td><b>PreAdmissionIdentifier</b></td><td><b>AdmitSource</b></td><td><b>DischargeDisposition</b></td></tr><tr><td>*</td><td>93042 (OFFICIAL)</td><td>Referral by physician <span>(Details : {SNOMED CT code '305956004' = '305956004', given as 'Referral by physician'})</span></td><td>Discharge to home <span>(Details : {SNOMED CT code '306689006' = '306689006', given as 'Discharge to home'})</span></td></tr></table><p><b>serviceProvider</b>: <a>Organization/f001</a></p></div>"
)
assert inst.text.status == "generated"
assert inst.type[0].coding[0].code == "270427003"
assert inst.type[0].coding[0].display == "Patient-initiated encounter"
assert inst.type[0].coding[0].system == "http://snomed.info/sct"
def test_Encounter_4(base_settings):
filename = (
base_settings["unittest_data_dir"]
/ "encounter-example-f201-20130404.canonical.json"
)
inst = encounter.Encounter.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "Encounter" == inst.resource_type
impl_Encounter_4(inst)
# testing reverse by generating data from itself and create again.
data = inst.dict()
assert "Encounter" == data["resourceType"]
inst2 = encounter.Encounter(**data)
impl_Encounter_4(inst2)
def impl_Encounter_4(inst):
assert inst.id == "f201"
assert inst.identifier[0].use == "temp"
assert inst.identifier[0].value == "Encounter_Roel_20130404"
assert inst.participant[0].individual.reference == "Practitioner/f201"
assert inst.patient.display == "Roel"
assert inst.patient.reference == "Patient/f201"
assert inst.priority.coding[0].code == "17621005"
assert inst.priority.coding[0].display == "Normal"
assert inst.priority.coding[0].system == "http://snomed.info/sct"
assert (
inst.reason[0].text
== "The patient had fever peaks over the last couple of days. He is worried about these peaks."
)
assert inst.serviceProvider.reference == "Organization/f201"
assert inst.status == "finished"
assert (
inst.text.div
== "<div><p><b>Generated Narrative with Details</b></p><p><b>id</b>: f201</p><p><b>identifier</b>: Encounter_Roel_20130404 (TEMP)</p><p><b>status</b>: finished</p><p><b>class</b>: outpatient</p><p><b>type</b>: Consultation <span>(Details : {SNOMED CT code '11429006' = '11429006', given as 'Consultation'})</span></p><p><b>priority</b>: Normal <span>(Details : {SNOMED CT code '17621005' = '17621005', given as 'Normal'})</span></p><p><b>patient</b>: <a>Roel</a></p><h3>Participants</h3><table><tr><td>-</td><td><b>Individual</b></td></tr><tr><td>*</td><td><a>Practitioner/f201</a></td></tr></table><p><b>reason</b>: The patient had fever peaks over the last couple of days. He is worried about these peaks. <span>(Details )</span></p><p><b>serviceProvider</b>: <a>Organization/f201</a></p></div>"
)
assert inst.text.status == "generated"
assert inst.type[0].coding[0].code == "11429006"
assert inst.type[0].coding[0].display == "Consultation"
assert inst.type[0].coding[0].system == "http://snomed.info/sct"
def test_Encounter_5(base_settings):
filename = (
base_settings["unittest_data_dir"]
/ "encounter-example-f202-20130128.canonical.json"
)
inst = encounter.Encounter.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "Encounter" == inst.resource_type
impl_Encounter_5(inst)
# testing reverse by generating data from itself and create again.
data = inst.dict()
assert "Encounter" == data["resourceType"]
inst2 = encounter.Encounter(**data)
impl_Encounter_5(inst2)
def impl_Encounter_5(inst):
assert inst.id == "f202"
assert inst.identifier[0].use == "temp"
assert inst.identifier[0].value == "Encounter_Roel_20130128"
assert inst.indication[0].display == "Roel's TPF chemotherapy on January 28th, 2013"
assert (
inst.indication[0].extension[0].url
== "http://hl7.org/fhir/StructureDefinition/encounter-primaryDiagnosis"
)
assert inst.indication[0].extension[0].valueInteger == 1
assert inst.indication[0].reference == "Procedure/f201"
assert inst.length.code == "258701004"
assert inst.length.system == "http://snomed.info/sct"
assert inst.length.unit == "minutes"
assert inst.length.value == Decimal("56")
assert inst.participant[0].individual.reference == "Practitioner/f201"
assert inst.patient.display == "Roel"
assert inst.patient.reference == "Patient/f201"
assert inst.priority.coding[0].code == "103391001"
assert inst.priority.coding[0].display == "Urgent"
assert inst.priority.coding[0].system == "http://snomed.info/sct"
assert (
inst.reason[0].extension[0].url
== "http://hl7.org/fhir/StructureDefinition/encounter-primaryDiagnosis"
)
assert inst.reason[0].extension[0].valueInteger == 2
assert inst.reason[0].text == "The patient is treated for a tumor."
assert inst.serviceProvider.reference == "Organization/f201"
assert inst.status == "finished"
assert (
inst.text.div
== "<div><p><b>Generated Narrative with Details</b></p><p><b>id</b>: f202</p><p><b>identifier</b>: Encounter_Roel_20130128 (TEMP)</p><p><b>status</b>: finished</p><p><b>class</b>: outpatient</p><p><b>type</b>: Chemotherapy <span>(Details : {SNOMED CT code '367336001' = '367336001', given as 'Chemotherapy'})</span></p><p><b>priority</b>: Urgent <span>(Details : {SNOMED CT code '103391001' = '103391001', given as 'Urgent'})</span></p><p><b>patient</b>: <a>Roel</a></p><h3>Participants</h3><table><tr><td>-</td><td><b>Individual</b></td></tr><tr><td>*</td><td><a>Practitioner/f201</a></td></tr></table><p><b>length</b>: 56 minutes<span> (Details: SNOMED CT code 258701004 = '258701004')</span></p><p><b>reason</b>: The patient is treated for a tumor. <span>(Details )</span></p><p><b>indication</b>: <a>Roel's TPF chemotherapy on January 28th, 2013</a></p><p><b>serviceProvider</b>: <a>Organization/f201</a></p></div>"
)
assert inst.text.status == "generated"
assert inst.type[0].coding[0].code == "367336001"
assert inst.type[0].coding[0].display == "Chemotherapy"
assert inst.type[0].coding[0].system == "http://snomed.info/sct"
def test_Encounter_8(base_settings):
    """Parse the xcda example Encounter, verify it, then round-trip it.

    The resource is serialised back to a dict and rebuilt to prove the
    representation survives a round trip unchanged.
    """
    source = (
        base_settings["unittest_data_dir"] / "encounter-example-xcda.canonical.json"
    )
    inst = encounter.Encounter.parse_file(
        source, content_type="application/json", encoding="utf-8"
    )
    assert inst.resource_type == "Encounter"
    impl_Encounter_8(inst)
    # Reverse test: rebuild the resource from its own dict output.
    payload = inst.dict()
    assert payload["resourceType"] == "Encounter"
    rebuilt = encounter.Encounter(**payload)
    impl_Encounter_8(rebuilt)
def impl_Encounter_8(inst):
    """Assert the exact field values expected from encounter-example-xcda.canonical.json."""
    assert inst.id == "xcda"
    # NOTE(review): "enocunter" is misspelled in the upstream example data;
    # the assertion must match the data as published, not corrected spelling.
    assert (
        inst.identifier[0].system
        == "http://healthcare.example.org/identifiers/enocunter"
    )
    assert inst.identifier[0].use == "official"
    assert inst.identifier[0].value == "1234213.52345873"
    assert inst.participant[0].individual.reference == "Practitioner/xcda1"
    assert inst.patient.reference == "Patient/xcda"
    assert inst.reason[0].coding[0].code == "T-D8200"
    assert inst.reason[0].coding[0].display == "Arm"
    assert (
        inst.reason[0].coding[0].system == "http://ihe.net/xds/connectathon/eventCodes"
    )
    assert inst.status == "finished"
    # The narrative div must match the generated HTML byte-for-byte.
    assert (
        inst.text.div
        == "<div><p><b>Generated Narrative with Details</b></p><p><b>id</b>: xcda</p><p><b>identifier</b>: 1234213.52345873 (OFFICIAL)</p><p><b>status</b>: finished</p><p><b>class</b>: outpatient</p><p><b>patient</b>: <a>Patient/xcda</a></p><h3>Participants</h3><table><tr><td>-</td><td><b>Individual</b></td></tr><tr><td>*</td><td><a>Practitioner/xcda1</a></td></tr></table><p><b>reason</b>: Arm <span>(Details : {http://ihe.net/xds/connectathon/eventCodes code 'T-D8200' = '??', given as 'Arm'})</span></p></div>"
    )
    assert inst.text.status == "generated"
def test_Encounter_9(base_settings):
    """Parse the qicore example Encounter, verify it, then round-trip it."""
    source = base_settings["unittest_data_dir"] / "encounter-example.canonical.json"
    inst = encounter.Encounter.parse_file(
        source, content_type="application/json", encoding="utf-8"
    )
    assert inst.resource_type == "Encounter"
    impl_Encounter_9(inst)
    # Reverse test: rebuild the resource from its own dict output.
    payload = inst.dict()
    assert payload["resourceType"] == "Encounter"
    rebuilt = encounter.Encounter(**payload)
    impl_Encounter_9(rebuilt)
def impl_Encounter_9(inst):
    """Assert the exact field values expected from encounter-example.canonical.json."""
    # relatedCondition extension: nested sub-extensions identify the
    # condition reference and the role it played in the encounter.
    assert inst.extension[0].extension[0].url == "condition"
    assert inst.extension[0].extension[0].valueReference.reference == "Condition/qicore"
    assert inst.extension[0].extension[1].url == "role"
    assert (
        inst.extension[0].extension[1].valueCodeableConcept.coding[0].code == "8319008"
    )
    assert (
        inst.extension[0].extension[1].valueCodeableConcept.coding[0].display
        == "Principal diagnosis"
    )
    assert (
        inst.extension[0].extension[1].valueCodeableConcept.coding[0].system
        == "http://snomed.info/sct"
    )
    assert (
        inst.extension[0].url
        == "http://hl7.org/fhir/StructureDefinition/encounter-relatedCondition"
    )
    assert inst.hospitalization.dischargeDisposition.coding[0].code == "home"
    assert inst.hospitalization.dischargeDisposition.coding[0].display == "Home"
    assert (
        inst.hospitalization.dischargeDisposition.coding[0].system
        == "http://hl7.org/fhir/discharge-disposition"
    )
    assert inst.id == "encounter-example"
    assert inst.patient.reference == "patient-example"
    # Period boundaries are timezone-aware UTC datetimes.
    assert inst.period.end == datetime(2015, 2, 20, 00, 00, 00, tzinfo=timezone.utc)
    assert inst.period.start == datetime(2015, 2, 9, 00, 00, 00, tzinfo=timezone.utc)
    assert inst.status == "finished"
    assert inst.text.div == "<div>Encounter with patient @qicore</div>"
    assert inst.text.status == "generated"
| 54.181604
| 1,473
| 0.672442
| 3,005
| 22,973
| 5.103161
| 0.082196
| 0.116726
| 0.009195
| 0.024389
| 0.887056
| 0.85895
| 0.830518
| 0.778741
| 0.754809
| 0.717705
| 0
| 0.054492
| 0.149262
| 22,973
| 423
| 1,474
| 54.309693
| 0.730147
| 0.021199
| 0
| 0.55914
| 0
| 0.016129
| 0.45097
| 0.176677
| 0
| 0
| 0
| 0
| 0.518817
| 1
| 0.037634
| false
| 0
| 0.010753
| 0
| 0.048387
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a2b9943441e25220ac31f5fecd3151d667d78267
| 295
|
py
|
Python
|
main.py
|
vulovicv23/proj
|
fc02147ae51e4e142e06c8d07e03ffd6ae4e3673
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
vulovicv23/proj
|
fc02147ae51e4e142e06c8d07e03ffd6ae4e3673
|
[
"Apache-2.0"
] | null | null | null |
main.py
|
vulovicv23/proj
|
fc02147ae51e4e142e06c8d07e03ffd6ae4e3673
|
[
"Apache-2.0"
] | null | null | null |
def main():
    """Print the demo greetings: four "Hello2 World!" lines, then four "Hello World!" lines."""
    for greeting in ("Hello2 World!", "Hello World!"):
        for _ in range(4):
            print(greeting)
def vule1():
    """Print the placeholder marker string "asd"."""
    marker = "asd"
    print(marker)
# Script entry point: run the demo only when executed directly, not on import.
if __name__ == "__main__":
    main()
| 18.4375
| 26
| 0.572881
| 34
| 295
| 4.735294
| 0.294118
| 0.434783
| 0.397516
| 0.521739
| 0.770186
| 0.770186
| 0.770186
| 0.770186
| 0.770186
| 0.770186
| 0
| 0.022124
| 0.233898
| 295
| 16
| 27
| 18.4375
| 0.690265
| 0
| 0
| 0.615385
| 0
| 0
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| true
| 0
| 0
| 0
| 0.153846
| 0.692308
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
a2ef2e18c4e74e4cdfb07368078bb0a6c85dc6ae
| 11,279
|
py
|
Python
|
AutoConfigGUI/icons_rc.py
|
Polly2014/AutoConfiguration
|
678a78ca880ec78de66276629db8c8e526fe1768
|
[
"MIT"
] | 1
|
2020-02-25T04:36:23.000Z
|
2020-02-25T04:36:23.000Z
|
AutoConfigGUI/icons_rc.py
|
Polly2014/AutoConfiguration
|
678a78ca880ec78de66276629db8c8e526fe1768
|
[
"MIT"
] | null | null | null |
AutoConfigGUI/icons_rc.py
|
Polly2014/AutoConfiguration
|
678a78ca880ec78de66276629db8c8e526fe1768
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.13.0)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x04\x7e\
\x00\
\x00\x01\x00\x01\x00\x10\x10\x00\x00\x01\x00\x20\x00\x68\x04\x00\
\x00\x16\x00\x00\x00\x28\x00\x00\x00\x10\x00\x00\x00\x20\x00\x00\
\x00\x01\x00\x20\x00\x00\x00\x00\x00\x00\x04\x00\x00\x8c\xb8\x00\
\x00\x8c\xb8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x15\x00\x00\x00\x73\x00\x00\x00\xc6\x00\x00\x00\xde\x00\x00\x00\
\xdd\x00\x00\x00\xdf\x00\x00\x00\xcb\x00\x00\x00\x7f\x00\x00\x00\
\x1c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x33\x00\x00\x00\
\xb6\x00\x00\x00\xb4\x00\x00\x00\x61\x00\x00\x00\x2f\x00\x00\x00\
\x21\x00\x00\x00\x2c\x00\x00\x00\x59\x00\x00\x00\xac\x00\x00\x00\
\xbf\x00\x00\x00\x41\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x31\x00\x00\x00\xc8\x00\x00\x00\
\x7b\x00\x00\x00\x0d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00\
\x6a\x00\x00\x00\xcd\x00\x00\x00\x41\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x0e\x00\x00\x00\xb2\x00\x00\x00\x7f\x00\x00\x00\
\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1b\x00\x00\x00\
\x69\x00\x00\x00\x1c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x6a\x00\x00\x00\xbf\x00\x00\x00\x1c\x00\x00\x00\
\x00\x00\x00\x00\x57\x00\x00\x00\xbd\x00\x00\x00\x11\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x26\x00\x00\x00\xb4\x00\x00\x00\
\xd5\x00\x00\x00\xb4\x00\x00\x00\x27\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x08\x00\x00\x00\xac\x00\x00\x00\x7f\x00\x00\x00\
\x02\x00\x00\x00\x9f\x00\x00\x00\x72\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x36\x00\x00\x00\xc3\x00\x00\x00\x8a\x00\x00\x00\
\x1a\x00\x00\x00\x89\x00\x00\x00\xc3\x00\x00\x00\x36\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x59\x00\x00\x00\xcb\x00\x00\x00\
\x10\x00\x00\x00\xbb\x00\x00\x00\x41\x00\x00\x00\x00\x00\x00\x00\
\x31\x00\x00\x00\xc9\x00\x00\x00\x76\x00\x00\x00\x05\x00\x00\x00\
\x09\x00\x00\x00\x05\x00\x00\x00\x75\x00\x00\x00\xc9\x00\x00\x00\
\x31\x00\x00\x00\x00\x00\x00\x00\x2c\x00\x00\x00\xdf\x00\x00\x00\
\x17\x00\x00\x00\xc0\x00\x00\x00\x33\x00\x00\x00\x00\x00\x00\x00\
\x24\x00\x00\x00\x48\x00\x00\x00\x01\x00\x00\x00\x52\x00\x00\x00\
\xc0\x00\x00\x00\x52\x00\x00\x00\x01\x00\x00\x00\x48\x00\x00\x00\
\x25\x00\x00\x00\x00\x00\x00\x00\x21\x00\x00\x00\xdd\x00\x00\x00\
\x0e\x00\x00\x00\xba\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x67\x00\x00\x00\xcb\x00\x00\x00\
\x8c\x00\x00\x00\xcb\x00\x00\x00\x67\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x2f\x00\x00\x00\xde\x00\x00\x00\
\x00\x00\x00\x00\x99\x00\x00\x00\x7a\x00\x00\x00\x00\x00\x00\x00\
\x0a\x00\x00\x00\x7d\x00\x00\x00\xc9\x00\x00\x00\x42\x00\x00\x00\
\x00\x00\x00\x00\x41\x00\x00\x00\xc9\x00\x00\x00\x7d\x00\x00\x00\
\x0a\x00\x00\x00\x00\x00\x00\x00\x61\x00\x00\x00\xc6\x00\x00\x00\
\x00\x00\x00\x00\x4d\x00\x00\x00\xc3\x00\x00\x00\x15\x00\x00\x00\
\x3e\x00\x00\x00\xb4\x00\x00\x00\x31\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x31\x00\x00\x00\xb3\x00\x00\x00\
\x3f\x00\x00\x00\x0a\x00\x00\x00\xb4\x00\x00\x00\x73\x00\x00\x00\
\x00\x00\x00\x00\x09\x00\x00\x00\xa7\x00\x00\x00\x90\x00\x00\x00\
\x10\x00\x00\x00\x0e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\
\x0b\x00\x00\x00\x7b\x00\x00\x00\xb6\x00\x00\x00\x15\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x25\x00\x00\x00\xbf\x00\x00\x00\
\x8f\x00\x00\x00\x17\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\
\x7f\x00\x00\x00\xc8\x00\x00\x00\x33\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x25\x00\x00\x00\
\xa7\x00\x00\x00\xc3\x00\x00\x00\x7a\x00\x00\x00\x44\x00\x00\x00\
\x33\x00\x00\x00\x41\x00\x00\x00\x72\x00\x00\x00\xbd\x00\x00\x00\
\xb2\x00\x00\x00\x31\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x09\x00\x00\x00\x4d\x00\x00\x00\x99\x00\x00\x00\xba\x00\x00\x00\
\xc0\x00\x00\x00\xbb\x00\x00\x00\x9f\x00\x00\x00\x57\x00\x00\x00\
\x0e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\
\x17\x00\x00\x00\x10\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x80\x00\x00\x00\xc0\x01\x00\x00\
\x00\x00\x04\x7e\
\x00\
\x00\x01\x00\x01\x00\x10\x10\x00\x00\x01\x00\x20\x00\x68\x04\x00\
\x00\x16\x00\x00\x00\x28\x00\x00\x00\x10\x00\x00\x00\x20\x00\x00\
\x00\x01\x00\x20\x00\x00\x00\x00\x00\x00\x04\x00\x00\x8c\xb8\x00\
\x00\x8c\xb8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x15\x00\x00\x00\x73\x00\x00\x00\xc6\x00\x00\x00\xde\x00\x00\x00\
\xdd\x00\x00\x00\xdf\x00\x00\x00\xcb\x00\x00\x00\x7f\x00\x00\x00\
\x1c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x33\x00\x00\x00\
\xb6\x00\x00\x00\xb4\x00\x00\x00\x61\x00\x00\x00\x2f\x00\x00\x00\
\x21\x00\x00\x00\x2c\x00\x00\x00\x59\x00\x00\x00\xac\x00\x00\x00\
\xbf\x00\x00\x00\x41\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x31\x00\x00\x00\xc8\x00\x00\x00\
\x7a\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x00\x00\x00\
\x69\x00\x00\x00\xcd\x00\x00\x00\x41\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x0e\x00\x00\x00\xb2\x00\x00\x00\x7f\x00\x00\x00\
\x0c\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x12\x00\x00\x00\
\x0b\x00\x00\x00\x69\x00\x00\x00\xbf\x00\x00\x00\x1c\x00\x00\x00\
\x00\x00\x00\x00\x57\x00\x00\x00\xbd\x00\x00\x00\x0e\x00\x00\x00\
\x38\x00\x00\x00\xb9\x00\x00\x00\x3c\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x32\x00\x00\x00\xb7\x00\x00\x00\
\x46\x00\x00\x00\x04\x00\x00\x00\xac\x00\x00\x00\x7f\x00\x00\x00\
\x02\x00\x00\x00\x9f\x00\x00\x00\x72\x00\x00\x00\x00\x00\x00\x00\
\x06\x00\x00\x00\x70\x00\x00\x00\xcd\x00\x00\x00\x4e\x00\x00\x00\
\x00\x00\x00\x00\x43\x00\x00\x00\xc9\x00\x00\x00\x7c\x00\x00\x00\
\x09\x00\x00\x00\x00\x00\x00\x00\x59\x00\x00\x00\xcb\x00\x00\x00\
\x10\x00\x00\x00\xbb\x00\x00\x00\x41\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x5a\x00\x00\x00\xcc\x00\x00\x00\
\x95\x00\x00\x00\xcb\x00\x00\x00\x66\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x2c\x00\x00\x00\xdf\x00\x00\x00\
\x17\x00\x00\x00\xc0\x00\x00\x00\x33\x00\x00\x00\x00\x00\x00\x00\
\x24\x00\x00\x00\x52\x00\x00\x00\x03\x00\x00\x00\x46\x00\x00\x00\
\xb8\x00\x00\x00\x51\x00\x00\x00\x01\x00\x00\x00\x4d\x00\x00\x00\
\x2b\x00\x00\x00\x00\x00\x00\x00\x21\x00\x00\x00\xdd\x00\x00\x00\
\x0e\x00\x00\x00\xba\x00\x00\x00\x44\x00\x00\x00\x00\x00\x00\x00\
\x29\x00\x00\x00\xc4\x00\x00\x00\x83\x00\x00\x00\x08\x00\x00\x00\
\x05\x00\x00\x00\x05\x00\x00\x00\x76\x00\x00\x00\xc9\x00\x00\x00\
\x34\x00\x00\x00\x00\x00\x00\x00\x2f\x00\x00\x00\xde\x00\x00\x00\
\x00\x00\x00\x00\x99\x00\x00\x00\x7a\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x2c\x00\x00\x00\xbb\x00\x00\x00\x96\x00\x00\x00\
\x20\x00\x00\x00\x8a\x00\x00\x00\xc3\x00\x00\x00\x35\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x61\x00\x00\x00\xc6\x00\x00\x00\
\x00\x00\x00\x00\x4d\x00\x00\x00\xc3\x00\x00\x00\x18\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x1e\x00\x00\x00\xaa\x00\x00\x00\
\xd9\x00\x00\x00\xb3\x00\x00\x00\x26\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x0d\x00\x00\x00\xb4\x00\x00\x00\x73\x00\x00\x00\
\x00\x00\x00\x00\x09\x00\x00\x00\xa7\x00\x00\x00\x90\x00\x00\x00\
\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x15\x00\x00\x00\
\x60\x00\x00\x00\x1b\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x01\x00\x00\x00\x7b\x00\x00\x00\xb6\x00\x00\x00\x15\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x25\x00\x00\x00\xbf\x00\x00\x00\
\x90\x00\x00\x00\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11\x00\x00\x00\
\x7f\x00\x00\x00\xc8\x00\x00\x00\x33\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x25\x00\x00\x00\
\xa7\x00\x00\x00\xc3\x00\x00\x00\x7a\x00\x00\x00\x44\x00\x00\x00\
\x33\x00\x00\x00\x41\x00\x00\x00\x72\x00\x00\x00\xbd\x00\x00\x00\
\xb2\x00\x00\x00\x31\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x09\x00\x00\x00\x4d\x00\x00\x00\x99\x00\x00\x00\xba\x00\x00\x00\
\xc0\x00\x00\x00\xbb\x00\x00\x00\x9f\x00\x00\x00\x57\x00\x00\x00\
\x0e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\
\x17\x00\x00\x00\x10\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x80\x00\x00\x00\xc0\x01\x00\x00\
"
qt_resource_name = b"\
\x00\x05\
\x00\x6f\xa6\x53\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x73\
\x00\x0d\
\x00\x26\xc3\x1f\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x5f\x00\x73\x00\x68\x00\x6f\x00\x77\x00\x2e\x00\x69\x00\x63\x00\x6f\
\x00\x0d\
\x00\xec\xc5\xff\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x5f\x00\x68\x00\x69\x00\x64\x00\x65\x00\x2e\x00\x69\x00\x63\x00\x6f\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x02\
\x00\x00\x00\x10\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x30\x00\x00\x00\x00\x00\x01\x00\x00\x04\x82\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x10\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x70\x75\x18\xbb\x02\
\x00\x00\x00\x30\x00\x00\x00\x00\x00\x01\x00\x00\x04\x82\
\x00\x00\x01\x70\x75\x18\x61\x71\
"
# Pick the resource-struct format matching the Qt runtime: rcc format 1 for
# Qt versions before 5.8, format 2 for 5.8 and newer.
qt_version = [int(v) for v in QtCore.qVersion().split('.')]
if qt_version < [5, 8, 0]:
    rcc_version = 1
    qt_resource_struct = qt_resource_struct_v1
else:
    rcc_version = 2
    qt_resource_struct = qt_resource_struct_v2
def qInitResources():
    """Register the embedded icon data with Qt's resource system."""
    QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    """Unregister the embedded icon data from Qt's resource system."""
    QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| 53.709524
| 103
| 0.72666
| 2,649
| 11,279
| 3.079653
| 0.070215
| 1.206913
| 1.386737
| 1.306203
| 0.912356
| 0.912356
| 0.900834
| 0.78279
| 0.776906
| 0.773229
| 0
| 0.438627
| 0.026332
| 11,279
| 209
| 104
| 53.966507
| 0.304225
| 0.013476
| 0
| 0.497409
| 0
| 0.797927
| 0.00009
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0.010363
| false
| 0
| 0.005181
| 0
| 0.015544
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 13
|
0c1f8bd32b092f70e1393cbf1580166224692263
| 741
|
py
|
Python
|
lshash-master/test.py
|
SinhaUddeshya/Webscience
|
fe867a25a7ca1538fdf7a3e294d7b45a42cc28ff
|
[
"MIT"
] | null | null | null |
lshash-master/test.py
|
SinhaUddeshya/Webscience
|
fe867a25a7ca1538fdf7a3e294d7b45a42cc28ff
|
[
"MIT"
] | null | null | null |
lshash-master/test.py
|
SinhaUddeshya/Webscience
|
fe867a25a7ca1538fdf7a3e294d7b45a42cc28ff
|
[
"MIT"
] | null | null | null |
from lshash import LSHash

# Build a locality-sensitive hash index with 6-bit hashes over
# 8-dimensional input vectors.
lsh = LSHash(6, 8)

# Index a few individual 8-dimensional points.
lsh.index([1, 2, 3, 4, 5, 6, 7, 8])
lsh.index([2, 3, 4, 5, 6, 7, 8, 9])
lsh.index([10, 12, 99, 1, 5, 31, 2, 3])
# Query with a near-neighbour of the first point (last component differs).
lsh.query([1, 2, 3, 4, 5, 6, 7, 7])
print(lsh.query([1, 2, 3, 4, 5, 6, 7, 7]))

# Alternative: index several points in a single call by passing a list of
# vectors (same data as the individual calls above, plus two extra rows).
# lsh.index([[1, 2, 3, 4, 5, 6, 7, 8],
#            [2, 3, 4, 5, 6, 7, 8, 9],
#            [4, 2, 3, 1, 5, 6, 7, 8],
#            [1, 3, 4, 5, 6, 7, 8, 9],
#            [10, 12, 99, 1, 5, 31, 2, 3]])
lsh.index([[1, 2, 3, 4, 5, 6, 7, 8],
           [2, 3, 4, 5, 6, 7, 8, 9],
           [4, 2, 3, 1, 5, 6, 7, 8],
           [1, 3, 4, 5, 6, 7, 8, 9],
           [10, 12, 99, 1, 5, 31, 2, 3]])
# Query again, this time ranking results by Jaccard distance.
print(lsh.query([1, 2, 3, 4, 5, 6, 7, 7], distance_func='jaccard'))
| 32.217391
| 68
| 0.377868
| 159
| 741
| 1.754717
| 0.157233
| 0.100358
| 0.139785
| 0.157706
| 0.709677
| 0.709677
| 0.709677
| 0.709677
| 0.681004
| 0.637993
| 0
| 0.294606
| 0.349528
| 741
| 22
| 69
| 33.681818
| 0.284232
| 0.275304
| 0
| 0
| 0
| 0
| 0.013917
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.076923
| 0.153846
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0c3b2c3e7d6e4099b204208aba3f9aa83fdb079a
| 6,123
|
py
|
Python
|
orgs/forms.py
|
jpaav/comm
|
514d62b8c3ed301dee559538825ad8f253e25fc8
|
[
"MIT"
] | null | null | null |
orgs/forms.py
|
jpaav/comm
|
514d62b8c3ed301dee559538825ad8f253e25fc8
|
[
"MIT"
] | 65
|
2018-06-08T02:59:14.000Z
|
2018-06-22T20:42:54.000Z
|
orgs/forms.py
|
jpaav/comm
|
514d62b8c3ed301dee559538825ad8f253e25fc8
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from django import forms
from orgs.models import Org
from patientlog.models import Tag, Resident
class SelectResidentsField(forms.ModelMultipleChoiceField):
    """Multiple-choice field whose option labels show each object's full name."""

    def label_from_instance(self, obj):
        # Label options with the related object's full name instead of the
        # model's default string representation.
        return "{}".format(obj.get_full_name())
class CreateOrgForm(forms.Form):
    """Form for creating an Org with a name, description and location."""

    # Plain text inputs styled with the 'form-control' CSS class.
    name = forms.CharField(required=True, widget=forms.TextInput(
        attrs={'type': 'text',
               'class': 'form-control'}))
    description = forms.CharField(required=False, widget=forms.TextInput(
        attrs={'type': 'text',
               'class': 'form-control'}))
    location = forms.CharField(required=False, widget=forms.TextInput(
        attrs={'type': 'text',
               'class': 'form-control'}))

    # The clean_* hooks below currently return the cleaned value unchanged;
    # they are placeholders for future per-field validation.
    def clean_location(self):
        location = self.cleaned_data['location']
        return location

    def clean_name(self):
        name = self.cleaned_data['name']
        return name

    def clean_description(self):
        description = self.cleaned_data['description']
        return description

    def save(self, commit=True):
        # Returns an UNSAVED Org built from the cleaned data; the caller is
        # responsible for persisting it.
        # NOTE(review): the ``commit`` argument is ignored — confirm intended.
        return Org(
            name=self.cleaned_data['name'],
            description=self.cleaned_data['description'],
            location=self.cleaned_data['location']
        )
class CreateTagForm(forms.Form):
    """Form for creating a Tag: title, hex color, importance level, email flag."""

    IMPORTANCE_CHOICES = [
        ('1', '!'),
        ('2', '!!'),
        ('3', '!!!'),
    ]
    title = forms.CharField(required=True, widget=forms.TextInput(
        attrs={'type': 'text',
               'class': 'form-control'}))
    color = forms.CharField(required=True, widget=forms.TextInput(
        attrs={'type': 'text',
               'class': 'form-control'}))
    importance = forms.ChoiceField(required=True, initial=1, choices=IMPORTANCE_CHOICES, widget=forms.Select(
        attrs={'type': 'text',
               'class': 'form-control'}))
    should_email = forms.NullBooleanField(required=False, widget=forms.NullBooleanSelect(
        attrs={'type': 'text',
               'class': 'form-control'}))

    def clean_title(self):
        # Placeholder for future title validation; returns the value as-is.
        title = self.cleaned_data['title']
        return title

    def clean_color(self):
        """Normalize the color to a 6-digit hex string, falling back to 'e9ecef'."""
        color = self.cleaned_data['color']
        # Remove a leading hashtag if present.
        # BUG FIX: the original stripped the LAST character (``color[:-1]``),
        # which corrupted valid '#rrggbb' input; strip the leading '#' instead,
        # matching UpdateTagForm.clean_color.  ``startswith`` also avoids an
        # IndexError on an empty string.
        if color.startswith('#'):
            color = color[1:]
        if not len(color) == 6:
            color = 'e9ecef'
        try:
            int(color, 16)
        except ValueError:
            color = 'e9ecef'
        return color

    def save(self, commit=True):
        # Returns an UNSAVED Tag; the caller is responsible for persisting it.
        # NOTE(review): the ``commit`` argument is ignored — confirm intended.
        return Tag(
            title=self.cleaned_data['title'],
            color=self.clean_color(),
            importance=self.cleaned_data['importance'],
            should_email=self.cleaned_data['should_email']
        )
class CreateResidentForm(forms.Form):
    """Form for creating a Resident, with advocates scoped to the owning Org."""

    # NOTE(review): instantiating a model at class level is unusual; this is
    # overwritten per-instance in __init__ — confirm the class attribute is
    # actually needed.
    org = Org()
    name = forms.CharField(required=True, widget=forms.TextInput(
        attrs={'type': 'text',
               'class': 'form-control'}))
    room = forms.CharField(required=False, widget=forms.TextInput(
        attrs={'type': 'text',
               'class': 'form-control'}))
    # BUG FIX: pass the callable ``datetime.now`` (not ``datetime.now()``) so
    # the initial value is evaluated each time the form is rendered, instead
    # of once at import time (Django calls callable initials lazily).
    timestamp_admitted = forms.DateTimeField(label='Last Admitted', initial=datetime.now, required=False, input_formats=['%Y-%m-%dT%H:%M'], widget=forms.DateTimeInput(
        attrs={'type': 'datetime-local',
               'class': 'form-control'}))
    timestamp_left = forms.DateTimeField(label='Last left', initial=datetime.now, required=False, input_formats=['%Y-%m-%dT%H:%M'], widget=forms.DateTimeInput(
        attrs={'type': 'datetime-local',
               'class': 'form-control'}))
    # Queryset is filled in by __init__ once the org is known.
    advocates = SelectResidentsField(
        queryset=None, required=False, widget=forms.SelectMultiple(
            attrs={'type': 'input',
                   'class': 'form-control'}))

    def clean_name(self):
        # Placeholder for future name validation; returns the value as-is.
        name = self.cleaned_data['name']
        return name

    def clean_room(self):
        # Placeholder for future room validation; returns the value as-is.
        room = self.cleaned_data['room']
        return room

    def __init__(self, *args, **kwargs):
        # Pop the 'org' kwarg before delegating so forms.Form does not see it,
        # then limit the advocates choices to that org's members.
        self.org = kwargs.pop('org', None)
        super(CreateResidentForm, self).__init__(*args, **kwargs)
        if self.org:
            self.fields['advocates'].queryset = self.org.members.all()

    def save(self, commit=True):
        # Returns an UNSAVED Resident; the caller is responsible for
        # persisting it.
        # NOTE(review): 'advocates' is cleaned but never stored here —
        # confirm whether the M2M assignment happens in the caller.
        return Resident(
            name=self.cleaned_data['name'],
            room=self.cleaned_data['room'],
            timestamp_admitted=self.cleaned_data['timestamp_admitted'],
            timestamp_left=self.cleaned_data['timestamp_left'],
        )
class UpdateTagForm(forms.Form):
    """Form for updating a Tag: title, hex color, importance level, email flag."""

    IMPORTANCE_CHOICES = [
        ('1', '!'),
        ('2', '!!'),
        ('3', '!!!'),
    ]
    title = forms.CharField(required=True, widget=forms.TextInput(
        attrs={'type': 'text',
               'class': 'form-control'}))
    color = forms.CharField(required=True, widget=forms.TextInput(
        attrs={'type': 'text',
               'class': 'form-control'}))
    importance = forms.ChoiceField(required=True, initial=1, choices=IMPORTANCE_CHOICES, widget=forms.Select(
        attrs={'type': 'text',
               'class': 'form-control'}))
    should_email = forms.NullBooleanField(required=False, widget=forms.NullBooleanSelect(
        attrs={'type': 'text',
               'class': 'form-control'}))

    def clean_color(self):
        # Normalize the color to a 6-digit hex string; fall back to the
        # default 'e9ecef' when the value is not valid hex of length 6.
        color = self.cleaned_data['color']
        # Remove hashtag if present
        if color[0] == '#':
            color = color[1:]
        if not len(color) == 6:
            color = 'e9ecef'
        try:
            int(color, 16)
        except ValueError:
            color = 'e9ecef'
        return color

    def save(self, commit=True):
        # Returns an UNSAVED Tag; the caller is responsible for persisting it.
        # NOTE(review): the ``commit`` argument is ignored — confirm intended.
        return Tag(
            title=self.cleaned_data['title'],
            color=self.clean_color(),
            importance=self.cleaned_data['importance'],
            should_email=self.cleaned_data['should_email']
        )
class UpdateResidentForm(forms.Form):
    """Form for updating a Resident, with advocates scoped to the owning Org."""

    # NOTE(review): instantiating a model at class level is unusual; this is
    # overwritten per-instance in __init__ — confirm the class attribute is
    # actually needed.
    org = Org()
    name = forms.CharField(required=True, widget=forms.TextInput(
        attrs={'type': 'text',
               'class': 'form-control'}))
    room = forms.CharField(required=False, widget=forms.TextInput(
        attrs={'type': 'text',
               'class': 'form-control'}))
    # BUG FIX: pass the callable ``datetime.now`` (not ``datetime.now()``) so
    # the initial value is evaluated each time the form is rendered, instead
    # of once at import time (Django calls callable initials lazily).
    timestamp_admitted = forms.DateTimeField(label='Last Admitted', initial=datetime.now, required=False, input_formats=['%Y-%m-%dT%H:%M'], widget=forms.DateTimeInput(
        attrs={'type': 'datetime-local',
               'class': 'form-control'}))
    timestamp_left = forms.DateTimeField(label='Last left', initial=datetime.now, required=False, input_formats=['%Y-%m-%dT%H:%M'], widget=forms.DateTimeInput(
        attrs={'type': 'datetime-local',
               'class': 'form-control'}))
    # Queryset is filled in by __init__ once the org is known.
    advocates = SelectResidentsField(
        queryset=None, required=False, widget=forms.SelectMultiple(
            attrs={'type': 'input',
                   'class': 'form-control'}))

    def __init__(self, *args, **kwargs):
        # Pop the 'org' kwarg before delegating so forms.Form does not see it,
        # then limit the advocates choices to that org's members.
        self.org = kwargs.pop('org', None)
        super(UpdateResidentForm, self).__init__(*args, **kwargs)
        if self.org:
            self.fields['advocates'].queryset = self.org.members.all()

    def save(self, commit=True):
        # Unlike the create form, this returns the cleaned data dict; the
        # caller applies it to the existing Resident.
        return self.cleaned_data
| 27.958904
| 166
| 0.689205
| 749
| 6,123
| 5.535381
| 0.136182
| 0.05837
| 0.079595
| 0.065123
| 0.861312
| 0.800289
| 0.793777
| 0.793777
| 0.791848
| 0.791848
| 0
| 0.004148
| 0.133758
| 6,123
| 218
| 167
| 28.087156
| 0.777526
| 0.008329
| 0
| 0.75
| 0
| 0
| 0.148295
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095238
| false
| 0
| 0.059524
| 0.035714
| 0.422619
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0c41c0fc6c8a8a6e2165c3d34bd5726232eb0201
| 6,583
|
py
|
Python
|
terrascript/openstack/r.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | 507
|
2017-07-26T02:58:38.000Z
|
2022-01-21T12:35:13.000Z
|
terrascript/openstack/r.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | 135
|
2017-07-20T12:01:59.000Z
|
2021-10-04T22:25:40.000Z
|
terrascript/openstack/r.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | 81
|
2018-02-20T17:55:28.000Z
|
2022-01-31T07:08:40.000Z
|
# terrascript/openstack/r.py
import terrascript
# Generated stub mapping the Terraform resource of the same name; behavior
# comes entirely from the terrascript.Resource base class.
class openstack_blockstorage_quotaset_v2(terrascript.Resource):
    pass
class openstack_blockstorage_quotaset_v3(terrascript.Resource):
pass
class openstack_blockstorage_volume_v1(terrascript.Resource):
pass
class openstack_blockstorage_volume_v2(terrascript.Resource):
pass
class openstack_blockstorage_volume_v3(terrascript.Resource):
pass
class openstack_blockstorage_volume_attach_v2(terrascript.Resource):
pass
class openstack_blockstorage_volume_attach_v3(terrascript.Resource):
pass
class openstack_compute_flavor_v2(terrascript.Resource):
pass
class openstack_compute_flavor_access_v2(terrascript.Resource):
pass
class openstack_compute_instance_v2(terrascript.Resource):
pass
class openstack_compute_interface_attach_v2(terrascript.Resource):
pass
class openstack_compute_keypair_v2(terrascript.Resource):
pass
class openstack_compute_secgroup_v2(terrascript.Resource):
pass
class openstack_compute_servergroup_v2(terrascript.Resource):
pass
class openstack_compute_quotaset_v2(terrascript.Resource):
pass
class openstack_compute_floatingip_v2(terrascript.Resource):
pass
class openstack_compute_floatingip_associate_v2(terrascript.Resource):
pass
class openstack_compute_volume_attach_v2(terrascript.Resource):
pass
class openstack_containerinfra_clustertemplate_v1(terrascript.Resource):
pass
class openstack_containerinfra_cluster_v1(terrascript.Resource):
pass
class openstack_db_instance_v1(terrascript.Resource):
pass
class openstack_db_user_v1(terrascript.Resource):
pass
class openstack_db_configuration_v1(terrascript.Resource):
pass
class openstack_db_database_v1(terrascript.Resource):
pass
class openstack_dns_recordset_v2(terrascript.Resource):
pass
class openstack_dns_zone_v2(terrascript.Resource):
pass
class openstack_fw_firewall_v1(terrascript.Resource):
pass
class openstack_fw_policy_v1(terrascript.Resource):
pass
class openstack_fw_rule_v1(terrascript.Resource):
pass
class openstack_identity_endpoint_v3(terrascript.Resource):
pass
class openstack_identity_project_v3(terrascript.Resource):
pass
class openstack_identity_role_v3(terrascript.Resource):
pass
class openstack_identity_role_assignment_v3(terrascript.Resource):
pass
class openstack_identity_service_v3(terrascript.Resource):
pass
class openstack_identity_user_v3(terrascript.Resource):
pass
class openstack_identity_group_v3(terrascript.Resource):
pass
class openstack_identity_application_credential_v3(terrascript.Resource):
pass
class openstack_identity_ec2_credential_v3(terrascript.Resource):
pass
class openstack_images_image_v2(terrascript.Resource):
pass
class openstack_images_image_access_v2(terrascript.Resource):
pass
class openstack_images_image_access_accept_v2(terrascript.Resource):
pass
class openstack_lb_member_v1(terrascript.Resource):
pass
class openstack_lb_monitor_v1(terrascript.Resource):
pass
class openstack_lb_pool_v1(terrascript.Resource):
pass
class openstack_lb_vip_v1(terrascript.Resource):
pass
class openstack_lb_loadbalancer_v2(terrascript.Resource):
pass
class openstack_lb_listener_v2(terrascript.Resource):
pass
class openstack_lb_pool_v2(terrascript.Resource):
pass
class openstack_lb_member_v2(terrascript.Resource):
pass
class openstack_lb_members_v2(terrascript.Resource):
pass
class openstack_lb_monitor_v2(terrascript.Resource):
pass
class openstack_lb_l7policy_v2(terrascript.Resource):
pass
class openstack_lb_l7rule_v2(terrascript.Resource):
pass
class openstack_networking_floatingip_v2(terrascript.Resource):
pass
class openstack_networking_floatingip_associate_v2(terrascript.Resource):
pass
# Generated stub resources for the Terraform OpenStack provider.
# Each class maps one Terraform resource type (networking, object storage,
# orchestration, VPNaaS, shared filesystem, key manager) onto
# terrascript.Resource; the class name is the Terraform resource name.
class openstack_networking_network_v2(terrascript.Resource):
    pass
class openstack_networking_port_v2(terrascript.Resource):
    pass
class openstack_networking_rbac_policy_v2(terrascript.Resource):
    pass
class openstack_networking_port_secgroup_associate_v2(terrascript.Resource):
    pass
class openstack_networking_qos_bandwidth_limit_rule_v2(terrascript.Resource):
    pass
class openstack_networking_qos_dscp_marking_rule_v2(terrascript.Resource):
    pass
class openstack_networking_qos_minimum_bandwidth_rule_v2(terrascript.Resource):
    pass
class openstack_networking_qos_policy_v2(terrascript.Resource):
    pass
class openstack_networking_quota_v2(terrascript.Resource):
    pass
class openstack_networking_router_v2(terrascript.Resource):
    pass
class openstack_networking_router_interface_v2(terrascript.Resource):
    pass
class openstack_networking_router_route_v2(terrascript.Resource):
    pass
class openstack_networking_secgroup_v2(terrascript.Resource):
    pass
class openstack_networking_secgroup_rule_v2(terrascript.Resource):
    pass
class openstack_networking_subnet_v2(terrascript.Resource):
    pass
class openstack_networking_subnet_route_v2(terrascript.Resource):
    pass
class openstack_networking_subnetpool_v2(terrascript.Resource):
    pass
class openstack_networking_addressscope_v2(terrascript.Resource):
    pass
class openstack_networking_trunk_v2(terrascript.Resource):
    pass
class openstack_objectstorage_container_v1(terrascript.Resource):
    pass
class openstack_objectstorage_object_v1(terrascript.Resource):
    pass
class openstack_objectstorage_tempurl_v1(terrascript.Resource):
    pass
class openstack_orchestration_stack_v1(terrascript.Resource):
    pass
class openstack_vpnaas_ipsec_policy_v2(terrascript.Resource):
    pass
class openstack_vpnaas_service_v2(terrascript.Resource):
    pass
class openstack_vpnaas_ike_policy_v2(terrascript.Resource):
    pass
class openstack_vpnaas_endpoint_group_v2(terrascript.Resource):
    pass
class openstack_vpnaas_site_connection_v2(terrascript.Resource):
    pass
class openstack_sharedfilesystem_securityservice_v2(terrascript.Resource):
    pass
class openstack_sharedfilesystem_sharenetwork_v2(terrascript.Resource):
    pass
class openstack_sharedfilesystem_share_v2(terrascript.Resource):
    pass
class openstack_sharedfilesystem_share_access_v2(terrascript.Resource):
    pass
class openstack_keymanager_secret_v1(terrascript.Resource):
    pass
class openstack_keymanager_container_v1(terrascript.Resource):
    pass
class openstack_keymanager_order_v1(terrascript.Resource):
    pass
| 18.134986
| 79
| 0.821054
| 761
| 6,583
| 6.701708
| 0.127464
| 0.247059
| 0.405882
| 0.488627
| 0.894706
| 0.894706
| 0.818431
| 0.393137
| 0.054902
| 0
| 0
| 0.016121
| 0.123652
| 6,583
| 362
| 80
| 18.185083
| 0.867915
| 0.00395
| 0
| 0.497238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.497238
| 0.005525
| 0
| 0.502762
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 9
|
a76da57013f04c9106f4177d1cbdc41f1cd50883
| 7,538
|
py
|
Python
|
fonts/DejaVuSans_10.py
|
ironss/micropython-lib
|
61719636dad9aaa581c8e39e71ccc515e75c2d43
|
[
"MIT"
] | null | null | null |
fonts/DejaVuSans_10.py
|
ironss/micropython-lib
|
61719636dad9aaa581c8e39e71ccc515e75c2d43
|
[
"MIT"
] | null | null | null |
fonts/DejaVuSans_10.py
|
ironss/micropython-lib
|
61719636dad9aaa581c8e39e71ccc515e75c2d43
|
[
"MIT"
] | 2
|
2019-09-24T13:36:55.000Z
|
2020-04-18T02:05:38.000Z
|
# Code generated by font-to-py.py.
# Font: DejaVuSans.ttf
# Font metric accessors: each returns a fixed property of this rendered font.
version = '0.26'

def height():
    """Glyph height in pixels (10)."""
    return 10

def max_width():
    """Widest glyph in pixels (11)."""
    return 11

def hmap():
    """False: glyph data is not horizontally mapped."""
    return False

def reverse():
    """False: bit order within bytes is not reversed."""
    return False

def monospaced():
    """False: glyphs have per-character widths (see get_width/get_ch)."""
    return False

def min_ch():
    """Lowest supported character code (32, space)."""
    return 32

def max_ch():
    """Highest supported character code (126, '~')."""
    return 126
_font =\
b'\x05\x00\x02\x00\xb2\x00\x0a\x00\x06\x00\x00\x00\x03\x00\x00\x00'\
b'\x00\x00\x00\x00\x04\x00\xbe\x00\x00\x00\x00\x00\x00\x00\x05\x00'\
b'\x0e\x00\x00\x00\x0e\x00\x00\x00\x00\x00\x08\x00\x20\x00\xe8\x00'\
b'\x3e\x00\x28\x00\xf8\x00\x2e\x00\x08\x00\x00\x00\x06\x00\x98\x00'\
b'\x94\x00\xfe\x01\xa4\x00\x64\x00\x00\x00\x0a\x00\x1e\x00\x12\x00'\
b'\xde\x00\x30\x00\x18\x00\xf6\x00\x90\x00\xf0\x00\x00\x00\x00\x00'\
b'\x09\x00\x70\x00\xcc\x00\x92\x00\x92\x00\x64\x00\x40\x00\xb0\x00'\
b'\x00\x00\x00\x00\x03\x00\x0e\x00\x00\x00\x00\x00\x04\x00\xfe\x00'\
b'\x01\x01\x00\x00\x00\x00\x04\x00\x83\x01\x7c\x00\x00\x00\x00\x00'\
b'\x05\x00\x12\x00\x0c\x00\x1e\x00\x0c\x00\x12\x00\x08\x00\x10\x00'\
b'\x10\x00\x10\x00\xfe\x00\x10\x00\x10\x00\x10\x00\x00\x00\x03\x00'\
b'\x80\x01\x00\x00\x00\x00\x04\x00\x20\x00\x20\x00\x20\x00\x00\x00'\
b'\x03\x00\x80\x00\x00\x00\x00\x00\x03\x00\x80\x01\x78\x00\x06\x00'\
b'\x06\x00\x7c\x00\x82\x00\x82\x00\x82\x00\x7c\x00\x00\x00\x06\x00'\
b'\x82\x00\x82\x00\xfe\x00\x80\x00\x80\x00\x00\x00\x06\x00\x84\x00'\
b'\xc2\x00\xa2\x00\x92\x00\x8c\x00\x00\x00\x06\x00\x44\x00\x92\x00'\
b'\x92\x00\x92\x00\x6c\x00\x00\x00\x06\x00\x30\x00\x28\x00\x24\x00'\
b'\xfe\x00\x20\x00\x00\x00\x06\x00\x8e\x00\x8a\x00\x8a\x00\x8a\x00'\
b'\x70\x00\x00\x00\x06\x00\x7c\x00\x96\x00\x92\x00\x92\x00\x62\x00'\
b'\x00\x00\x06\x00\x02\x00\x82\x00\x62\x00\x1a\x00\x06\x00\x00\x00'\
b'\x06\x00\x6c\x00\x92\x00\x92\x00\x92\x00\x6c\x00\x00\x00\x06\x00'\
b'\x8c\x00\x92\x00\x92\x00\xd2\x00\x7c\x00\x00\x00\x03\x00\x88\x00'\
b'\x00\x00\x00\x00\x03\x00\x88\x01\x00\x00\x00\x00\x08\x00\x10\x00'\
b'\x10\x00\x28\x00\x28\x00\x28\x00\x44\x00\x00\x00\x00\x00\x08\x00'\
b'\x28\x00\x28\x00\x28\x00\x28\x00\x28\x00\x28\x00\x00\x00\x00\x00'\
b'\x08\x00\x44\x00\x28\x00\x28\x00\x28\x00\x10\x00\x10\x00\x00\x00'\
b'\x00\x00\x05\x00\x02\x00\xb2\x00\x0a\x00\x06\x00\x00\x00\x0b\x00'\
b'\xf8\x00\x8c\x01\x06\x03\x72\x02\x52\x02\x72\x02\x42\x01\x64\x00'\
b'\x38\x00\x00\x00\x00\x00\x07\x00\x80\x00\x70\x00\x2c\x00\x22\x00'\
b'\x2c\x00\x70\x00\x80\x00\x07\x00\xfe\x00\x92\x00\x92\x00\x92\x00'\
b'\x6c\x00\x00\x00\x00\x00\x08\x00\x38\x00\x44\x00\x82\x00\x82\x00'\
b'\x82\x00\x44\x00\x00\x00\x00\x00\x08\x00\xfe\x00\x82\x00\x82\x00'\
b'\x82\x00\xc6\x00\x7c\x00\x00\x00\x00\x00\x07\x00\xfe\x00\x92\x00'\
b'\x92\x00\x92\x00\x92\x00\x00\x00\x00\x00\x06\x00\xfe\x00\x12\x00'\
b'\x12\x00\x12\x00\x00\x00\x00\x00\x08\x00\x7c\x00\xc6\x00\x82\x00'\
b'\x92\x00\x92\x00\x74\x00\x00\x00\x00\x00\x08\x00\xfe\x00\x10\x00'\
b'\x10\x00\x10\x00\x10\x00\xfe\x00\x00\x00\x00\x00\x03\x00\xfe\x00'\
b'\x00\x00\x00\x00\x03\x00\x00\x02\x00\x02\xfe\x01\x07\x00\xfe\x00'\
b'\x10\x00\x28\x00\x44\x00\x82\x00\x00\x00\x00\x00\x06\x00\xfe\x00'\
b'\x80\x00\x80\x00\x80\x00\x80\x00\x00\x00\x09\x00\xfe\x00\x0c\x00'\
b'\x30\x00\x40\x00\x30\x00\x0c\x00\xfe\x00\x00\x00\x00\x00\x08\x00'\
b'\xfe\x00\x04\x00\x18\x00\x20\x00\x40\x00\xfe\x00\x00\x00\x00\x00'\
b'\x08\x00\x7c\x00\xc6\x00\x82\x00\x82\x00\xc6\x00\x7c\x00\x00\x00'\
b'\x00\x00\x07\x00\xfe\x00\x12\x00\x12\x00\x12\x00\x0c\x00\x00\x00'\
b'\x00\x00\x08\x00\x7c\x00\xc6\x00\x82\x00\x82\x00\x46\x01\x3c\x00'\
b'\x00\x00\x00\x00\x07\x00\xfe\x00\x12\x00\x12\x00\x32\x00\x4c\x00'\
b'\x80\x00\x00\x00\x07\x00\x4c\x00\x92\x00\x92\x00\x92\x00\x64\x00'\
b'\x00\x00\x00\x00\x05\x00\x02\x00\x02\x00\xfe\x00\x02\x00\x02\x00'\
b'\x08\x00\x7e\x00\x80\x00\x80\x00\x80\x00\x80\x00\x7e\x00\x00\x00'\
b'\x00\x00\x07\x00\x06\x00\x18\x00\x60\x00\x80\x00\x60\x00\x18\x00'\
b'\x06\x00\x09\x00\x06\x00\x38\x00\xc0\x00\x38\x00\x06\x00\x38\x00'\
b'\xc0\x00\x38\x00\x06\x00\x06\x00\x82\x00\xc6\x00\x38\x00\x38\x00'\
b'\xc6\x00\x82\x00\x07\x00\x02\x00\x04\x00\x08\x00\xf0\x00\x08\x00'\
b'\x04\x00\x02\x00\x06\x00\x82\x00\xc2\x00\xb2\x00\x9a\x00\x86\x00'\
b'\x82\x00\x04\x00\xff\x01\x01\x01\x00\x00\x00\x00\x03\x00\x06\x00'\
b'\x78\x00\x80\x01\x04\x00\x01\x01\xff\x01\x00\x00\x00\x00\x08\x00'\
b'\x08\x00\x04\x00\x02\x00\x02\x00\x04\x00\x08\x00\x00\x00\x00\x00'\
b'\x05\x00\x00\x01\x00\x01\x00\x01\x00\x01\x00\x01\x05\x00\x01\x00'\
b'\x02\x00\x00\x00\x00\x00\x00\x00\x06\x00\xc0\x00\xa8\x00\xa8\x00'\
b'\xa8\x00\xf0\x00\x00\x00\x06\x00\xff\x00\x88\x00\x88\x00\x88\x00'\
b'\x70\x00\x00\x00\x05\x00\x70\x00\x88\x00\x88\x00\x88\x00\x00\x00'\
b'\x06\x00\x70\x00\x88\x00\x88\x00\x88\x00\xff\x00\x00\x00\x06\x00'\
b'\x70\x00\xa8\x00\xa8\x00\xa8\x00\xb0\x00\x00\x00\x04\x00\x08\x00'\
b'\xff\x00\x09\x00\x01\x00\x06\x00\x70\x00\x88\x02\x88\x02\x88\x02'\
b'\xf8\x01\x00\x00\x06\x00\xff\x00\x08\x00\x08\x00\x08\x00\xf0\x00'\
b'\x00\x00\x02\x00\xf9\x00\x00\x00\x02\x00\x00\x02\xf9\x03\x05\x00'\
b'\xff\x00\x20\x00\x50\x00\x88\x00\x00\x00\x02\x00\xff\x00\x00\x00'\
b'\x0a\x00\xf8\x00\x08\x00\x08\x00\x08\x00\xf0\x00\x08\x00\x08\x00'\
b'\x08\x00\xf0\x00\x00\x00\x06\x00\xf8\x00\x08\x00\x08\x00\x08\x00'\
b'\xf0\x00\x00\x00\x06\x00\x70\x00\x88\x00\x88\x00\x88\x00\x70\x00'\
b'\x00\x00\x06\x00\xf8\x03\x88\x00\x88\x00\x88\x00\x70\x00\x00\x00'\
b'\x06\x00\x70\x00\x88\x00\x88\x00\x88\x00\xf8\x03\x00\x00\x04\x00'\
b'\xf8\x00\x08\x00\x08\x00\x00\x00\x05\x00\x98\x00\xa8\x00\xa8\x00'\
b'\xe8\x00\x00\x00\x04\x00\x08\x00\xfe\x00\x88\x00\x88\x00\x06\x00'\
b'\x78\x00\x80\x00\x80\x00\x80\x00\xf8\x00\x00\x00\x06\x00\x18\x00'\
b'\x60\x00\x80\x00\x60\x00\x18\x00\x00\x00\x08\x00\x38\x00\xc0\x00'\
b'\x30\x00\x08\x00\x30\x00\xc0\x00\x38\x00\x00\x00\x06\x00\x88\x00'\
b'\x50\x00\x20\x00\x50\x00\x88\x00\x00\x00\x06\x00\x00\x00\x18\x02'\
b'\x60\x02\x80\x01\x60\x00\x18\x00\x05\x00\x88\x00\xc8\x00\xa8\x00'\
b'\x98\x00\x00\x00\x06\x00\x10\x00\x10\x00\xef\x01\x01\x01\x00\x00'\
b'\x00\x00\x03\x00\xff\x03\x00\x00\x00\x00\x06\x00\x01\x01\xef\x01'\
b'\x10\x00\x10\x00\x00\x00\x00\x00\x08\x00\x20\x00\x10\x00\x10\x00'\
b'\x20\x00\x20\x00\x10\x00\x00\x00\x00\x00'
_index =\
b'\x00\x00\x0c\x00\x14\x00\x1e\x00\x2a\x00\x3c\x00\x4a\x00\x60\x00'\
b'\x74\x00\x7c\x00\x86\x00\x90\x00\x9c\x00\xae\x00\xb6\x00\xc0\x00'\
b'\xc8\x00\xd0\x00\xde\x00\xec\x00\xfa\x00\x08\x01\x16\x01\x24\x01'\
b'\x32\x01\x40\x01\x4e\x01\x5c\x01\x64\x01\x6c\x01\x7e\x01\x90\x01'\
b'\xa2\x01\xae\x01\xc6\x01\xd6\x01\xe6\x01\xf8\x01\x0a\x02\x1a\x02'\
b'\x28\x02\x3a\x02\x4c\x02\x54\x02\x5c\x02\x6c\x02\x7a\x02\x8e\x02'\
b'\xa0\x02\xb2\x02\xc2\x02\xd4\x02\xe4\x02\xf4\x02\x00\x03\x12\x03'\
b'\x22\x03\x36\x03\x44\x03\x54\x03\x62\x03\x6c\x03\x74\x03\x7e\x03'\
b'\x90\x03\x9c\x03\xa8\x03\xb6\x03\xc4\x03\xd0\x03\xde\x03\xec\x03'\
b'\xf6\x03\x04\x04\x12\x04\x18\x04\x1e\x04\x2a\x04\x30\x04\x46\x04'\
b'\x54\x04\x62\x04\x70\x04\x7e\x04\x88\x04\x94\x04\x9e\x04\xac\x04'\
b'\xba\x04\xcc\x04\xda\x04\xe8\x04\xf4\x04\x02\x05\x0a\x05\x18\x05'\
b'\x2a\x05'
# Zero-copy view over the glyph blob; get_ch() slices this instead of _font
# so no bytes are copied when a glyph is returned.
_mvfont = memoryview(_font)

def _chr_addr(ordch):
    """Return the byte offset into _font of the glyph record for *ordch*.

    _index holds one little-endian 16-bit offset per glyph slot, two bytes
    per entry, starting at character code 32.
    """
    idx = (ordch - 32) * 2
    # Manual little-endian decode of the two index bytes.
    return _index[idx] | (_index[idx + 1] << 8)
def get_width(s):
    """Return the total rendered pixel width of string *s*."""
    total = 0
    for ch in s:
        code = ord(ch)
        if 32 <= code <= 126:
            # Supported chars are stored one slot after the default glyph.
            code += 1
        else:
            # Out-of-range chars fall back to the default glyph slot.
            code = 32
        off = _chr_addr(code)
        # First two bytes of each glyph record hold its width (little-endian).
        total += int.from_bytes(_font[off:off + 2], 'little')
    return total
def get_ch(ch):
    """Return ``(glyph_bytes, width)`` for character *ch*.

    ``glyph_bytes`` is a zero-copy memoryview slice of the font blob;
    ``width`` is the glyph's pixel width.
    """
    code = ord(ch)
    # Map supported chars to their slot (+1 past the default glyph);
    # everything else to the default glyph slot.
    code = code + 1 if 32 <= code <= 126 else 32
    start = _chr_addr(code)
    width = int.from_bytes(_font[start:start + 2], 'little')
    end = _chr_addr(code + 1)
    # Skip the 2-byte width header; the record runs to the next glyph's offset.
    return _mvfont[start + 2:end], width
| 50.590604
| 68
| 0.698328
| 1,761
| 7,538
| 2.974446
| 0.090857
| 0.264605
| 0.238832
| 0.142039
| 0.63173
| 0.5231
| 0.431653
| 0.294769
| 0.191676
| 0.122948
| 0
| 0.392738
| 0.046431
| 7,538
| 148
| 69
| 50.932432
| 0.335977
| 0.007031
| 0
| 0.068182
| 1
| 0.712121
| 0.813528
| 0.809517
| 0
| 1
| 0
| 0
| 0
| 1
| 0.075758
| false
| 0
| 0
| 0.05303
| 0.151515
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a7c4a064aaa24866d77cb990561e53700b9afcde
| 33,094
|
py
|
Python
|
shell/gen-py/ImpalaInternalService/ImpalaInternalService.py
|
suifengzhuliu/impala
|
611f4c6f3b18cfcddff3b2956cbb87c295a87655
|
[
"Apache-2.0"
] | null | null | null |
shell/gen-py/ImpalaInternalService/ImpalaInternalService.py
|
suifengzhuliu/impala
|
611f4c6f3b18cfcddff3b2956cbb87c295a87655
|
[
"Apache-2.0"
] | null | null | null |
shell/gen-py/ImpalaInternalService/ImpalaInternalService.py
|
suifengzhuliu/impala
|
611f4c6f3b18cfcddff3b2956cbb87c295a87655
|
[
"Apache-2.0"
] | null | null | null |
#
# Autogenerated by Thrift Compiler (0.9.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
# Optional C-accelerated (de)serializer; fall back to pure Python when absent.
# Catch only ImportError: the original bare `except:` also swallowed
# SystemExit/KeyboardInterrupt and would hide real bugs in the module.
try:
    from thrift.protocol import fastbinary
except ImportError:
    fastbinary = None
class Iface:
    """Abstract interface of the ImpalaInternalService Thrift service.

    Servers subclass this (via Processor) and implement each method; Client
    provides the matching RPC stubs. Every method takes a single Thrift
    `params` struct as its only argument.
    """
    def ExecQueryFInstances(self, params):
        """
        Parameters:
         - params
        """
        pass
    def ReportExecStatus(self, params):
        """
        Parameters:
         - params
        """
        pass
    def CancelQueryFInstances(self, params):
        """
        Parameters:
         - params
        """
        pass
    def TransmitData(self, params):
        """
        Parameters:
         - params
        """
        pass
    def UpdateFilter(self, params):
        """
        Parameters:
         - params
        """
        pass
    def PublishFilter(self, params):
        """
        Parameters:
         - params
        """
        pass
class Client(Iface):
    """Synchronous Thrift client for ImpalaInternalService.

    Each public RPC method serializes a CALL message, flushes the transport,
    and blocks reading the REPLY. The six RPCs share identical wire handling,
    so the common logic lives in the private helpers `_send_call` and
    `_recv_reply`; the per-method `send_X`/`recv_X` entry points are kept for
    callers that drive the two halves separately.
    """
    def __init__(self, iprot, oprot=None):
        # When only one protocol is given it is used for both directions.
        self._iprot = self._oprot = iprot
        if oprot is not None:
            self._oprot = oprot
        self._seqid = 0

    def _send_call(self, name, args):
        """Serialize *args* as a CALL message named *name* and flush."""
        self._oprot.writeMessageBegin(name, TMessageType.CALL, self._seqid)
        args.write(self._oprot)
        self._oprot.writeMessageEnd()
        self._oprot.trans.flush()

    def _recv_reply(self, result, name):
        """Read one reply into *result*; return its success payload.

        Raises TApplicationException when the server sent an EXCEPTION
        message or the reply carried no result.
        """
        (fname, mtype, rseqid) = self._iprot.readMessageBegin()
        if mtype == TMessageType.EXCEPTION:
            x = TApplicationException()
            x.read(self._iprot)
            self._iprot.readMessageEnd()
            raise x
        result.read(self._iprot)
        self._iprot.readMessageEnd()
        if result.success is not None:
            return result.success
        raise TApplicationException(TApplicationException.MISSING_RESULT,
                                    "%s failed: unknown result" % (name,))

    def ExecQueryFInstances(self, params):
        """
        Parameters:
         - params
        """
        self.send_ExecQueryFInstances(params)
        return self.recv_ExecQueryFInstances()

    def send_ExecQueryFInstances(self, params):
        args = ExecQueryFInstances_args()
        args.params = params
        self._send_call('ExecQueryFInstances', args)

    def recv_ExecQueryFInstances(self):
        return self._recv_reply(ExecQueryFInstances_result(), 'ExecQueryFInstances')

    def ReportExecStatus(self, params):
        """
        Parameters:
         - params
        """
        self.send_ReportExecStatus(params)
        return self.recv_ReportExecStatus()

    def send_ReportExecStatus(self, params):
        args = ReportExecStatus_args()
        args.params = params
        self._send_call('ReportExecStatus', args)

    def recv_ReportExecStatus(self):
        return self._recv_reply(ReportExecStatus_result(), 'ReportExecStatus')

    def CancelQueryFInstances(self, params):
        """
        Parameters:
         - params
        """
        self.send_CancelQueryFInstances(params)
        return self.recv_CancelQueryFInstances()

    def send_CancelQueryFInstances(self, params):
        args = CancelQueryFInstances_args()
        args.params = params
        self._send_call('CancelQueryFInstances', args)

    def recv_CancelQueryFInstances(self):
        return self._recv_reply(CancelQueryFInstances_result(), 'CancelQueryFInstances')

    def TransmitData(self, params):
        """
        Parameters:
         - params
        """
        self.send_TransmitData(params)
        return self.recv_TransmitData()

    def send_TransmitData(self, params):
        args = TransmitData_args()
        args.params = params
        self._send_call('TransmitData', args)

    def recv_TransmitData(self):
        return self._recv_reply(TransmitData_result(), 'TransmitData')

    def UpdateFilter(self, params):
        """
        Parameters:
         - params
        """
        self.send_UpdateFilter(params)
        return self.recv_UpdateFilter()

    def send_UpdateFilter(self, params):
        args = UpdateFilter_args()
        args.params = params
        self._send_call('UpdateFilter', args)

    def recv_UpdateFilter(self):
        return self._recv_reply(UpdateFilter_result(), 'UpdateFilter')

    def PublishFilter(self, params):
        """
        Parameters:
         - params
        """
        self.send_PublishFilter(params)
        return self.recv_PublishFilter()

    def send_PublishFilter(self, params):
        args = PublishFilter_args()
        args.params = params
        self._send_call('PublishFilter', args)

    def recv_PublishFilter(self):
        return self._recv_reply(PublishFilter_result(), 'PublishFilter')
class Processor(Iface, TProcessor):
    """Server-side dispatcher for ImpalaInternalService.

    Maps incoming message names to `process_X` methods, each of which reads
    the args struct, invokes the handler, and writes back a REPLY. The six
    RPCs share identical framing, so the common logic lives in `_handle`;
    the per-RPC `process_X` methods are kept because `_processMap` (and any
    subclass overrides) refer to them by name.
    """
    def __init__(self, handler):
        self._handler = handler
        self._processMap = {}
        self._processMap["ExecQueryFInstances"] = Processor.process_ExecQueryFInstances
        self._processMap["ReportExecStatus"] = Processor.process_ReportExecStatus
        self._processMap["CancelQueryFInstances"] = Processor.process_CancelQueryFInstances
        self._processMap["TransmitData"] = Processor.process_TransmitData
        self._processMap["UpdateFilter"] = Processor.process_UpdateFilter
        self._processMap["PublishFilter"] = Processor.process_PublishFilter

    def process(self, iprot, oprot):
        """Read one message from *iprot* and dispatch it.

        Unknown method names get an UNKNOWN_METHOD exception reply (and a
        None return, matching the generated code); dispatched calls return
        True.
        """
        # Renamed the middle element from `type` (shadows the builtin).
        (name, mtype, seqid) = iprot.readMessageBegin()
        if name not in self._processMap:
            iprot.skip(TType.STRUCT)
            iprot.readMessageEnd()
            x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
            oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
            x.write(oprot)
            oprot.writeMessageEnd()
            oprot.trans.flush()
            return
        else:
            self._processMap[name](self, seqid, iprot, oprot)
            return True

    def _handle(self, seqid, iprot, oprot, name, args, result, method):
        """Common RPC framing: read args, call *method*, write REPLY."""
        args.read(iprot)
        iprot.readMessageEnd()
        result.success = method(args.params)
        oprot.writeMessageBegin(name, TMessageType.REPLY, seqid)
        result.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()

    def process_ExecQueryFInstances(self, seqid, iprot, oprot):
        self._handle(seqid, iprot, oprot, "ExecQueryFInstances",
                     ExecQueryFInstances_args(), ExecQueryFInstances_result(),
                     self._handler.ExecQueryFInstances)

    def process_ReportExecStatus(self, seqid, iprot, oprot):
        self._handle(seqid, iprot, oprot, "ReportExecStatus",
                     ReportExecStatus_args(), ReportExecStatus_result(),
                     self._handler.ReportExecStatus)

    def process_CancelQueryFInstances(self, seqid, iprot, oprot):
        self._handle(seqid, iprot, oprot, "CancelQueryFInstances",
                     CancelQueryFInstances_args(), CancelQueryFInstances_result(),
                     self._handler.CancelQueryFInstances)

    def process_TransmitData(self, seqid, iprot, oprot):
        self._handle(seqid, iprot, oprot, "TransmitData",
                     TransmitData_args(), TransmitData_result(),
                     self._handler.TransmitData)

    def process_UpdateFilter(self, seqid, iprot, oprot):
        self._handle(seqid, iprot, oprot, "UpdateFilter",
                     UpdateFilter_args(), UpdateFilter_result(),
                     self._handler.UpdateFilter)

    def process_PublishFilter(self, seqid, iprot, oprot):
        self._handle(seqid, iprot, oprot, "PublishFilter",
                     PublishFilter_args(), PublishFilter_result(),
                     self._handler.PublishFilter)
# HELPER FUNCTIONS AND STRUCTURES
# HELPER FUNCTIONS AND STRUCTURES
class ExecQueryFInstances_args:
    """
    Attributes:
     - params
    """
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'params', (TExecQueryFInstancesParams, TExecQueryFInstancesParams.thrift_spec), None, ),  # 1
    )
    def __init__(self, params=None,):
        self.params = params
    def read(self, iprot):
        # Fast path: C-accelerated decode when the protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.params = TExecQueryFInstancesParams()
                    self.params.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                # Unknown fields are skipped for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('ExecQueryFInstances_args')
        if self.params is not None:
            oprot.writeFieldBegin('params', TType.STRUCT, 1)
            self.params.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        # items() works on Python 2 and 3; the original iteritems() is Py2-only
        # and makes repr() raise AttributeError on Python 3.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class ExecQueryFInstances_result:
    """
    Attributes:
     - success
    """
    thrift_spec = (
        (0, TType.STRUCT, 'success', (TExecQueryFInstancesResult, TExecQueryFInstancesResult.thrift_spec), None, ),  # 0
    )
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TExecQueryFInstancesResult()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('ExecQueryFInstances_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        # items() works on Python 2 and 3 (iteritems() is Py2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class ReportExecStatus_args:
    """
    Attributes:
     - params
    """
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'params', (TReportExecStatusParams, TReportExecStatusParams.thrift_spec), None, ),  # 1
    )
    def __init__(self, params=None,):
        self.params = params
    def read(self, iprot):
        # Fast path: C-accelerated decode when the protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.params = TReportExecStatusParams()
                    self.params.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('ReportExecStatus_args')
        if self.params is not None:
            oprot.writeFieldBegin('params', TType.STRUCT, 1)
            self.params.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        # items() works on Python 2 and 3 (iteritems() is Py2-only and breaks
        # repr() on Python 3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class ReportExecStatus_result:
    """
    Attributes:
     - success
    """
    thrift_spec = (
        (0, TType.STRUCT, 'success', (TReportExecStatusResult, TReportExecStatusResult.thrift_spec), None, ),  # 0
    )
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TReportExecStatusResult()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('ReportExecStatus_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        # items() works on Python 2 and 3 (iteritems() is Py2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class CancelQueryFInstances_args:
    """
    Attributes:
     - params
    """
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'params', (TCancelQueryFInstancesParams, TCancelQueryFInstancesParams.thrift_spec), None, ),  # 1
    )
    def __init__(self, params=None,):
        self.params = params
    def read(self, iprot):
        # Fast path: C-accelerated decode when the protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.params = TCancelQueryFInstancesParams()
                    self.params.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('CancelQueryFInstances_args')
        if self.params is not None:
            oprot.writeFieldBegin('params', TType.STRUCT, 1)
            self.params.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        # items() works on Python 2 and 3 (iteritems() is Py2-only and breaks
        # repr() on Python 3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class CancelQueryFInstances_result:
    """
    Attributes:
     - success
    """
    thrift_spec = (
        (0, TType.STRUCT, 'success', (TCancelQueryFInstancesResult, TCancelQueryFInstancesResult.thrift_spec), None, ),  # 0
    )
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TCancelQueryFInstancesResult()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('CancelQueryFInstances_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        # items() works on Python 2 and 3 (iteritems() is Py2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class TransmitData_args:
    """
    Attributes:
     - params
    """
    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'params', (TTransmitDataParams, TTransmitDataParams.thrift_spec), None, ),  # 1
    )
    def __init__(self, params=None,):
        self.params = params
    def read(self, iprot):
        # Fast path: C-accelerated decode when the protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.params = TTransmitDataParams()
                    self.params.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TransmitData_args')
        if self.params is not None:
            oprot.writeFieldBegin('params', TType.STRUCT, 1)
            self.params.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        # items() works on Python 2 and 3 (iteritems() is Py2-only and breaks
        # repr() on Python 3).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class TransmitData_result:
    """
    Attributes:
     - success
    """
    thrift_spec = (
        (0, TType.STRUCT, 'success', (TTransmitDataResult, TTransmitDataResult.thrift_spec), None, ),  # 0
    )
    def __init__(self, success=None,):
        self.success = success
    def read(self, iprot):
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = TTransmitDataResult()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()
    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TransmitData_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()
    def validate(self):
        return
    def __repr__(self):
        # items() works on Python 2 and 3 (iteritems() is Py2-only).
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
    def __ne__(self, other):
        return not (self == other)
class UpdateFilter_args:
  """
  Thrift-generated argument struct for UpdateFilter.

  Attributes:
   - params
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'params', (TUpdateFilterParams, TUpdateFilterParams.thrift_spec), None, ), # 1
  )

  def __init__(self, params=None,):
    self.params = params

  def read(self, iprot):
    # Fast path: C-accelerated decode when the protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.params = TUpdateFilterParams()
          self.params.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('UpdateFilter_args')
    if self.params is not None:
      oprot.writeFieldBegin('params', TType.STRUCT, 1)
      self.params.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() keeps this Python-3 compatible.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class UpdateFilter_result:
  """
  Thrift-generated result struct for UpdateFilter.

  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (TUpdateFilterResult, TUpdateFilterResult.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: C-accelerated decode when the protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = TUpdateFilterResult()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('UpdateFilter_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() keeps this Python-3 compatible.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class PublishFilter_args:
  """
  Thrift-generated argument struct for PublishFilter.

  Attributes:
   - params
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'params', (TPublishFilterParams, TPublishFilterParams.thrift_spec), None, ), # 1
  )

  def __init__(self, params=None,):
    self.params = params

  def read(self, iprot):
    # Fast path: C-accelerated decode when the protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.params = TPublishFilterParams()
          self.params.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('PublishFilter_args')
    if self.params is not None:
      oprot.writeFieldBegin('params', TType.STRUCT, 1)
      self.params.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() keeps this Python-3 compatible.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class PublishFilter_result:
  """
  Thrift-generated result struct for PublishFilter.

  Attributes:
   - success
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (TPublishFilterResult, TPublishFilterResult.thrift_spec), None, ), # 0
  )

  def __init__(self, success=None,):
    self.success = success

  def read(self, iprot):
    # Fast path: C-accelerated decode when the protocol/transport support it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = TPublishFilterResult()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('PublishFilter_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # .items() instead of Python-2-only .iteritems() keeps this Python-3 compatible.
    L = ['%s=%r' % (key, value)
         for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
| 30.871269
| 188
| 0.683568
| 3,622
| 33,094
| 5.984263
| 0.040861
| 0.033218
| 0.02782
| 0.054256
| 0.825098
| 0.792249
| 0.764475
| 0.750773
| 0.750773
| 0.736378
| 0
| 0.002201
| 0.203753
| 33,094
| 1,071
| 189
| 30.900093
| 0.820348
| 0.021998
| 0
| 0.815594
| 1
| 0
| 0.034391
| 0.00726
| 0
| 0
| 0
| 0
| 0
| 1
| 0.144802
| false
| 0.007426
| 0.007426
| 0.044554
| 0.292079
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a7c984dc2a0fba79f0335cca009c40aaec8c5db8
| 193,595
|
py
|
Python
|
analysis/analyze_aparent_designed_mpra_helpers.py
|
876lkj/APARENT
|
5c8b9c038a46b129b5e0e5ce1453c4725b62322e
|
[
"MIT"
] | 20
|
2019-04-23T20:35:23.000Z
|
2022-02-02T02:07:06.000Z
|
analysis/analyze_aparent_designed_mpra_helpers.py
|
JoshuaChou2018/aparent
|
5c8b9c038a46b129b5e0e5ce1453c4725b62322e
|
[
"MIT"
] | 6
|
2019-10-14T16:35:00.000Z
|
2021-03-24T17:55:07.000Z
|
analysis/analyze_aparent_designed_mpra_helpers.py
|
JoshuaChou2018/aparent
|
5c8b9c038a46b129b5e0e5ce1453c4725b62322e
|
[
"MIT"
] | 11
|
2019-06-10T08:53:57.000Z
|
2021-01-25T00:54:59.000Z
|
import pandas as pd
import scipy
import numpy as np
import scipy.sparse as sp
from scipy.stats import pearsonr
import operator
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import matplotlib.colors as colors
import matplotlib as mpl
from matplotlib.text import TextPath
from matplotlib.patches import PathPatch, Rectangle
from matplotlib.font_manager import FontProperties
from matplotlib import gridspec
from matplotlib.ticker import FormatStrFormatter
from sklearn.metrics import roc_auc_score
#Sequence Plotting Functions
def letterAt(letter, x, y, yscale=1, ax=None, color=None, alpha=1.0):
    """Draw a single glyph of a sequence logo at data position (x, y).

    Parameters:
        letter: one of 'A', 'C', 'G', 'T', 'UP', 'DN', '(', '.', ')'.
        x, y: anchor position in data coordinates.
        yscale: vertical scale factor (letter height, e.g. information content).
        ax: matplotlib axis to draw on. NOTE(review): the transform below reads
            ax.transData unconditionally, so ax=None would raise despite the
            default — presumably callers always pass an axis; confirm.
        color: optional override for the per-letter color scheme.
        alpha: patch transparency.

    Returns the created PathPatch.
    """
    #fp = FontProperties(family="Arial", weight="bold")
    fp = FontProperties(family="Ubuntu", weight="bold")
    globscale = 1.35
    LETTERS = { "T" : TextPath((-0.305, 0), "T", size=1, prop=fp),
                "G" : TextPath((-0.384, 0), "G", size=1, prop=fp),
                "A" : TextPath((-0.35, 0), "A", size=1, prop=fp),
                "C" : TextPath((-0.366, 0), "C", size=1, prop=fp),
                "UP" : TextPath((-0.488, 0), '$\\Uparrow$', size=1, prop=fp),
                "DN" : TextPath((-0.488, 0), '$\\Downarrow$', size=1, prop=fp),
                "(" : TextPath((-0.25, 0), "(", size=1, prop=fp),
                "." : TextPath((-0.125, 0), "-", size=1, prop=fp),
                ")" : TextPath((-0.1, 0), ")", size=1, prop=fp)}
    COLOR_SCHEME = {'G': 'orange',
                    'A': 'red',
                    'C': 'blue',
                    'T': 'darkgreen',
                    'UP': 'green',
                    'DN': 'red',
                    '(': 'black',
                    '.': 'black',
                    ')': 'black'}
    text = LETTERS[letter]
    chosen_color = COLOR_SCHEME[letter]
    if color is not None :
        chosen_color = color
    # Scale the unit glyph, then translate it into data coordinates.
    t = mpl.transforms.Affine2D().scale(1*globscale, yscale*globscale) + \
        mpl.transforms.Affine2D().translate(x,y) + ax.transData
    p = PathPatch(text, lw=0, fc=chosen_color, alpha=alpha, transform=t)
    if ax is not None:  # fixed: identity check instead of `!= None`
        ax.add_artist(p)
    return p
#PWM Helper Functions
def hamming_distance(seq1, seq2) :
    """Return the number of positions at which seq1 and seq2 differ.

    Assumes the sequences have equal length (the original indexed seq2 by
    positions of seq1); comparison runs over paired positions.
    """
    return sum(1 for c1, c2 in zip(seq1, seq2) if c1 != c2)
def get_pwm(seqs) :
    """Compute a position weight matrix from equal-length nucleotide sequences.

    Counts A/C/G/T occurrences per position (any other character is ignored)
    and normalizes each position to a probability distribution.

    Returns an array of shape (len(seqs[0]), 4), columns ordered A, C, G, T.
    """
    nt_index = {'A': 0, 'C': 1, 'G': 2, 'T': 3}
    pwm = np.zeros((len(seqs[0]), 4))
    for seq in seqs:
        for j, nt in enumerate(seq):
            if nt in nt_index:
                pwm[j, nt_index[nt]] += 1
    # Normalize every position; an all-zero row (e.g. all 'N') divides by zero,
    # matching the original behavior.
    for j in range(pwm.shape[0]):
        pwm[j, :] /= np.sum(pwm[j, :])
    return pwm
def one_hot_decode(one_hot) :
    """Convert a one-hot encoded (L, 4) matrix back into an ACGT string.

    Channel order is A, C, G, T; a position with no channel equal to 1
    decodes to 'N'. If several channels are 1, the first wins.
    """
    nucleotides = ('A', 'C', 'G', 'T')
    decoded = []
    for row in one_hot:
        symbol = 'N'
        for k in range(4):
            if row[k] == 1:
                symbol = nucleotides[k]
                break
        decoded.append(symbol)
    return ''.join(decoded)
def get_consensus(pwm) :
    """Return a one-hot matrix selecting the highest-weight channel per position.

    Positions whose row sums to zero (or less) remain all-zero.
    """
    consensus = np.zeros(pwm.shape)
    for j, row in enumerate(pwm):
        if np.sum(row) > 0.0:
            consensus[j, np.argmax(row)] = 1
    return consensus
def find_wt_pwm(wt_seq, pwms) :
    """Return the PWM in `pwms` whose consensus sequence best matches `wt_seq`.

    Distance is the Hamming distance between the consensus' first 163
    positions and wt_seq[1:]. Only candidates closer than 30 mismatches can
    win; otherwise the first PWM is returned.
    """
    best_i = 0
    best_dist = 30
    for i in range(pwms.shape[0]):
        candidate = one_hot_decode(get_consensus(pwms[i, :, :]))
        d = hamming_distance(candidate[:163], wt_seq[1:])
        if d < best_dist:
            best_i = i
            best_dist = d
    return pwms[best_i, :, :]
def find_wt_yhat(wt_seq, pwms, cuts) :
    """Return the cut-prediction row whose PWM consensus best matches `wt_seq`.

    Same matching rule as find_wt_pwm (Hamming distance < 30 over the first
    163 consensus positions vs. wt_seq[1:]), but returns cuts[i] instead of
    the PWM itself.
    """
    best_i = 0
    best_dist = 30
    for i in range(pwms.shape[0]):
        candidate = one_hot_decode(get_consensus(pwms[i, :, :]))
        d = hamming_distance(candidate[:163], wt_seq[1:])
        if d < best_dist:
            best_i = i
            best_dist = d
    return cuts[best_i, :]
def append_predictions(seq_df, seq_cuts, variant_df, variant_cuts_var, variant_cuts_ref, pred_df, cuts_pred) :
    """Join measured MPRA dataframes with model predictions and attach cut probabilities.

    Joins `seq_df` (and `variant_df`, twice: once on the variant sequence and
    once on its wild-type) against `pred_df` on 'master_seq', aligns the sparse
    cut-count matrices to the joined rows, converts them to dense probability
    vectors (with a small pseudo count), and derives isoform/logodds columns.

    NOTE(review): seq_cuts / cuts_pred are assumed to be scipy sparse matrices
    with the column layout sliced below (180+20 .. 180+205, last column =
    distal) — confirm against the pipeline that produces them.

    Returns the augmented (seq_df, variant_df) pair. Mutates the input frames'
    columns in place before joining.
    """
    #Join dataframe with prediction table and calculate true cut probabilities
    # dtype=int: np.int was a deprecated alias of builtin int, removed in NumPy >= 1.24.
    seq_df['row_index_true'] = np.arange(len(seq_df), dtype=int)
    pred_df['row_index_pred'] = np.arange(len(pred_df), dtype=int)
    seq_df = seq_df.join(pred_df.set_index('master_seq'), on='master_seq', how='inner').copy().reset_index(drop=True)
    seq_cuts = seq_cuts[np.ravel(seq_df['row_index_true'].values), :]
    cut_pred = np.array(cuts_pred[np.ravel(seq_df['row_index_pred'].values), :].todense())
    cut_pred = np.concatenate([np.zeros((cut_pred.shape[0], 1)), cut_pred[:, :184], cut_pred[:, 185].reshape(-1, 1)], axis=-1)
    cut_true = np.concatenate([np.array(seq_cuts[:, 180 + 20: 180 + 205].todense()), np.array(seq_cuts[:, -1].todense()).reshape(-1, 1)], axis=-1)
    #Add small pseudo count to true cuts
    cut_true += 0.0005
    cut_true = cut_true / np.sum(cut_true, axis=-1).reshape(-1, 1)
    seq_df['cut_prob_true'] = [cut_true[i, :] for i in range(len(seq_df))]
    seq_df['cut_prob_pred'] = [cut_pred[i, :] for i in range(len(seq_df))]
    # Proximal isoform fraction = mass in cut window [49, 90).
    seq_df['iso_pred_from_cuts'] = np.sum(cut_pred[:, 49: 90], axis=-1)
    seq_df['logodds_pred_from_cuts'] = np.log(seq_df['iso_pred_from_cuts'] / (1.0 - seq_df['iso_pred_from_cuts']))
    seq_df['mean_logodds_pred'] = (seq_df['logodds_pred'] + seq_df['logodds_pred_from_cuts']) / 2.0
    #Join variant dataframe with prediction table and calculate true cut probabilities
    variant_df['row_index_true'] = np.arange(len(variant_df), dtype=int)
    variant_df = variant_df.join(pred_df.rename(columns={'iso_pred' : 'iso_pred_var', 'logodds_pred' : 'logodds_pred_var', 'row_index_pred' : 'row_index_pred_var'}).set_index('master_seq'), on='master_seq', how='inner').copy().reset_index(drop=True)
    variant_df = variant_df.join(pred_df.rename(columns={'iso_pred' : 'iso_pred_ref', 'logodds_pred' : 'logodds_pred_ref', 'row_index_pred' : 'row_index_pred_ref'}).set_index('master_seq'), on='wt_seq', how='inner').copy().reset_index(drop=True)
    variant_cuts_var = variant_cuts_var[np.ravel(variant_df['row_index_true'].values), :]
    variant_cuts_ref = variant_cuts_ref[np.ravel(variant_df['row_index_true'].values), :]
    cut_true_var = np.concatenate([np.array(variant_cuts_var[:, 180 + 20: 180 + 205].todense()), np.array(variant_cuts_var[:, -1].todense()).reshape(-1, 1)], axis=-1)
    #Add small pseudo count to true cuts
    cut_true_var += 0.0005
    cut_true_var = cut_true_var / np.sum(cut_true_var, axis=-1).reshape(-1, 1)
    cut_true_ref = np.concatenate([np.array(variant_cuts_ref[:, 180 + 20: 180 + 205].todense()), np.array(variant_cuts_ref[:, -1].todense()).reshape(-1, 1)], axis=-1)
    #Add small pseudo count to true cuts
    cut_true_ref += 0.0005
    cut_true_ref = cut_true_ref / np.sum(cut_true_ref, axis=-1).reshape(-1, 1)
    cut_pred_var = np.array(cuts_pred[np.ravel(variant_df['row_index_pred_var'].values), :].todense())
    cut_pred_var = np.concatenate([np.zeros((cut_pred_var.shape[0], 1)), cut_pred_var[:, :184], cut_pred_var[:, 185].reshape(-1, 1)], axis=-1)
    cut_pred_ref = np.array(cuts_pred[np.ravel(variant_df['row_index_pred_ref'].values), :].todense())
    cut_pred_ref = np.concatenate([np.zeros((cut_pred_ref.shape[0], 1)), cut_pred_ref[:, :184], cut_pred_ref[:, 185].reshape(-1, 1)], axis=-1)
    variant_df['cut_prob_true_var'] = [cut_true_var[i, :] for i in range(len(variant_df))]
    variant_df['cut_prob_pred_var'] = [cut_pred_var[i, :] for i in range(len(variant_df))]
    variant_df['cut_prob_true_ref'] = [cut_true_ref[i, :] for i in range(len(variant_df))]
    variant_df['cut_prob_pred_ref'] = [cut_pred_ref[i, :] for i in range(len(variant_df))]
    variant_df['iso_pred_from_cuts_var'] = np.sum(cut_pred_var[:, 49: 90], axis=-1)
    variant_df['iso_pred_from_cuts_ref'] = np.sum(cut_pred_ref[:, 49: 90], axis=-1)
    variant_df['logodds_pred_from_cuts_var'] = np.log(variant_df['iso_pred_from_cuts_var'] / (1.0 - variant_df['iso_pred_from_cuts_var']))
    variant_df['logodds_pred_from_cuts_ref'] = np.log(variant_df['iso_pred_from_cuts_ref'] / (1.0 - variant_df['iso_pred_from_cuts_ref']))
    # Variant effect = difference in predicted logodds between variant and wild-type.
    variant_df['delta_logodds_pred'] = variant_df['logodds_pred_var'] - variant_df['logodds_pred_ref']
    variant_df['delta_logodds_pred_from_cuts'] = variant_df['logodds_pred_from_cuts_var'] - variant_df['logodds_pred_from_cuts_ref']
    variant_df['mean_delta_logodds_pred'] = (variant_df['delta_logodds_pred'] + variant_df['delta_logodds_pred_from_cuts']) / 2.0
    return seq_df, variant_df
def aggregate_and_append_predictions(seq_df, seq_cuts, pred_df, cuts_pred) :
    """Aggregate per-barcode predictions per master sequence, then join onto seq_df.

    Predictions for barcoded replicates of the same 'master_seq' are averaged,
    weighted by 'pooled_total_count'. The aggregated predictions and dense
    true/predicted cut-probability vectors are attached to `seq_df`.

    NOTE(review): seq_cuts / cuts_pred are assumed to be scipy sparse matrices
    with the column layout sliced below — confirm against the producing pipeline.

    Returns the augmented seq_df.
    """
    #Aggregate predictions over barcoded replicates
    # dtype=int: np.int was a deprecated alias of builtin int, removed in NumPy >= 1.24.
    pred_df['row_index'] = np.arange(len(pred_df), dtype=int)
    pred_df['iso_pred_from_cuts'] = np.sum(np.array(cuts_pred[:, 77:107].todense()), axis=-1)
    pred_df['iso_pred_mix'] = (pred_df['iso_pred'] + pred_df['iso_pred_from_cuts']) / 2.
    pred_df_group = pred_df.groupby("master_seq")
    pred_df_agg = pred_df_group.agg({
        'master_seq' : 'first',
        'row_index' : lambda x: tuple(x),
        'iso_pred' : lambda x: tuple(x),
        'iso_pred_from_cuts' : lambda x: tuple(x),
        'iso_pred_mix' : lambda x: tuple(x),
        'pooled_total_count' : lambda x: tuple(x)
    })
    pred_df_agg['sum_total_count'] = pred_df_agg['pooled_total_count'].apply(lambda x: np.sum(list(x)))
    # Count-weighted average of each isoform prediction variant, then its logodds.
    for pred_suffix in ['', '_from_cuts', '_mix'] :
        pred_df_agg['iso_pred' + pred_suffix] = pred_df_agg.apply(
            lambda row: np.sum(np.ravel(list(row['iso_pred' + pred_suffix])) * np.ravel(list(row['pooled_total_count']))) / row['sum_total_count']
            ,axis=1
        )
        pred_df_agg['logodds_pred' + pred_suffix] = np.log(pred_df_agg['iso_pred' + pred_suffix] / (1. - pred_df_agg['iso_pred' + pred_suffix]))
    # Count-weighted average of the dense cut-probability rows per master_seq.
    dense_cuts_pred = np.array(cuts_pred.todense())
    dense_cuts_pred_agg = np.zeros((len(pred_df_agg), dense_cuts_pred.shape[1]))
    for i, (_, row) in enumerate(pred_df_agg.iterrows()) :  # enumerate replaces the manual counter
        old_ix = list(row['row_index'])
        counts = np.ravel(list(row['pooled_total_count'])).reshape(-1, 1)
        total_count = row['sum_total_count']
        dense_cuts_pred_agg[i, :] = np.sum(dense_cuts_pred[old_ix, :] * counts / total_count, axis=0)
    pred_df_agg = pred_df_agg[['master_seq', 'iso_pred', 'iso_pred_from_cuts', 'iso_pred_mix', 'logodds_pred', 'logodds_pred_from_cuts', 'logodds_pred_mix']]
    #Join dataframe with prediction table and calculate true cut probabilities
    seq_df['row_index_true'] = np.arange(len(seq_df), dtype=int)
    pred_df_agg['row_index_pred'] = np.arange(len(pred_df_agg), dtype=int)
    seq_df = seq_df.join(pred_df_agg.set_index('master_seq'), on='master_seq', how='inner').copy().reset_index(drop=True)
    seq_cuts = seq_cuts[np.ravel(seq_df['row_index_true'].values), :]
    dense_cuts_pred_agg = dense_cuts_pred_agg[np.ravel(seq_df['row_index_pred'].values), 20:]
    cut_true = np.concatenate([np.array(seq_cuts[:, 180 + 20: 180 + 205].todense()), np.array(seq_cuts[:, -1].todense()).reshape(-1, 1)], axis=-1)
    #Add small pseudo count to true cuts
    cut_true += 0.0005
    cut_true = cut_true / np.sum(cut_true, axis=-1).reshape(-1, 1)
    seq_df['cut_prob_true'] = [cut_true[i, :] for i in range(len(seq_df))]
    seq_df['cut_prob_pred'] = [dense_cuts_pred_agg[i, :] for i in range(len(seq_df))]
    return seq_df
#Max Isoform Helper Functions
def plot_sequence_logo(df, df_human, max_iso_pwm_dict, gene, subexperiments, override_mean_stats=False, plot_percentile=True, plot_mean_logo=True, plot_max_logo=True, plot_actual_pwm=True, plot_opt_pwm=True, black_fixed_seq=True, max_index=None, true_column='median_proximal_vs_distal_logodds', figsize=(12, 3), width_ratios=[1, 7], logo_height=1.0, usage_unit='log', plot_snvs=False, seq_trim_start=0, seq_trim_end=164, plot_start=0, plot_end=164, pas_downscaling=1.0, save_figs=False, fig_name=None, fig_dpi=300) :
    """Plot sequence logos for a set of designed MPRA subexperiments.

    Draws up to four rows: the mean observed PWM across all wt/sampled
    sequences, the corresponding SeqProp-optimized PWM, and the same pair for
    the single wild-type cluster selected by `max_index` (by descending mean
    logodds of `true_column`; 'mid' picks the median cluster). Each row can
    carry a stats panel and an optional percentile-vs-human panel.

    NOTE(review): the default width_ratios=[1, 7] has two entries but the
    plot_percentile grid has three columns — presumably callers pass a
    3-element list when plot_percentile=True; confirm.
    NOTE(review): max_index=None with plot_max_logo=True indexes a numpy array
    with None (adds an axis); callers appear expected to pass an int or 'mid'.
    """
    #Make sequence logo
    df_seqs = df.query("variant == 'wt' or variant == 'sampled'")
    df_seqs = df_seqs.loc[df_seqs.subexperiment.isin(subexperiments)]
    #Mean logos
    seqs = list(df_seqs['master_seq'].values)
    n_seqs = len(seqs)
    pwm = get_pwm(seqs)
    wt_seqs = list(df_seqs['wt_seq'].unique())
    wt_mean_logodds = np.zeros(len(wt_seqs))
    wt_std_logodds = np.zeros(len(wt_seqs))
    #Get wt seq cluster statistics
    for i, wt_seq in enumerate(wt_seqs) :
        wt_mean_logodds[i] = np.mean(df_seqs.query("wt_seq == '" + wt_seq + "'")[true_column])
        wt_std_logodds[i] = np.std(df_seqs.query("wt_seq == '" + wt_seq + "'")[true_column])
    # Sum the per-cluster optimized PWMs (one per wt sequence), then normalize.
    opt_pwm = np.zeros(pwm.shape)
    for wt_seq in wt_seqs :
        subexperiment = list(df_seqs.query("wt_seq == '" + wt_seq + "'")['subexperiment'].values)[0]
        subexp_pwm = np.vstack([np.ones((1, 4)) * 0.25, find_wt_pwm(wt_seq, max_iso_pwm_dict[gene + '_' + subexperiment])])[:164, :]
        opt_pwm += subexp_pwm
    # fixed_seq[j] marks positions where the optimized PWM is deterministic.
    fixed_seq = []
    if np.sum(opt_pwm) > 0 :
        for j in range(0, opt_pwm.shape[0]) :
            if np.sum(opt_pwm[j, :]) > 0 :
                opt_pwm[j, :] /= np.sum(opt_pwm[j, :])
            if np.max(opt_pwm[j, :]) == 1. :
                fixed_seq.append(True)
            else :
                fixed_seq.append(False)
    #Slice according to seq trim index
    seqs = [seq[seq_trim_start: seq_trim_end] for seq in seqs]
    fixed_seq = fixed_seq[seq_trim_start: seq_trim_end]
    # NOTE(review): pwm/opt_pwm are (positions, 4); slicing axis 1 with the seq
    # trim bounds looks like it was meant for axis 0 — confirm (with the 0:164
    # defaults this slice is a no-op on the 4-column axis).
    pwm = pwm[:, seq_trim_start: seq_trim_end]
    opt_pwm = opt_pwm[:, seq_trim_start: seq_trim_end]
    pwm += 0.001
    for j in range(0, pwm.shape[0]) :
        pwm[j, :] /= np.sum(pwm[j, :])
    #Plot actual array pwm
    # Per-position information content (2 - entropy) scales letter heights.
    entropy = np.zeros(pwm.shape)
    entropy[pwm > 0] = pwm[pwm > 0] * -np.log2(pwm[pwm > 0])
    entropy = np.sum(entropy, axis=1)
    conservation = 2 - entropy
    fig = plt.figure(figsize=figsize)
    # Figure layout: one grid row per enabled logo variant.
    n_rows = 0
    if plot_actual_pwm :
        n_rows += 1
    if plot_opt_pwm :
        n_rows += 1
    if plot_mean_logo and plot_max_logo :
        n_rows *= 2
    gs = None
    if plot_percentile :
        gs = gridspec.GridSpec(n_rows, 3, width_ratios=width_ratios, height_ratios=[1 for k in range(n_rows)])
    else :
        gs = gridspec.GridSpec(n_rows, 2, width_ratios=[width_ratios[0], width_ratios[-1]], height_ratios=[1 for k in range(n_rows)])
    # Axes: ax0/ax2/ax4/ax6 = stats panels, ax8-ax11 = percentile panels,
    # ax1/ax3/ax5/ax7 = logo panels; any may stay None depending on flags.
    ax0 = None
    ax1 = None
    ax8 = None
    ax2 = None
    ax3 = None
    ax9 = None
    ax4 = None
    ax5 = None
    ax10 = None
    ax6 = None
    ax7 = None
    ax11 = None
    row_i = 0
    logo_col = 2
    if not plot_percentile :
        logo_col = 1
    if plot_mean_logo :
        if plot_actual_pwm :
            ax0 = plt.subplot(gs[row_i, 0])
            ax1 = plt.subplot(gs[row_i, logo_col])
            if plot_percentile :
                ax8 = plt.subplot(gs[row_i, 1])
            row_i += 1
        if plot_opt_pwm :
            ax2 = plt.subplot(gs[row_i, 0])
            ax3 = plt.subplot(gs[row_i, logo_col])
            if plot_percentile :
                ax9 = plt.subplot(gs[row_i, 1])
            row_i += 1
    if plot_max_logo :
        if plot_actual_pwm :
            ax4 = plt.subplot(gs[row_i, 0])
            ax5 = plt.subplot(gs[row_i, logo_col])
            if plot_percentile :
                ax10 = plt.subplot(gs[row_i, 1])
            row_i += 1
        if plot_opt_pwm :
            ax6 = plt.subplot(gs[row_i, 0])
            ax7 = plt.subplot(gs[row_i, logo_col])
            if plot_percentile :
                ax11 = plt.subplot(gs[row_i, 1])
            row_i += 1
    stats_ax = [ax0, ax2, ax4, ax6]
    perc_ax = [ax8, ax9, ax10, ax11]
    logo_ax = [ax1, ax3, ax5, ax7]
    # Hide the decoration on all stats / percentile panels that were created.
    if plot_mean_logo :
        if plot_actual_pwm :
            plt.sca(stats_ax[0])
            plt.axis('off')
        if plot_opt_pwm :
            plt.sca(stats_ax[1])
            plt.axis('off')
    if plot_max_logo :
        if plot_actual_pwm :
            plt.sca(stats_ax[2])
            plt.axis('off')
        if plot_opt_pwm :
            plt.sca(stats_ax[3])
            plt.axis('off')
    if plot_percentile :
        if plot_mean_logo :
            if plot_actual_pwm :
                plt.sca(perc_ax[0])
                plt.axis('off')
            if plot_opt_pwm :
                plt.sca(perc_ax[1])
                plt.axis('off')
        if plot_max_logo :
            if plot_actual_pwm :
                plt.sca(perc_ax[2])
                plt.axis('off')
            if plot_opt_pwm :
                plt.sca(perc_ax[3])
                plt.axis('off')
    # Sorted human reference distribution used for the percentile panels.
    # NOTE(review): human_logodds is a Python list; `human_logodds <= mean_logodds`
    # below relies on Python-2 mixed-type comparison semantics — verify the
    # percentile computation is doing what was intended.
    human_logodds = sorted(np.array(np.ravel(df_human[true_column].values)))
    height_base = (1.0 - logo_height) / 2.
    if plot_mean_logo :
        n_samples = len(df_seqs)
        mean_logodds = np.mean(df_seqs[true_column])
        std_logodds = np.std(df_seqs[true_column])
        perc = float(len(np.nonzero(human_logodds <= mean_logodds)[0])) / float(len(df_human))
        perc *= 100.
        annot_text = 'Samples = ' + str(int(n_samples))
        #annot_text += '\nLogodds = ' + str(round(mean_logodds, 2)) + ' +- ' + str(round(std_logodds, 2))
        if usage_unit == 'log' :
            annot_text += '\nLogodds = ' + str(round(mean_logodds, 2))
        else :
            # Sigmoid: convert logodds back to a usage fraction.
            usage = 1. / (1. + np.exp(-mean_logodds))
            annot_text += '\nUsage = ' + str(round(usage, 4))
        annot_text += '\nPerc. = ' + str(round(perc, 2)) + '%'
        side_plot_i = 0
        if not plot_actual_pwm :
            side_plot_i = 1
        stats_ax[side_plot_i].text(0.99, 0.5, annot_text, horizontalalignment='right', verticalalignment='center', transform=stats_ax[side_plot_i].transAxes, color='black', fontsize=12, weight="bold")
        if plot_percentile :
            perc_ax[side_plot_i].plot(np.arange(len(df_human)), human_logodds, linewidth=2, color='black')
            perc_ax[side_plot_i].scatter([len(np.nonzero(human_logodds <= mean_logodds)[0])], [mean_logodds], s=50, c='orange')
            x_coord = len(np.nonzero(human_logodds <= mean_logodds)[0])
            perc_ax[side_plot_i].plot([x_coord, x_coord], [np.min(human_logodds), mean_logodds], color='black', linestyle='--', linewidth=1.5)
        if plot_actual_pwm :
            # Stack letters per position, smallest probability first, scaled by conservation.
            for j in range(plot_start, plot_end) :
                sort_index = np.argsort(pwm[j, :])
                for ii in range(0, 4) :
                    i = sort_index[ii]
                    nt_prob = pwm[j, i] * conservation[j]
                    nt = ''
                    if i == 0 :
                        nt = 'A'
                    elif i == 1 :
                        nt = 'C'
                    elif i == 2 :
                        nt = 'G'
                    elif i == 3 :
                        nt = 'T'
                    color = None
                    if fixed_seq[j] and black_fixed_seq :
                        color = 'black'
                    if ii == 0 :
                        letterAt(nt, j + 0.5, height_base, nt_prob * logo_height, logo_ax[0], color=color)
                    else :
                        prev_prob = np.sum(pwm[j, sort_index[:ii]] * conservation[j]) * logo_height
                        letterAt(nt, j + 0.5, height_base + prev_prob, nt_prob * logo_height, logo_ax[0], color=color)
            #ax[0].plot([0, pwm.shape[0]], [0, 1], color='black', linestyle='--')
            plt.sca(logo_ax[0])
            plt.xlim((plot_start, plot_end))
            plt.ylim((0, 2))
            plt.xticks([], [])
            plt.yticks([], [])
            plt.axis('off')
            logo_ax[0].axhline(y=0.01 + height_base, color='black', linestyle='-', linewidth=2)
        #Plot optimization pwm
        entropy = np.zeros(opt_pwm.shape)
        entropy[opt_pwm > 0] = opt_pwm[opt_pwm > 0] * -np.log2(opt_pwm[opt_pwm > 0])
        entropy = np.sum(entropy, axis=1)
        conservation = 2 - entropy
        if plot_opt_pwm :
            for j in range(plot_start, plot_end) :
                sort_index = np.argsort(opt_pwm[j, :])
                for ii in range(0, 4) :
                    i = sort_index[ii]
                    nt_prob = opt_pwm[j, i] * conservation[j]
                    nt = ''
                    if i == 0 :
                        nt = 'A'
                    elif i == 1 :
                        nt = 'C'
                    elif i == 2 :
                        nt = 'G'
                    elif i == 3 :
                        nt = 'T'
                    color = None
                    if fixed_seq[j] and black_fixed_seq :
                        color = 'black'
                    if ii == 0 :
                        letterAt(nt, j + 0.5, height_base, nt_prob * logo_height, logo_ax[1], color=color)
                    else :
                        prev_prob = np.sum(opt_pwm[j, sort_index[:ii]] * conservation[j]) * logo_height
                        letterAt(nt, j + 0.5, height_base + prev_prob, nt_prob * logo_height, logo_ax[1], color=color)
            #ax[0].plot([0, pwm.shape[0]], [0, 1], color='black', linestyle='--')
            plt.sca(logo_ax[1])
            plt.xlim((plot_start, plot_end))
            plt.ylim((0, 2))
            plt.xticks([], [])
            plt.yticks([], [])
            plt.axis('off')
            logo_ax[1].axhline(y=0.01 + height_base, color='black', linestyle='-', linewidth=2)
    if plot_max_logo :
        # Pick the wild-type cluster to showcase: highest mean logodds by
        # default, or the median cluster when max_index == 'mid'.
        wt_max_sort_index = np.argsort(wt_mean_logodds)[::-1]
        wt_max_index = 0
        if max_index == 'mid' :
            wt_max_index = wt_max_sort_index[int(len(wt_max_sort_index) / 2)]
        else :
            wt_max_index = wt_max_sort_index[max_index]
        df_seq = df_seqs.query("wt_seq == '" + wt_seqs[wt_max_index] + "'")
        seqs = list(df_seq['master_seq'].values)
        n_seqs = len(seqs)
        pwm = get_pwm(seqs)
        wt_seq = wt_seqs[wt_max_index]
        n_samples = len(df_seq)
        wt_mean_logodds = wt_mean_logodds[wt_max_index]
        wt_std_logodds = wt_std_logodds[wt_max_index]
        if override_mean_stats :
            n_samples = len(df_seqs)
            wt_mean_logodds = np.mean(df_seqs[true_column])
            wt_std_logodds = np.std(df_seqs[true_column])
        subexperiment = list(df_seqs.query("wt_seq == '" + wt_seq + "'")['subexperiment'].values)[0]
        opt_pwm = np.vstack([np.ones((1, 4)) * 0.25, find_wt_pwm(wt_seq, max_iso_pwm_dict[gene + '_' + subexperiment])])[:164, :]
        if np.sum(opt_pwm) > 0 :
            for j in range(0, opt_pwm.shape[0]) :
                if np.sum(opt_pwm[j, :]) > 0 :
                    opt_pwm[j, :] /= np.sum(opt_pwm[j, :])
        #Slice according to seq trim index
        seqs = [seq[seq_trim_start: seq_trim_end] for seq in seqs]
        # NOTE(review): same suspected axis-1 slice as above — confirm.
        pwm = pwm[:, seq_trim_start: seq_trim_end]
        opt_pwm = opt_pwm[:, seq_trim_start: seq_trim_end]
        pwm += 0.001
        for j in range(0, pwm.shape[0]) :
            pwm[j, :] /= np.sum(pwm[j, :])
        #Plot actual array pwm
        entropy = np.zeros(pwm.shape)
        entropy[pwm > 0] = pwm[pwm > 0] * -np.log2(pwm[pwm > 0])
        entropy = np.sum(entropy, axis=1)
        conservation = 2 - entropy
        perc = float(len(np.nonzero(human_logodds <= wt_mean_logodds)[0])) / float(len(df_human))
        perc *= 100.
        annot_text = 'Samples = ' + str(int(n_samples))
        #annot_text += '\nLogodds = ' + str(round(wt_mean_logodds, 2)) + ' +- ' + str(round(wt_std_logodds, 2))
        if usage_unit == 'log' :
            annot_text += '\nLogodds = ' + str(round(wt_mean_logodds, 2))
        else :
            usage = 1. / (1. + np.exp(-wt_mean_logodds))
            annot_text += '\nUsage = ' + str(round(usage, 4))
        annot_text += '\nPerc. = ' + str(round(perc, 2)) + '%'
        side_plot_i = 2
        if not plot_actual_pwm :
            side_plot_i = 3
        stats_ax[side_plot_i].text(0.99, 0.5, annot_text, horizontalalignment='right', verticalalignment='center', transform=stats_ax[side_plot_i].transAxes, color='black', fontsize=12, weight="bold")
        if plot_percentile :
            perc_ax[side_plot_i].plot(np.arange(len(df_human)), human_logodds, linewidth=2, color='black')
            perc_ax[side_plot_i].scatter([len(np.nonzero(human_logodds <= wt_mean_logodds)[0])], [wt_mean_logodds], s=50, c='orange')
            x_coord = len(np.nonzero(human_logodds <= wt_mean_logodds)[0])
            perc_ax[side_plot_i].plot([x_coord, x_coord], [np.min(human_logodds), wt_mean_logodds], color='black', linestyle='--', linewidth=1.5)
        if plot_actual_pwm :
            for j in range(plot_start, plot_end) :
                sort_index = np.argsort(pwm[j, :])
                for ii in range(0, 4) :
                    i = sort_index[ii]
                    nt_prob = pwm[j, i] * conservation[j]
                    nt = ''
                    if i == 0 :
                        nt = 'A'
                    elif i == 1 :
                        nt = 'C'
                    elif i == 2 :
                        nt = 'G'
                    elif i == 3 :
                        nt = 'T'
                    color = None
                    if fixed_seq[j] and black_fixed_seq :
                        color = 'black'
                    if ii == 0 :
                        letterAt(nt, j + 0.5, height_base, nt_prob * logo_height, logo_ax[2], color=color)
                    else :
                        prev_prob = np.sum(pwm[j, sort_index[:ii]] * conservation[j]) * logo_height
                        letterAt(nt, j + 0.5, height_base + prev_prob, nt_prob * logo_height, logo_ax[2], color=color)
            #ax[0].plot([0, pwm.shape[0]], [0, 1], color='black', linestyle='--')
            plt.sca(logo_ax[2])
            plt.xlim((plot_start, plot_end))
            plt.ylim((0, 2))
            plt.xticks([], [])
            plt.yticks([], [])
            plt.axis('off')
            logo_ax[2].axhline(y=0.01 + height_base, color='black', linestyle='-', linewidth=2)
        #Plot optimization pwm
        entropy = np.zeros(opt_pwm.shape)
        entropy[opt_pwm > 0] = opt_pwm[opt_pwm > 0] * -np.log2(opt_pwm[opt_pwm > 0])
        entropy = np.sum(entropy, axis=1)
        conservation = 2 - entropy
        if plot_opt_pwm :
            for j in range(plot_start, plot_end) :
                sort_index = np.argsort(opt_pwm[j, :])
                for ii in range(0, 4) :
                    i = sort_index[ii]
                    nt_prob = opt_pwm[j, i] * conservation[j]
                    nt = ''
                    if i == 0 :
                        nt = 'A'
                    elif i == 1 :
                        nt = 'C'
                    elif i == 2 :
                        nt = 'G'
                    elif i == 3 :
                        nt = 'T'
                    color = None
                    if fixed_seq[j] and black_fixed_seq :
                        color = 'black'
                    if ii == 0 :
                        letterAt(nt, j + 0.5, height_base, nt_prob * logo_height, logo_ax[3], color=color)
                    else :
                        prev_prob = np.sum(opt_pwm[j, sort_index[:ii]] * conservation[j]) * logo_height
                        letterAt(nt, j + 0.5, height_base + prev_prob, nt_prob * logo_height, logo_ax[3], color=color)
            #ax[0].plot([0, pwm.shape[0]], [0, 1], color='black', linestyle='--')
            plt.sca(logo_ax[3])
            plt.xlim((plot_start, plot_end))
            plt.ylim((0, 2))
            plt.xticks([], [])
            plt.yticks([], [])
            plt.axis('off')
            logo_ax[3].axhline(y=0.01 + height_base, color='black', linestyle='-', linewidth=2)
    # Strip all remaining axis decoration before saving/showing.
    for axis in fig.axes :
        axis.get_xaxis().set_visible(False)
        axis.get_yaxis().set_visible(False)
    plt.tight_layout()
    if save_figs :
        plt.savefig(fig_name + '.png', transparent=True, dpi=fig_dpi)
        plt.savefig(fig_name + '.svg')
        plt.savefig(fig_name + '.eps')
    plt.show()
#Max Isoform- optimized sequence PWMs (generated by SeqProp)
def load_max_isoform_pwms() :
    """Load SeqProp-optimized max-isoform PWMs from .npy files into a dict.

    Returns a dict keyed by '<library>_<objective>' (with an optional '_v2'
    suffix for the low-entropy runs), where each value is a numpy array of
    PWMs loaded from 'max_isoform_logos/apa_array_v{1,2,3}/'. Some entries
    are sliced to keep only the first 5 or 10 PWMs of the file.

    No arguments; raises IOError/FileNotFoundError if the .npy files are
    missing (paths are relative to the working directory).
    """
    file_path = 'max_isoform_logos/'
    max_iso_pwm_dict = {}
    #Doubledope, Simple, Tomm5 v1
    max_iso_pwm_dict['doubledope_max_score_punish_cruns_softer'] = np.load(file_path + 'apa_array_v1/doubledope_max_class_max_score_punish_cruns_softer_1_images_20_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['doubledope_max_score_punish_cruns_harder'] = np.load(file_path + 'apa_array_v1/doubledope_max_class_max_score_punish_cruns_harder_1_images_20_tries_final_pwms.npy')[:10,:,:]
    #NOTE: the v1 'aruns' PWMs are loaded from the apa_array_v3 directory, not v1
    max_iso_pwm_dict['doubledope_max_score_punish_cruns_aruns'] = np.load(file_path + 'apa_array_v3/doubledope_max_class_max_score_punish_cruns_aruns_1_images_20_tries_final_pwms.npy')[:10,:,:]
    max_iso_pwm_dict['doubledope_target_00'] = np.load(file_path + 'apa_array_v1/doubledope_max_class_target00_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['doubledope_target_025'] = np.load(file_path + 'apa_array_v1/doubledope_max_class_target025_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['doubledope_target_05'] = np.load(file_path + 'apa_array_v1/doubledope_max_class_target05_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['doubledope_target_075'] = np.load(file_path + 'apa_array_v1/doubledope_max_class_target075_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['doubledope_target_10'] = np.load(file_path + 'apa_array_v1/doubledope_max_class_target10_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['simple_max_score_punish_cruns_softer'] = np.load(file_path + 'apa_array_v1/simple_max_class_max_score_punish_cruns_softer_1_images_20_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['simple_max_score_punish_cruns_harder'] = np.load(file_path + 'apa_array_v1/simple_max_class_max_score_punish_cruns_harder_1_images_20_tries_final_pwms.npy')[:10,:,:]
    #NOTE: 'aruns' again comes from apa_array_v3
    max_iso_pwm_dict['simple_max_score_punish_cruns_aruns'] = np.load(file_path + 'apa_array_v3/simple_max_class_max_score_punish_cruns_aruns_1_images_20_tries_final_pwms.npy')[:10,:,:]
    max_iso_pwm_dict['simple_target_00'] = np.load(file_path + 'apa_array_v1/simple_max_class_target00_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['simple_target_025'] = np.load(file_path + 'apa_array_v1/simple_max_class_target025_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['simple_target_05'] = np.load(file_path + 'apa_array_v1/simple_max_class_target05_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['simple_target_075'] = np.load(file_path + 'apa_array_v1/simple_max_class_target075_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['simple_target_10'] = np.load(file_path + 'apa_array_v1/simple_max_class_target10_1_images_5_tries_final_pwms.npy')
    #tomm5 has no 'aruns' variant
    max_iso_pwm_dict['tomm5_max_score_punish_cruns_softer'] = np.load(file_path + 'apa_array_v1/tomm5_max_class_max_score_punish_cruns_softer_1_images_20_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['tomm5_max_score_punish_cruns_harder'] = np.load(file_path + 'apa_array_v1/tomm5_max_class_max_score_punish_cruns_harder_1_images_20_tries_final_pwms.npy')[:10,:,:]
    max_iso_pwm_dict['tomm5_target_00'] = np.load(file_path + 'apa_array_v1/tomm5_max_class_target00_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['tomm5_target_025'] = np.load(file_path + 'apa_array_v1/tomm5_max_class_target025_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['tomm5_target_05'] = np.load(file_path + 'apa_array_v1/tomm5_max_class_target05_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['tomm5_target_075'] = np.load(file_path + 'apa_array_v1/tomm5_max_class_target075_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['tomm5_target_10'] = np.load(file_path + 'apa_array_v1/tomm5_max_class_target10_1_images_5_tries_final_pwms.npy')
    #APASIX v1 (10-tries runs; hsp/snh/wha have no target series)
    max_iso_pwm_dict['aar_max_score_punish_cruns_softer'] = np.load(file_path + 'apa_array_v1/aar_max_class_max_score_punish_cruns_softer_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['aar_max_score_punish_cruns_harder'] = np.load(file_path + 'apa_array_v1/aar_max_class_max_score_punish_cruns_harder_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['aar_target_00'] = np.load(file_path + 'apa_array_v1/aar_max_class_target00_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['aar_target_025'] = np.load(file_path + 'apa_array_v1/aar_max_class_target025_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['aar_target_05'] = np.load(file_path + 'apa_array_v1/aar_max_class_target05_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['aar_target_075'] = np.load(file_path + 'apa_array_v1/aar_max_class_target075_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['aar_target_10'] = np.load(file_path + 'apa_array_v1/aar_max_class_target10_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['atr_max_score_punish_cruns_softer'] = np.load(file_path + 'apa_array_v1/atr_max_class_max_score_punish_cruns_softer_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['atr_max_score_punish_cruns_harder'] = np.load(file_path + 'apa_array_v1/atr_max_class_max_score_punish_cruns_harder_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['atr_target_00'] = np.load(file_path + 'apa_array_v1/atr_max_class_target00_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['atr_target_025'] = np.load(file_path + 'apa_array_v1/atr_max_class_target025_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['atr_target_05'] = np.load(file_path + 'apa_array_v1/atr_max_class_target05_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['atr_target_075'] = np.load(file_path + 'apa_array_v1/atr_max_class_target075_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['atr_target_10'] = np.load(file_path + 'apa_array_v1/atr_max_class_target10_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['hsp_max_score_punish_cruns_softer'] = np.load(file_path + 'apa_array_v1/hsp_max_class_max_score_punish_cruns_softer_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['hsp_max_score_punish_cruns_harder'] = np.load(file_path + 'apa_array_v1/hsp_max_class_max_score_punish_cruns_harder_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['snh_max_score_punish_cruns_softer'] = np.load(file_path + 'apa_array_v1/snh_max_class_max_score_punish_cruns_softer_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['snh_max_score_punish_cruns_harder'] = np.load(file_path + 'apa_array_v1/snh_max_class_max_score_punish_cruns_harder_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['sox_max_score_punish_cruns_softer'] = np.load(file_path + 'apa_array_v1/sox_max_class_max_score_punish_cruns_softer_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['sox_max_score_punish_cruns_harder'] = np.load(file_path + 'apa_array_v1/sox_max_class_max_score_punish_cruns_harder_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['sox_target_00'] = np.load(file_path + 'apa_array_v1/sox_max_class_target00_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['sox_target_025'] = np.load(file_path + 'apa_array_v1/sox_max_class_target025_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['sox_target_05'] = np.load(file_path + 'apa_array_v1/sox_max_class_target05_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['sox_target_075'] = np.load(file_path + 'apa_array_v1/sox_max_class_target075_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['sox_target_10'] = np.load(file_path + 'apa_array_v1/sox_max_class_target10_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['wha_max_score_punish_cruns_softer'] = np.load(file_path + 'apa_array_v1/wha_max_class_max_score_punish_cruns_softer_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['wha_max_score_punish_cruns_harder'] = np.load(file_path + 'apa_array_v1/wha_max_class_max_score_punish_cruns_harder_1_images_10_tries_final_pwms.npy')[:5,:,:]
    #Doubledope, Simple, Tomm5 v2 low entropy (adds an 'aruns_cstf' variant for doubledope/simple)
    max_iso_pwm_dict['doubledope_max_score_punish_cruns_softer_v2'] = np.load(file_path + 'apa_array_v2/doubledope_max_class_max_score_punish_cruns_softer_1_images_20_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['doubledope_max_score_punish_cruns_harder_v2'] = np.load(file_path + 'apa_array_v2/doubledope_max_class_max_score_punish_cruns_harder_1_images_20_tries_final_pwms.npy')[:10,:,:]
    max_iso_pwm_dict['doubledope_max_score_punish_cruns_aruns_v2'] = np.load(file_path + 'apa_array_v2/doubledope_max_class_max_score_punish_cruns_aruns_1_images_20_tries_final_pwms.npy')[:10,:,:]
    max_iso_pwm_dict['doubledope_max_score_punish_cruns_aruns_cstf_v2'] = np.load(file_path + 'apa_array_v2/doubledope_max_class_max_score_punish_cruns_aruns_cstf_1_images_20_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['doubledope_target_00_v2'] = np.load(file_path + 'apa_array_v2/doubledope_max_class_target00_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['doubledope_target_025_v2'] = np.load(file_path + 'apa_array_v2/doubledope_max_class_target025_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['doubledope_target_05_v2'] = np.load(file_path + 'apa_array_v2/doubledope_max_class_target05_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['doubledope_target_075_v2'] = np.load(file_path + 'apa_array_v2/doubledope_max_class_target075_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['doubledope_target_10_v2'] = np.load(file_path + 'apa_array_v2/doubledope_max_class_target10_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['simple_max_score_punish_cruns_softer_v2'] = np.load(file_path + 'apa_array_v2/simple_max_class_max_score_punish_cruns_softer_1_images_20_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['simple_max_score_punish_cruns_harder_v2'] = np.load(file_path + 'apa_array_v2/simple_max_class_max_score_punish_cruns_harder_1_images_20_tries_final_pwms.npy')[:10,:,:]
    max_iso_pwm_dict['simple_max_score_punish_cruns_aruns_v2'] = np.load(file_path + 'apa_array_v2/simple_max_class_max_score_punish_cruns_aruns_1_images_20_tries_final_pwms.npy')[:10,:,:]
    max_iso_pwm_dict['simple_max_score_punish_cruns_aruns_cstf_v2'] = np.load(file_path + 'apa_array_v2/simple_max_class_max_score_punish_cruns_aruns_cstf_1_images_20_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['simple_target_00_v2'] = np.load(file_path + 'apa_array_v2/simple_max_class_target00_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['simple_target_025_v2'] = np.load(file_path + 'apa_array_v2/simple_max_class_target025_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['simple_target_05_v2'] = np.load(file_path + 'apa_array_v2/simple_max_class_target05_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['simple_target_075_v2'] = np.load(file_path + 'apa_array_v2/simple_max_class_target075_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['simple_target_10_v2'] = np.load(file_path + 'apa_array_v2/simple_max_class_target10_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['tomm5_max_score_punish_cruns_softer_v2'] = np.load(file_path + 'apa_array_v2/tomm5_max_class_max_score_punish_cruns_softer_1_images_20_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['tomm5_max_score_punish_cruns_harder_v2'] = np.load(file_path + 'apa_array_v2/tomm5_max_class_max_score_punish_cruns_harder_1_images_20_tries_final_pwms.npy')[:10,:,:]
    max_iso_pwm_dict['tomm5_target_00_v2'] = np.load(file_path + 'apa_array_v2/tomm5_max_class_target00_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['tomm5_target_025_v2'] = np.load(file_path + 'apa_array_v2/tomm5_max_class_target025_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['tomm5_target_05_v2'] = np.load(file_path + 'apa_array_v2/tomm5_max_class_target05_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['tomm5_target_075_v2'] = np.load(file_path + 'apa_array_v2/tomm5_max_class_target075_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['tomm5_target_10_v2'] = np.load(file_path + 'apa_array_v2/tomm5_max_class_target10_1_images_5_tries_final_pwms.npy')
    #APASIX v2 low entropy
    max_iso_pwm_dict['aar_max_score_punish_cruns_softer_v2'] = np.load(file_path + 'apa_array_v2/aar_max_class_max_score_punish_cruns_softer_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['aar_max_score_punish_cruns_harder_v2'] = np.load(file_path + 'apa_array_v2/aar_max_class_max_score_punish_cruns_harder_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['aar_target_00_v2'] = np.load(file_path + 'apa_array_v2/aar_max_class_target00_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['aar_target_025_v2'] = np.load(file_path + 'apa_array_v2/aar_max_class_target025_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['aar_target_05_v2'] = np.load(file_path + 'apa_array_v2/aar_max_class_target05_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['aar_target_075_v2'] = np.load(file_path + 'apa_array_v2/aar_max_class_target075_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['aar_target_10_v2'] = np.load(file_path + 'apa_array_v2/aar_max_class_target10_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['atr_max_score_punish_cruns_softer_v2'] = np.load(file_path + 'apa_array_v2/atr_max_class_max_score_punish_cruns_softer_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['atr_max_score_punish_cruns_harder_v2'] = np.load(file_path + 'apa_array_v2/atr_max_class_max_score_punish_cruns_harder_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['atr_target_00_v2'] = np.load(file_path + 'apa_array_v2/atr_max_class_target00_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['atr_target_025_v2'] = np.load(file_path + 'apa_array_v2/atr_max_class_target025_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['atr_target_05_v2'] = np.load(file_path + 'apa_array_v2/atr_max_class_target05_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['atr_target_075_v2'] = np.load(file_path + 'apa_array_v2/atr_max_class_target075_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['atr_target_10_v2'] = np.load(file_path + 'apa_array_v2/atr_max_class_target10_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['hsp_max_score_punish_cruns_softer_v2'] = np.load(file_path + 'apa_array_v2/hsp_max_class_max_score_punish_cruns_softer_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['hsp_max_score_punish_cruns_harder_v2'] = np.load(file_path + 'apa_array_v2/hsp_max_class_max_score_punish_cruns_harder_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['snh_max_score_punish_cruns_softer_v2'] = np.load(file_path + 'apa_array_v2/snh_max_class_max_score_punish_cruns_softer_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['snh_max_score_punish_cruns_harder_v2'] = np.load(file_path + 'apa_array_v2/snh_max_class_max_score_punish_cruns_harder_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['sox_max_score_punish_cruns_softer_v2'] = np.load(file_path + 'apa_array_v2/sox_max_class_max_score_punish_cruns_softer_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['sox_max_score_punish_cruns_harder_v2'] = np.load(file_path + 'apa_array_v2/sox_max_class_max_score_punish_cruns_harder_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['sox_target_00_v2'] = np.load(file_path + 'apa_array_v2/sox_max_class_target00_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['sox_target_025_v2'] = np.load(file_path + 'apa_array_v2/sox_max_class_target025_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['sox_target_05_v2'] = np.load(file_path + 'apa_array_v2/sox_max_class_target05_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['sox_target_075_v2'] = np.load(file_path + 'apa_array_v2/sox_max_class_target075_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['sox_target_10_v2'] = np.load(file_path + 'apa_array_v2/sox_max_class_target10_1_images_5_tries_final_pwms.npy')
    max_iso_pwm_dict['wha_max_score_punish_cruns_softer_v2'] = np.load(file_path + 'apa_array_v2/wha_max_class_max_score_punish_cruns_softer_1_images_10_tries_final_pwms.npy')[:5,:,:]
    max_iso_pwm_dict['wha_max_score_punish_cruns_harder_v2'] = np.load(file_path + 'apa_array_v2/wha_max_class_max_score_punish_cruns_harder_1_images_10_tries_final_pwms.npy')[:5,:,:]
    return max_iso_pwm_dict
#Max Cut Helper Functions
def plot_cut_profile(cut_df_filtered, cut_to_experiment, cut_poses, objective_poses, human_cutprob, figsize=(8, 5.5), save_fig_name=None, fig_dpi=150, plot_mode='mean', n_samples=None) :
    """Plot aggregated cleavage-probability profiles, one curve per cut objective.

    Parameters
    ----------
    cut_df_filtered : DataFrame with 'subexperiment', 'proxcut_prob_true'
        (list-valued per row) and 'loss_logloss' columns.
    cut_to_experiment : dict mapping an objective cut position to the list of
        subexperiment names that targeted it.
    cut_poses : x-coordinates (cleavage positions) of the profile values.
    objective_poses : positions used as x-axis tick labels.
    human_cutprob : native cut-probability profile; its argmax is marked with
        a dashed vertical 'Native' line.
    figsize, save_fig_name, fig_dpi : figure options; when save_fig_name is
        given the figure is saved as .png/.svg/.eps.
    plot_mode : 'mean' averages all rows (or a random subsample of n_samples
        rows); 'max' keeps the n_samples rows with lowest logloss (all rows
        when n_samples is None) and averages them.
    n_samples : optional number of sequences to aggregate over.
    """
    f = plt.figure(figsize=figsize)
    ls = []
    for cut_pos in cut_to_experiment :
        #Rows belonging to this cut objective
        keep_index = np.nonzero(cut_df_filtered['subexperiment'].isin(cut_to_experiment[cut_pos]))[0]
        prox_prob = np.array(cut_df_filtered.iloc[keep_index]['proxcut_prob_true'].values.tolist())
        if plot_mode == 'mean' :
            if n_samples is not None :
                #Average over a random subsample of n_samples sequences
                shuffle_index = np.arange(prox_prob.shape[0])
                np.random.shuffle(shuffle_index)
                prox_prob = prox_prob[shuffle_index[:n_samples], :]
            prox_prob = prox_prob.mean(axis=0)
            prox_prob = np.ravel(prox_prob)
        elif plot_mode == 'max' :
            #Keep the best-performing (lowest logloss) sequences first
            losses = np.array(cut_df_filtered.iloc[keep_index]['loss_logloss'].values)
            sort_index = np.argsort(losses)
            prox_prob = prox_prob[sort_index, :]
            prox_prob = prox_prob[:n_samples, :]
            #Bug fix: n_samples may be None (the slice above then keeps every
            #row); the previous 'n_samples > 1' comparison raised TypeError.
            if n_samples is None or n_samples > 1 :
                prox_prob = prox_prob.mean(axis=0)
            prox_prob = np.ravel(prox_prob)
        l1, = plt.plot(cut_poses, prox_prob, linewidth=2, label='Objective ' + str(cut_pos))
        #Close the polygon down to y=0 so the area under the curve can be shaded
        fill_x_coords = np.concatenate([np.array([np.min(cut_poses)]), cut_poses, np.array([np.max(cut_poses)])], axis=0)
        fill_y_coords = np.concatenate([np.array([0]), prox_prob, np.array([0])], axis=0)
        plt.fill(fill_x_coords, fill_y_coords, alpha=0.2)
        ls.append(l1)
    #Mark the native (human) peak cut position
    la = plt.axvline(x=np.argmax(human_cutprob) + cut_poses[0] + 1, linewidth=2, linestyle='--', color='black', alpha=0.7, label='Native')
    ls.append(la)
    plt.legend(handles=ls, fontsize=14, loc='upper right', bbox_to_anchor=(1.05, 1.3), frameon=True, framealpha=1.0)
    plt.xticks(objective_poses, objective_poses, fontsize=18)
    plt.yticks(fontsize=18)
    plt.xlim(np.min(cut_poses), np.max(cut_poses))
    plt.ylim(0)
    plt.xlabel('Cleavage position from end of CSE', fontsize=18)
    plt.ylabel('Cleavage proportion', fontsize=18)
    plt.tight_layout()
    if save_fig_name is not None :
        plt.savefig(save_fig_name + '.png', transparent=True, dpi=fig_dpi)
        plt.savefig(save_fig_name + '.svg')
        plt.savefig(save_fig_name + '.eps')
    plt.show()
def plot_cut_map(cut_df_filtered, cut_to_experiment, cut_poses, objective_poses, human_cutprob, figsize=(4, 6), save_fig_name=None, fig_dpi=150, plot_mode='mean', n_samples=None) :
    """Heat-map of per-sequence cleavage probabilities, grouped by cut objective.

    Each objective contributes a horizontal band of sequences (rows); bands
    are separated by dashed horizontal lines and each objective's target
    position is marked with a dashed vertical line. Rows are subsampled
    ('mean') or logloss-ranked then shuffled ('max') before stacking.
    """
    band_probs = []
    band_tick_coords = []
    band_labels = []
    row_offset = 0.
    f = plt.figure(figsize=figsize)
    #Iterate objectives in ascending order of target cut position
    for target_pos in sorted(cut_to_experiment) :
        member_rows = np.nonzero(cut_df_filtered['subexperiment'].isin(cut_to_experiment[target_pos]))[0]
        probs = np.array(cut_df_filtered.iloc[member_rows]['proxcut_prob_true'].values.tolist())
        if plot_mode == 'mean' :
            perm = np.arange(probs.shape[0])
            np.random.shuffle(perm)
            if n_samples is not None :
                probs = probs[perm[:n_samples], :]
        elif plot_mode == 'max' :
            #Lowest-logloss sequences first, then shuffle their display order
            order = np.argsort(np.array(cut_df_filtered.iloc[member_rows]['loss_logloss'].values))
            probs = probs[order, :][:n_samples, :]
            perm = np.arange(probs.shape[0])
            np.random.shuffle(perm)
            probs = probs[perm, :]
        band_probs.append(probs)
        band_labels.append(str(target_pos))
        band_height = float(probs.shape[0])
        band_tick_coords.append(row_offset + band_height / 2.)
        row_offset += band_height
        plt.axhline(y=row_offset, color='black', linewidth=2, linestyle='--')
        plt.axvline(x=target_pos, color='orange', linewidth=2, linestyle='--', alpha=0.2)
    stacked = np.vstack(band_probs)
    #Left-pad with zeros so the column index equals the absolute cleavage position
    padded = np.concatenate([np.zeros((stacked.shape[0], cut_poses[0])), stacked], axis=1)
    plt.imshow(padded, cmap='Greens', vmin=0.05, vmax=0.3, aspect='auto')
    plt.xlabel('Cleavage position', fontsize=18)
    plt.ylabel('Cleavage objective', fontsize=18)
    plt.xlim(1, stacked.shape[1] + 1)
    ax = plt.gca()
    ax.set_xticks(objective_poses)
    ax.set_xticklabels(objective_poses, fontsize=14, ha='center', va='top')
    ax.set_yticks(band_tick_coords)
    ax.set_yticklabels(band_labels, fontsize=14, ha='right', va='center')
    plt.tight_layout()
    if save_fig_name is not None :
        plt.savefig(save_fig_name + '.png', transparent=True, dpi=fig_dpi)
        plt.savefig(save_fig_name + '.svg')
        plt.savefig(save_fig_name + '.eps')
    plt.show()
def plot_position_scatter(cut_df_filtered, cut_to_experiment, cut_poses, objective_poses, human_cutprob, variant_filter="variant == 'wt'", figsize=(5, 5), save_fig_name=None, fig_dpi=150) :
    """Scatter predicted vs. observed mean cut position, colored per objective.

    Positions are re-centered by subtracting (56 + cut_poses[0]) before
    plotting; the title reports R^2 (squared Pearson r) over all points that
    pass variant_filter.
    """
    f = plt.figure(figsize=figsize)
    observed_all = []
    predicted_all = []
    #One scatter series per objective, in ascending target-position order
    for objective in sorted(cut_to_experiment) :
        rows = np.nonzero(cut_df_filtered['subexperiment'].isin(cut_to_experiment[objective]))[0]
        subset = cut_df_filtered.iloc[rows].query(variant_filter)
        observed = np.ravel(np.array(subset['avgpos_true']))
        predicted = np.ravel(np.array(subset['avgpos_pred']))
        observed_all.append(observed)
        predicted_all.append(predicted)
        recenter = 56 + cut_poses[0]
        plt.scatter(predicted - recenter, observed - recenter, s=15, label=str(objective), alpha=0.5)
    observed_all = np.ravel(np.concatenate(observed_all, axis=0))
    predicted_all = np.ravel(np.concatenate(predicted_all, axis=0))
    rval, _ = pearsonr(observed_all, predicted_all)
    plt.xlabel('Predicted cut position', fontsize=16)
    plt.ylabel('Observed cut position', fontsize=16)
    plt.xticks(objective_poses, objective_poses, fontsize=12)
    plt.yticks(objective_poses, objective_poses, fontsize=12)
    plt.title('R^2 = ' + str(round(rval * rval, 2)), fontsize=16)
    plt.legend(fontsize=12)
    plt.tight_layout()
    if save_fig_name is not None :
        plt.savefig(save_fig_name + '.png', transparent=True, dpi=fig_dpi)
        plt.savefig(save_fig_name + '.eps')
    plt.show()
def plot_cut_logo(df, df_human, max_cut_pwm_dict, max_cut_yhat_dict, gene, subexperiments, plot_mfe=False, fold_height=0.5, annotate_peaks=False, normalize_probs=False, normalize_range=[57, 105], agg_mode='avg', override_mean_stats=False, plot_percentile=True, plot_mean_logo=True, plot_max_logo=True, plot_actual_pwm=True, plot_opt_pwm=True, black_fixed_seq=True, max_index=None, true_column='median_proximal_vs_distal_logodds_true', cut_column='pooled_cut_prob_true', figsize=(12, 3), width_ratios=[1, 7], logo_height=1.0, usage_unit='log', plot_snvs=False, seq_trim_start=0, seq_trim_end=164, plot_start=0, plot_end=164, pas_downscaling=1.0, save_figs=False, fig_name=None, fig_dpi=300) :
#Make sequence logo
df_seqs = df.copy()#df.query("variant == 'wt' or variant == 'sampled'")
df_seqs = df_seqs.loc[df_seqs.subexperiment.isin(subexperiments)]
#Mean logos
seqs = list(df_seqs['master_seq'].values)
n_seqs = len(seqs)
pwm = get_pwm(seqs)
prob = np.zeros(pwm.shape[0])
for cut_prob in list(df_seqs[cut_column].values) :
if normalize_probs :
if normalize_range is not None :
prob[normalize_range[0]:normalize_range[1]] += cut_prob[normalize_range[0]:normalize_range[1]] / np.sum(cut_prob[normalize_range[0]:normalize_range[1]])
else :
prob += cut_prob[:164] / np.sum(cut_prob[:164])
else :
prob += cut_prob[:164]
if agg_mode in ['avg', 'max'] :
prob /= float(len(seqs))
elif agg_mode in ['pool', 'max'] :
prob /= np.sum(prob)
pred_prob = np.zeros(pwm.shape[0])
for cut_prob in list(df_seqs['cut_prob_pred'].values) :
if normalize_probs :
if normalize_range is not None :
pred_prob[normalize_range[0]:normalize_range[1]] += cut_prob[normalize_range[0]:normalize_range[1]] / np.sum(cut_prob[normalize_range[0]:normalize_range[1]])
else :
pred_prob += cut_prob[:164] / np.sum(cut_prob[:164])
else :
pred_prob += cut_prob[:164]
if agg_mode in ['avg', 'max'] :
pred_prob /= float(len(seqs))
elif agg_mode in ['pool', 'max'] :
pred_prob /= np.sum(pred_prob)
wt_seqs = list(df_seqs['wt_seq'].unique())
wt_mean_logodds = np.zeros(len(wt_seqs))
wt_avgpos = np.zeros(len(wt_seqs))
wt_logloss = np.zeros(len(wt_seqs))
#Get wt seq cluster statistics
for i, wt_seq in enumerate(wt_seqs) :
#wt_mean_logodds[i] = np.mean(df_seqs.query("wt_seq == '" + wt_seq + "'")[true_column])
wt_usage_list = [np.sum(cut_prob[57:105]) for cut_prob in list(df_seqs.query("wt_seq == '" + wt_seq + "'")[cut_column].values)]
if normalize_probs and normalize_range is not None :
wt_usage_list = [np.sum(cut_prob[normalize_range[0]:normalize_range[1]]) / (np.sum(cut_prob[normalize_range[0]:normalize_range[1]]) + cut_prob[-1]) for cut_prob in list(df_seqs.query("wt_seq == '" + wt_seq + "'")[cut_column].values)]
wt_logodds_list = [np.log(p / (1. - p)) for p in wt_usage_list]
wt_mean_logodds[i] = np.mean(np.array(wt_logodds_list))
wt_avgpos[i] = np.mean(df_seqs.query("wt_seq == '" + wt_seq + "'")['avgpos_true'])
if agg_mode == 'max' :
wt_logloss[i] = np.min(df_seqs.query("wt_seq == '" + wt_seq + "'")['loss_logloss'])
else :
wt_logloss[i] = np.mean(df_seqs.query("wt_seq == '" + wt_seq + "'")['loss_logloss'])
opt_pwm = np.zeros(pwm.shape)
opt_prob = np.zeros(pwm.shape[0])
n_opt = 0
for wt_seq in wt_seqs :
subexperiment = list(df_seqs.query("wt_seq == '" + wt_seq + "'")['subexperiment'].values)[0]
subexp_pwm = np.vstack([np.ones((1, 4)) * 0.25, find_wt_pwm(wt_seq, max_cut_pwm_dict[gene + '_' + subexperiment])])[:164, :]
subexp_prob = find_wt_yhat(wt_seq, max_cut_pwm_dict[gene + '_' + subexperiment], max_cut_yhat_dict[gene + '_' + subexperiment])
opt_pwm += subexp_pwm
if normalize_probs :
if normalize_range is not None :
opt_prob[normalize_range[0]:normalize_range[1]] += subexp_prob[normalize_range[0]-1:normalize_range[1]-1] / np.sum(subexp_prob[normalize_range[0]-1:normalize_range[1]-1])
else :
opt_prob[1:] += subexp_prob[:163] / np.sum(subexp_prob[:163])
else :
opt_prob[1:] += subexp_prob[:163]
n_opt += 1.
fixed_seq = []
if np.sum(opt_pwm) > 0 :
if agg_mode in ['avg', 'max'] :
opt_prob /= n_opt
elif agg_mode in ['pool', 'max'] :
opt_prob /= np.sum(opt_prob)
for j in range(0, opt_pwm.shape[0]) :
if np.sum(opt_pwm[j, :]) > 0 :
opt_pwm[j, :] /= np.sum(opt_pwm[j, :])
if np.max(opt_pwm[j, :]) == 1. :
fixed_seq.append(True)
else :
fixed_seq.append(False)
#Slice according to seq trim index
seqs = [seq[seq_trim_start: seq_trim_end] for seq in seqs]
fixed_seq = fixed_seq[seq_trim_start: seq_trim_end]
pwm = pwm[:, seq_trim_start: seq_trim_end]
opt_pwm = opt_pwm[:, seq_trim_start: seq_trim_end]
opt_prob = opt_prob[seq_trim_start: seq_trim_end]
pwm += 0.001
for j in range(0, pwm.shape[0]) :
pwm[j, :] /= np.sum(pwm[j, :])
entropy = np.zeros(pwm.shape)
entropy[pwm > 0] = pwm[pwm > 0] * -np.log2(pwm[pwm > 0])
entropy = np.sum(entropy, axis=1)
conservation = 2 - entropy
fig = plt.figure(figsize=figsize)
n_rows = 0
if plot_actual_pwm :
n_rows += 2
if plot_opt_pwm :
n_rows += 2
if plot_mean_logo and plot_max_logo :
n_rows *= 2
gs = gridspec.GridSpec(n_rows, 2, width_ratios=[width_ratios[0], width_ratios[-1]], height_ratios=[1 for k in range(n_rows)])
ax0 = None
ax1 = None
ax8 = None
ax2 = None
ax3 = None
ax9 = None
ax4 = None
ax5 = None
ax10 = None
ax6 = None
ax7 = None
ax11 = None
#cut axes
ax12 = None
ax13 = None
ax14 = None
ax15 = None
row_i = 1
if plot_mean_logo :
if plot_actual_pwm :
ax0 = plt.subplot(gs[row_i, 0])
ax1 = plt.subplot(gs[row_i, 1])
if plot_percentile :
ax8 = plt.subplot(gs[row_i-1, 0])
ax12 = plt.subplot(gs[row_i-1, 1])
row_i += 2
if plot_opt_pwm :
ax2 = plt.subplot(gs[row_i, 0])
ax3 = plt.subplot(gs[row_i, 1])
if plot_percentile :
ax9 = plt.subplot(gs[row_i-1, 0])
ax13 = plt.subplot(gs[row_i-1, 1])
row_i += 2
if plot_max_logo :
if plot_actual_pwm :
ax4 = plt.subplot(gs[row_i, 0])
ax5 = plt.subplot(gs[row_i, 1])
if plot_percentile :
ax10 = plt.subplot(gs[row_i-1, 0])
ax14 = plt.subplot(gs[row_i-1, 1])
row_i += 2
if plot_opt_pwm :
ax6 = plt.subplot(gs[row_i, 0])
ax7 = plt.subplot(gs[row_i, 1])
if plot_percentile :
ax11 = plt.subplot(gs[row_i-1, 0])
ax15 = plt.subplot(gs[row_i-1, 1])
row_i += 2
stats_ax = [ax0, ax2, ax4, ax6]
perc_ax = [ax8, ax9, ax10, ax11]
logo_ax = [ax1, ax3, ax5, ax7]
cut_ax = [ax12, ax13, ax14, ax15]
if plot_mean_logo :
if plot_actual_pwm :
plt.sca(stats_ax[0])
plt.axis('off')
if plot_opt_pwm :
plt.sca(stats_ax[1])
plt.axis('off')
if plot_max_logo :
if plot_actual_pwm :
plt.sca(stats_ax[2])
plt.axis('off')
if plot_opt_pwm :
plt.sca(stats_ax[3])
plt.axis('off')
if plot_percentile :
if plot_mean_logo :
if plot_actual_pwm :
plt.sca(perc_ax[0])
plt.axis('off')
if plot_opt_pwm :
plt.sca(perc_ax[1])
plt.axis('off')
if plot_max_logo :
if plot_actual_pwm :
plt.sca(perc_ax[2])
plt.axis('off')
if plot_opt_pwm :
plt.sca(perc_ax[3])
plt.axis('off')
human_logodds = sorted(np.array(np.ravel(df_human[true_column[:-5]].values)))
height_base = (1.0 - logo_height) / 2.
objective_pos = int(subexperiments[0].split('_')[-1]) - 49
human_cutprob = np.mean(np.array(df_human[cut_column].values.tolist())[:, 50:110], axis=0)
if plot_mean_logo :
n_samples = len(df_seqs)
mean_logodds = np.mean(wt_mean_logodds)#np.mean(df_seqs[true_column])
std_logodds = np.std(wt_mean_logodds)
perc = float(len(np.nonzero(human_logodds <= mean_logodds)[0])) / float(len(df_human))
perc *= 100.
annot_text = 'Samples = ' + str(int(n_samples))
#annot_text += '\nLogodds = ' + str(round(mean_logodds, 2)) + ' +- ' + str(round(std_logodds, 2))
if usage_unit == 'log' :
annot_text += '\nLogodds = ' + str(round(mean_logodds, 2))
else :
usage = 1. / (1. + np.exp(-mean_logodds))
annot_text += '\nUsage = ' + str(round(usage, 4))
annot_text += '\nPerc. = ' + str(round(perc, 2)) + '%'
side_plot_i = 0
if not plot_actual_pwm :
side_plot_i = 1
stats_ax[side_plot_i].text(0.99, 0.5, annot_text, horizontalalignment='right', verticalalignment='center', transform=stats_ax[side_plot_i].transAxes, color='black', fontsize=12, weight="bold")
if plot_percentile :
perc_ax[side_plot_i].plot(np.arange(len(human_cutprob)), human_cutprob, linewidth=2, color='black')
perc_ax[side_plot_i].scatter([objective_pos], [human_cutprob[objective_pos]], s=50, c='red')
if objective_pos <= 30 :
perc_ax[side_plot_i].annotate('Objective', xy=(objective_pos, human_cutprob[objective_pos]), xycoords='data', xytext=(0.55, 0.8), fontsize=10, weight="bold", color='red', textcoords='axes fraction', arrowprops=dict(connectionstyle="arc3,rad=-.2", headlength=8, headwidth=8, shrink=0.05, width=1.5, color='black'))
else :
perc_ax[side_plot_i].annotate('Objective', xy=(objective_pos, human_cutprob[objective_pos]), xycoords='data', xytext=(0.55, 0.8), fontsize=10, weight="bold", color='red', textcoords='axes fraction', arrowprops=dict(connectionstyle="arc3,rad=.2", headlength=8, headwidth=8, shrink=0.05, width=1.5, color='black'))
if plot_actual_pwm :
l2, = cut_ax[0].plot(np.arange(plot_end - plot_start) + plot_start, prob[plot_start:plot_end], linewidth=3, linestyle='-', label='Observed', color='black', alpha=0.7)
l1, = cut_ax[0].plot(np.arange(plot_end - plot_start) + plot_start, pred_prob[plot_start:plot_end], linewidth=3, linestyle='-', label='Predicted', color='red', alpha=0.7)
if annotate_peaks :
annot_text = str(int(round(prob[objective_pos + 50] * 100, 0))) + '% Cleavage'
cut_ax[2].annotate(annot_text, xy=(objective_pos + 50, prob[objective_pos + 50]), xycoords='data', xytext=(-30, -5), ha='right', fontsize=10, weight="bold", color='black', textcoords='offset points', arrowprops=dict(connectionstyle="arc3,rad=-.1", headlength=8, headwidth=8, shrink=0.15, width=1.5, color='black'))
plt.sca(cut_ax[0])
plt.xlim((plot_start, plot_end))
#plt.ylim((0, 2))
plt.xticks([], [])
plt.yticks([], [])
plt.legend(handles=[l1, l2], fontsize=12, prop=dict(weight='bold'), frameon=False)
plt.axis('off')
for j in range(plot_start, plot_end) :
sort_index = np.argsort(pwm[j, :])
for ii in range(0, 4) :
i = sort_index[ii]
nt_prob = pwm[j, i] * conservation[j]
nt = ''
if i == 0 :
nt = 'A'
elif i == 1 :
nt = 'C'
elif i == 2 :
nt = 'G'
elif i == 3 :
nt = 'T'
color = None
if fixed_seq[j] and black_fixed_seq :
color = 'black'
if ii == 0 :
letterAt(nt, j + 0.5, height_base, nt_prob * logo_height, logo_ax[0], color=color)
else :
prev_prob = np.sum(pwm[j, sort_index[:ii]] * conservation[j]) * logo_height
letterAt(nt, j + 0.5, height_base + prev_prob, nt_prob * logo_height, logo_ax[0], color=color)
#ax[0].plot([0, pwm.shape[0]], [0, 1], color='black', linestyle='--')
plt.sca(logo_ax[0])
plt.xlim((plot_start, plot_end))
plt.ylim((0, 2))
plt.xticks([], [])
plt.yticks([], [])
plt.axis('off')
logo_ax[0].axhline(y=0.01 + height_base, color='black', linestyle='-', linewidth=2)
#Plot optimization pwm
entropy = np.zeros(opt_pwm.shape)
entropy[opt_pwm > 0] = opt_pwm[opt_pwm > 0] * -np.log2(opt_pwm[opt_pwm > 0])
entropy = np.sum(entropy, axis=1)
conservation = 2 - entropy
if plot_opt_pwm :
l2, = cut_ax[1].plot(np.arange(plot_end - plot_start) + plot_start, prob[plot_start:plot_end], linewidth=3, linestyle='-', label='Observed', color='black', alpha=0.7)
l1, = cut_ax[1].plot(np.arange(plot_end - plot_start) + plot_start, opt_prob[plot_start:plot_end], linewidth=3, linestyle='-', label='Predicted', color='red', alpha=0.7)
if annotate_peaks :
annot_text = str(int(round(prob[objective_pos + 50] * 100, 0))) + '% Cleavage'
cut_ax[2].annotate(annot_text, xy=(objective_pos + 50, prob[objective_pos + 50]), xycoords='data', xytext=(-30, -5), ha='right', fontsize=10, weight="bold", color='black', textcoords='offset points', arrowprops=dict(connectionstyle="arc3,rad=-.1", headlength=8, headwidth=8, shrink=0.15, width=1.5, color='black'))
plt.sca(cut_ax[1])
plt.xlim((plot_start, plot_end))
#plt.ylim((0, 2))
plt.xticks([], [])
plt.yticks([], [])
plt.legend(handles=[l1, l2], fontsize=12, prop=dict(weight='bold'), frameon=False)
plt.axis('off')
for j in range(plot_start, plot_end) :
sort_index = np.argsort(opt_pwm[j, :])
for ii in range(0, 4) :
i = sort_index[ii]
nt_prob = opt_pwm[j, i] * conservation[j]
nt = ''
if i == 0 :
nt = 'A'
elif i == 1 :
nt = 'C'
elif i == 2 :
nt = 'G'
elif i == 3 :
nt = 'T'
color = None
if fixed_seq[j] and black_fixed_seq :
color = 'black'
if ii == 0 :
letterAt(nt, j + 0.5, height_base, nt_prob * logo_height, logo_ax[1], color=color)
else :
prev_prob = np.sum(opt_pwm[j, sort_index[:ii]] * conservation[j]) * logo_height
letterAt(nt, j + 0.5, height_base + prev_prob, nt_prob * logo_height, logo_ax[1], color=color)
#ax[0].plot([0, pwm.shape[0]], [0, 1], color='black', linestyle='--')
plt.sca(logo_ax[1])
plt.xlim((plot_start, plot_end))
plt.ylim((0, 2))
plt.xticks([], [])
plt.yticks([], [])
plt.axis('off')
logo_ax[1].axhline(y=0.01 + height_base, color='black', linestyle='-', linewidth=2)
if plot_max_logo :
wt_max_sort_index = np.argsort(wt_logloss)
wt_max_index = 0
if max_index == 'mid' :
wt_max_index = wt_max_sort_index[int(len(wt_max_sort_index) / 2)]
else :
wt_max_index = wt_max_sort_index[max_index]
df_seq = df_seqs.query("wt_seq == '" + wt_seqs[wt_max_index] + "'")
seqs = list(df_seq['master_seq'].values)
n_seqs = len(seqs)
pwm = get_pwm(seqs)
prob = np.zeros(pwm.shape[0])
pred_prob = np.zeros(pwm.shape[0])
if agg_mode == 'max' :
in_logloss = np.zeros(len(seqs))
#Get wt seq cluster statistics
for i, seq in enumerate(seqs) :
in_logloss[i] = list(df_seq.query("master_seq == '" + seq + "'")['loss_logloss'].values)[0]
in_max_sort_index = np.argsort(in_logloss)
max_seq = seqs[in_max_sort_index[0]]
cut_prob = list(df_seq.query("master_seq == '" + max_seq + "'")[cut_column].values)[0]
if normalize_probs :
if normalize_range is not None :
prob[normalize_range[0]:normalize_range[1]] = cut_prob[normalize_range[0]:normalize_range[1]] / np.sum(cut_prob[normalize_range[0]:normalize_range[1]])
else :
prob = cut_prob[:164] / np.sum(cut_prob[:164])
else :
prob = cut_prob[:164]
cut_prob = list(df_seq.query("master_seq == '" + max_seq + "'")['cut_prob_pred'].values)[0]
if normalize_probs :
if normalize_range is not None :
pred_prob[normalize_range[0]:normalize_range[1]] = cut_prob[normalize_range[0]:normalize_range[1]] / np.sum(cut_prob[normalize_range[0]:normalize_range[1]])
else :
pred_prob = cut_prob[:164] / np.sum(cut_prob[:164])
else :
pred_prob = cut_prob[:164]
else :
for seq_i, cut_prob in enumerate(list(df_seq[cut_column].values)) :
if normalize_probs :
if normalize_range is not None :
prob[normalize_range[0]:normalize_range[1]] += cut_prob[normalize_range[0]:normalize_range[1]] / np.sum(cut_prob[normalize_range[0]:normalize_range[1]])
else :
prob += cut_prob[:164] / np.sum(cut_prob[:164])
else :
prob += cut_prob[:164]
if agg_mode == 'avg' :
prob /= float(len(seqs))
elif agg_mode == 'pool' :
prob /= np.sum(prob)
for cut_prob in list(df_seq['cut_prob_pred'].values) :
if normalize_probs :
if normalize_range is not None :
pred_prob[normalize_range[0]:normalize_range[1]] += cut_prob[normalize_range[0]:normalize_range[1]] / np.sum(cut_prob[normalize_range[0]:normalize_range[1]])
else :
pred_prob += cut_prob[:164] / np.sum(cut_prob[:164])
else :
pred_prob += cut_prob[:164]
if agg_mode == 'avg' :
pred_prob /= float(len(seqs))
elif agg_mode == 'pool' :
pred_prob /= np.sum(pred_prob)
wt_seq = wt_seqs[wt_max_index]
n_samples = len(df_seq)
wt_mean_logodds = wt_mean_logodds[wt_max_index]
wt_avgpos = wt_avgpos[wt_max_index]
wt_logloss = wt_logloss[wt_max_index]
if override_mean_stats :
n_samples = len(df_seqs)
wt_mean_logodds = np.mean(wt_mean_logodds)
wt_avgpos = np.mean(df_seqs['avgpos_true'])
wt_logloss = np.mean(df_seqs['loss_logloss'])
subexperiment = list(df_seqs.query("wt_seq == '" + wt_seq + "'")['subexperiment'].values)[0]
opt_pwm = np.vstack([np.ones((1, 4)) * 0.25, find_wt_pwm(wt_seq, max_cut_pwm_dict[gene + '_' + subexperiment])])[:164, :]
opt_prob = np.zeros(opt_pwm.shape[0])
subexp_prob = find_wt_yhat(wt_seq, max_cut_pwm_dict[gene + '_' + subexperiment], max_cut_yhat_dict[gene + '_' + subexperiment])[:163]
if normalize_probs :
if normalize_range is not None :
opt_prob[normalize_range[0]:normalize_range[1]] = subexp_prob[normalize_range[0]-1:normalize_range[1]-1] / np.sum(subexp_prob[normalize_range[0]-1:normalize_range[1]-1])
else :
opt_prob[1:] = subexp_prob[:163] / np.sum(subexp_prob[:163])
else :
opt_prob[1:] = subexp_prob[:163]
if np.sum(opt_pwm) > 0 :
for j in range(0, opt_pwm.shape[0]) :
if np.sum(opt_pwm[j, :]) > 0 :
opt_pwm[j, :] /= np.sum(opt_pwm[j, :])
#Slice according to seq trim index
seqs = [seq[seq_trim_start: seq_trim_end] for seq in seqs]
pwm = pwm[:, seq_trim_start: seq_trim_end]
prob = prob[seq_trim_start: seq_trim_end]
pred_prob = pred_prob[seq_trim_start: seq_trim_end]
opt_pwm = opt_pwm[:, seq_trim_start: seq_trim_end]
opt_prob = opt_prob[seq_trim_start: seq_trim_end]
pwm += 0.001
for j in range(0, pwm.shape[0]) :
pwm[j, :] /= np.sum(pwm[j, :])
#Plot actual array pwm
entropy = np.zeros(pwm.shape)
entropy[pwm > 0] = pwm[pwm > 0] * -np.log2(pwm[pwm > 0])
entropy = np.sum(entropy, axis=1)
conservation = 2 - entropy
perc = float(len(np.nonzero(human_logodds <= wt_mean_logodds)[0])) / float(len(df_human))
perc *= 100.
annot_text = 'Samples = ' + str(int(n_samples))
#annot_text += '\nLogodds = ' + str(round(wt_mean_logodds, 2)) + ' +- ' + str(round(wt_std_logodds, 2))
if usage_unit == 'log' :
annot_text += '\nLogodds = ' + str(round(wt_mean_logodds, 2))
else :
usage = 1. / (1. + np.exp(-wt_mean_logodds))
annot_text += '\nUsage = ' + str(round(usage, 4))
annot_text += '\nPerc. = ' + str(round(perc, 2)) + '%'
side_plot_i = 2
if not plot_actual_pwm :
side_plot_i = 3
stats_ax[side_plot_i].text(0.99, 0.5, annot_text, horizontalalignment='right', verticalalignment='center', transform=stats_ax[side_plot_i].transAxes, color='black', fontsize=12, weight="bold")
if plot_percentile :
perc_ax[side_plot_i].plot(np.arange(len(human_cutprob)), human_cutprob, linewidth=2, color='black')
perc_ax[side_plot_i].scatter([objective_pos], [human_cutprob[objective_pos]], s=70, c='red', alpha=1.0)
if objective_pos <= 30 :
perc_ax[side_plot_i].annotate('Objective', xy=(objective_pos, human_cutprob[objective_pos]), xycoords='data', xytext=(0.55, 0.8), fontsize=10, weight="bold", color='red', textcoords='axes fraction', arrowprops=dict(connectionstyle="arc3,rad=-.2", headlength=8, headwidth=8, shrink=0.05, width=1.5, color='black'))
else :
perc_ax[side_plot_i].annotate('Objective', xy=(objective_pos, human_cutprob[objective_pos]), xycoords='data', xytext=(0.55, 0.8), fontsize=10, weight="bold", color='red', textcoords='axes fraction', arrowprops=dict(connectionstyle="arc3,rad=.2", headlength=8, headwidth=8, shrink=0.05, width=1.5, color='black'))
perc_ax[side_plot_i].axvline(x=0, linewidth=1.5, color='green', linestyle='--')
perc_ax[side_plot_i].axvline(x=6, linewidth=1.5, color='green', linestyle='--')
if plot_actual_pwm :
l2, = cut_ax[2].plot(np.arange(plot_end - plot_start) + plot_start, prob[plot_start:plot_end], linewidth=3, linestyle='-', label='Observed', color='black', alpha=0.7)
l1, = cut_ax[2].plot(np.arange(plot_end - plot_start) + plot_start, pred_prob[plot_start:plot_end], linewidth=3, linestyle='-', label='Predicted', color='red', alpha=0.7)
if annotate_peaks :
annot_text = str(int(round(prob[objective_pos + 50] * 100, 0))) + '% Cleavage'
cut_ax[2].annotate(annot_text, xy=(objective_pos + 50, prob[objective_pos + 50]), xycoords='data', xytext=(-30, -5), ha='right', fontsize=10, weight="bold", color='black', textcoords='offset points', arrowprops=dict(connectionstyle="arc3,rad=-.1", headlength=8, headwidth=8, shrink=0.15, width=1.5, color='black'))
plt.sca(cut_ax[2])
plt.xlim((plot_start, plot_end))
#plt.ylim((0, 2))
plt.xticks([], [])
plt.yticks([], [])
plt.legend(handles=[l1, l2], fontsize=12, prop=dict(weight='bold'), frameon=False)
plt.axis('off')
for j in range(plot_start, plot_end) :
sort_index = np.argsort(pwm[j, :])
for ii in range(0, 4) :
i = sort_index[ii]
nt_prob = pwm[j, i] * conservation[j]
nt = ''
if i == 0 :
nt = 'A'
elif i == 1 :
nt = 'C'
elif i == 2 :
nt = 'G'
elif i == 3 :
nt = 'T'
color = None
if fixed_seq[j] and black_fixed_seq :
color = 'black'
if ii == 0 :
letterAt(nt, j + 0.5, height_base, nt_prob * logo_height, logo_ax[2], color=color)
else :
prev_prob = np.sum(pwm[j, sort_index[:ii]] * conservation[j]) * logo_height
letterAt(nt, j + 0.5, height_base + prev_prob, nt_prob * logo_height, logo_ax[2], color=color)
#ax[0].plot([0, pwm.shape[0]], [0, 1], color='black', linestyle='--')
if plot_mfe :
mfe = list(df_seqs.query("wt_seq == '" + wt_seq + "'")['mfe'].values)[0]
mfe_struct = ('X' * 56) + list(df_seqs.query("wt_seq == '" + wt_seq + "'")['struct'].values)[0] + ('X' * 100)
for j in range(plot_start, plot_end) :
if mfe_struct[j] != 'X' :
letterAt(mfe_struct[j], j + 0.5, -fold_height, fold_height-0.05, logo_ax[2], color='black')
annot_text = 'MFE = ' + str(round(mfe, 1))
#logo_ax[2].annotate(annot_text, xy=(56, -fold_height/2), xycoords='data', xytext=(-30, 0), ha='right', fontsize=10, weight="bold", color='black', textcoords='offset points', arrowprops=dict(headlength=8, headwidth=8, shrink=0.15, width=1.5, color='black'))
logo_ax[2].text(55, -fold_height/2 -0.05, annot_text, horizontalalignment='right', verticalalignment='center', color='black', fontsize=12, weight="bold")
plt.sca(logo_ax[2])
plt.xlim((plot_start, plot_end))
if plot_mfe :
plt.ylim((-fold_height-0.02, 2))
else :
plt.ylim((0, 2))
plt.xticks([], [])
plt.yticks([], [])
plt.axis('off')
logo_ax[2].axhline(y=0.01 + height_base, color='black', linestyle='-', linewidth=2)
#Plot optimization pwm
entropy = np.zeros(opt_pwm.shape)
entropy[opt_pwm > 0] = opt_pwm[opt_pwm > 0] * -np.log2(opt_pwm[opt_pwm > 0])
entropy = np.sum(entropy, axis=1)
conservation = 2 - entropy
if plot_opt_pwm :
l2, = cut_ax[3].plot(np.arange(plot_end - plot_start) + plot_start, prob[plot_start:plot_end], linewidth=3, linestyle='-', label='Observed', color='black', alpha=0.7)
l1, = cut_ax[3].plot(np.arange(plot_end - plot_start) + plot_start, opt_prob[plot_start:plot_end], linewidth=3, linestyle='-', label='Predicted', color='red', alpha=0.7)
if annotate_peaks :
annot_text = str(int(round(prob[objective_pos + 50] * 100, 0))) + '% Cleavage'
cut_ax[2].annotate(annot_text, xy=(objective_pos + 50, prob[objective_pos + 50]), xycoords='data', xytext=(-30, -5), ha='right', fontsize=10, weight="bold", color='black', textcoords='offset points', arrowprops=dict(connectionstyle="arc3,rad=-.1", headlength=8, headwidth=8, shrink=0.15, width=1.5, color='black'))
plt.sca(cut_ax[3])
plt.xlim((plot_start, plot_end))
#plt.ylim((0, 2))
plt.xticks([], [])
plt.yticks([], [])
plt.legend(handles=[l1, l2], fontsize=12, prop=dict(weight='bold'), frameon=False)
plt.axis('off')
for j in range(plot_start, plot_end) :
sort_index = np.argsort(opt_pwm[j, :])
for ii in range(0, 4) :
i = sort_index[ii]
nt_prob = opt_pwm[j, i] * conservation[j]
nt = ''
if i == 0 :
nt = 'A'
elif i == 1 :
nt = 'C'
elif i == 2 :
nt = 'G'
elif i == 3 :
nt = 'T'
color = None
if fixed_seq[j] and black_fixed_seq :
color = 'black'
if ii == 0 :
letterAt(nt, j + 0.5, height_base, nt_prob * logo_height, logo_ax[3], color=color)
else :
prev_prob = np.sum(opt_pwm[j, sort_index[:ii]] * conservation[j]) * logo_height
letterAt(nt, j + 0.5, height_base + prev_prob, nt_prob * logo_height, logo_ax[3], color=color)
if plot_mfe :
mfe = list(df_seqs.query("wt_seq == '" + wt_seq + "'")['mfe'].values)[0]
mfe_struct = ('X' * 56) + list(df_seqs.query("wt_seq == '" + wt_seq + "'")['struct'].values)[0] + ('X' * 100)
for j in range(plot_start, plot_end) :
if mfe_struct[j] != 'X' :
letterAt(mfe_struct[j], j + 0.5, -fold_height, fold_height-0.05, logo_ax[2], color='black')
annot_text = 'MFE = ' + str(round(mfe, 1))
#logo_ax[2].annotate(annot_text, xy=(56, -fold_height/2), xycoords='data', xytext=(-30, 0), ha='right', fontsize=10, weight="bold", color='black', textcoords='offset points', arrowprops=dict(headlength=8, headwidth=8, shrink=0.15, width=1.5, color='black'))
logo_ax[2].text(55, -fold_height/2 -0.05, annot_text, horizontalalignment='right', verticalalignment='center', color='black', fontsize=12, weight="bold")
plt.sca(logo_ax[2])
plt.xlim((plot_start, plot_end))
if plot_mfe :
plt.ylim((-fold_height-0.02, 2))
else :
plt.ylim((0, 2))
plt.xticks([], [])
plt.yticks([], [])
plt.axis('off')
logo_ax[3].axhline(y=0.01 + height_base, color='black', linestyle='-', linewidth=2)
for axis in fig.axes :
axis.get_xaxis().set_visible(False)
axis.get_yaxis().set_visible(False)
plt.tight_layout()
if save_figs :
plt.savefig(fig_name + '.png', transparent=True, dpi=fig_dpi)
plt.savefig(fig_name + '.svg')
plt.savefig(fig_name + '.eps')
plt.show()
#Max Cut-optimized sequence PWMs (generated by SeqProp)
def load_max_cut_pwms() :
    """Load SeqProp-optimized Max Cut PWMs and predicted cut profiles from disk.

    For each experiment family ('A', 'A_GGCC', 'AT', 'AT_GGCC', 'A_aruns',
    'A_GGCC_aruns', plus their '_ent' entropy-penalized variants) and each
    target cut position, load the saved PWMs and predicted cut distributions,
    rank the optimized sequences by predicted probability mass in the 3-nt
    window centered on the target cut position, and keep the top few.

    Returns:
        (max_cut_pwm_dict, max_cut_yhat_dict): two dicts keyed by
        'simple_<experiment>_<cut position>'; values are stacked numpy arrays
        of PWMs and predicted cut probabilities, respectively.
    """
    file_path = 'max_cut_logos/'

    max_cut_pwm_dict = {}
    max_cut_yhat_dict = {}

    cut_list = ['60', '65', '70', '75', '80', '85', '90', '95', '100']
    cut_list_last = ['85', '90', '95', '100']

    #A
    for cut_pos_str in cut_list :
        cut_pos = int(cut_pos_str)

        y_hats_1 = np.load(file_path + 'A_iter_4000/simple_' + cut_pos_str + '_max_class_max_score_1_images_cuthat.npy')
        #Rank candidates by predicted cut mass in [cut_pos-1, cut_pos+1], descending
        opt_index_1 = np.argsort(np.sum(y_hats_1[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_1 = np.load(file_path + 'A_iter_4000/simple_' + cut_pos_str + '_max_class_max_score_1_images_pwm.npy')[opt_index_1, :, :][:3, :, :]
        y_hats_1 = y_hats_1[opt_index_1, :][:3, :]

        y_hats_2 = np.load(file_path + 'simple_' + cut_pos_str + '_max_class_max_score_1_images_cuthat.npy')
        opt_index_2 = np.argsort(np.sum(y_hats_2[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_2 = np.load(file_path + 'simple_' + cut_pos_str + '_max_class_max_score_1_images_pwm.npy')[opt_index_2, :, :][:2, :, :]
        y_hats_2 = y_hats_2[opt_index_2, :][:2, :]

        y_hats_ent = np.load(file_path + 'simple_' + cut_pos_str + '_ent_max_class_max_score_ent_1_images_cuthat.npy')
        opt_index_ent = np.argsort(np.sum(y_hats_ent[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_ent = np.load(file_path + 'simple_' + cut_pos_str + '_ent_max_class_max_score_ent_1_images_pwm.npy')[opt_index_ent, :, :][:2, :, :]
        y_hats_ent = y_hats_ent[opt_index_ent, :][:2, :]

        max_cut_pwm_dict['simple_' + 'A' + '_' + cut_pos_str] = np.concatenate([pwms_1, pwms_2], axis=0)
        max_cut_yhat_dict['simple_' + 'A' + '_' + cut_pos_str] = np.concatenate([y_hats_1, y_hats_2], axis=0)
        max_cut_pwm_dict['simple_' + 'A_ent' + '_' + cut_pos_str] = pwms_ent
        max_cut_yhat_dict['simple_' + 'A_ent' + '_' + cut_pos_str] = y_hats_ent

    #A GGCC
    for cut_pos_str in cut_list_last :
        #BUGFIX: cut_pos was previously never updated in this (and the following)
        #loops, so ranking silently reused the stale value from the last
        #iteration of the first loop (100) for every cut position here.
        cut_pos = int(cut_pos_str)

        y_hats_1 = np.load(file_path + 'A_GGCC_iter_4000/simple_' + cut_pos_str + '_GGCC_max_class_max_score_GGCC_1_images_cuthat.npy')
        opt_index_1 = np.argsort(np.sum(y_hats_1[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_1 = np.load(file_path + 'A_GGCC_iter_4000/simple_' + cut_pos_str + '_GGCC_max_class_max_score_GGCC_1_images_pwm.npy')[opt_index_1, :, :][:3, :, :]

        y_hats_2 = np.load(file_path + 'simple_' + cut_pos_str + '_GGCC_max_class_max_score_GGCC_1_images_cuthat.npy')
        opt_index_2 = np.argsort(np.sum(y_hats_2[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_2 = np.load(file_path + 'simple_' + cut_pos_str + '_GGCC_max_class_max_score_GGCC_1_images_pwm.npy')[opt_index_2, :, :][:2, :, :]

        y_hats_ent = np.load(file_path + 'simple_' + cut_pos_str + '_GGCC_ent_max_class_max_score_GGCC_ent_1_images_cuthat.npy')
        opt_index_ent = np.argsort(np.sum(y_hats_ent[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_ent = np.load(file_path + 'simple_' + cut_pos_str + '_GGCC_ent_max_class_max_score_GGCC_ent_1_images_pwm.npy')[opt_index_ent, :, :][:2, :, :]

        #NOTE(review): unlike the 'A' loop above, y_hats here are stored
        #unsorted and unsliced, so they do not row-align with the sorted,
        #truncated pwms — confirm whether this asymmetry is intentional.
        max_cut_pwm_dict['simple_' + 'A_GGCC' + '_' + cut_pos_str] = np.concatenate([pwms_1, pwms_2], axis=0)
        max_cut_yhat_dict['simple_' + 'A_GGCC' + '_' + cut_pos_str] = np.concatenate([y_hats_1, y_hats_2], axis=0)
        max_cut_pwm_dict['simple_' + 'A_GGCC_ent' + '_' + cut_pos_str] = pwms_ent
        max_cut_yhat_dict['simple_' + 'A_GGCC_ent' + '_' + cut_pos_str] = y_hats_ent

    #AT
    for cut_pos_str in cut_list :
        cut_pos = int(cut_pos_str)

        y_hats_1 = np.load(file_path + 'AT_iter_5000/simple_' + cut_pos_str + '_AT_max_class_max_score_1_images_cuthat.npy')
        opt_index_1 = np.argsort(np.sum(y_hats_1[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_1 = np.load(file_path + 'AT_iter_5000/simple_' + cut_pos_str + '_AT_max_class_max_score_1_images_pwm.npy')[opt_index_1, :, :][:3, :, :]

        y_hats_2 = np.load(file_path + 'simple_' + cut_pos_str + '_AT_max_class_max_score_1_images_cuthat.npy')
        opt_index_2 = np.argsort(np.sum(y_hats_2[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_2 = np.load(file_path + 'simple_' + cut_pos_str + '_AT_max_class_max_score_1_images_pwm.npy')[opt_index_2, :, :][:2, :, :]

        y_hats_ent = np.load(file_path + 'simple_' + cut_pos_str + '_AT_ent_max_class_max_score_ent_1_images_cuthat.npy')
        opt_index_ent = np.argsort(np.sum(y_hats_ent[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_ent = np.load(file_path + 'simple_' + cut_pos_str + '_AT_ent_max_class_max_score_ent_1_images_pwm.npy')[opt_index_ent, :, :][:4, :, :]

        max_cut_pwm_dict['simple_' + 'AT' + '_' + cut_pos_str] = np.concatenate([pwms_1, pwms_2], axis=0)
        max_cut_yhat_dict['simple_' + 'AT' + '_' + cut_pos_str] = np.concatenate([y_hats_1, y_hats_2], axis=0)
        max_cut_pwm_dict['simple_' + 'AT_ent' + '_' + cut_pos_str] = pwms_ent
        max_cut_yhat_dict['simple_' + 'AT_ent' + '_' + cut_pos_str] = y_hats_ent

    #AT GGCC
    for cut_pos_str in cut_list_last :
        cut_pos = int(cut_pos_str)

        y_hats_1 = np.load(file_path + 'AT_GGCC_strong_iter_5000/simple_' + cut_pos_str + '_AT_GGCC_max_class_max_score_GGCC_1_images_cuthat.npy')
        opt_index_1 = np.argsort(np.sum(y_hats_1[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_1 = np.load(file_path + 'AT_GGCC_strong_iter_5000/simple_' + cut_pos_str + '_AT_GGCC_max_class_max_score_GGCC_1_images_pwm.npy')[opt_index_1, :, :][:3, :, :]

        y_hats_2 = np.load(file_path + 'simple_' + cut_pos_str + '_AT_GGCC_max_class_max_score_GGCC_1_images_cuthat.npy')
        opt_index_2 = np.argsort(np.sum(y_hats_2[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_2 = np.load(file_path + 'simple_' + cut_pos_str + '_AT_GGCC_max_class_max_score_GGCC_1_images_pwm.npy')[opt_index_2, :, :][:2, :, :]

        y_hats_ent = np.load(file_path + 'simple_' + cut_pos_str + '_AT_GGCC_ent_max_class_max_score_GGCC_ent_1_images_cuthat.npy')
        opt_index_ent = np.argsort(np.sum(y_hats_ent[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_ent = np.load(file_path + 'simple_' + cut_pos_str + '_AT_GGCC_ent_max_class_max_score_GGCC_ent_1_images_pwm.npy')[opt_index_ent, :, :][:4, :, :]

        max_cut_pwm_dict['simple_' + 'AT_GGCC' + '_' + cut_pos_str] = np.concatenate([pwms_1, pwms_2], axis=0)
        max_cut_yhat_dict['simple_' + 'AT_GGCC' + '_' + cut_pos_str] = np.concatenate([y_hats_1, y_hats_2], axis=0)
        max_cut_pwm_dict['simple_' + 'AT_GGCC_ent' + '_' + cut_pos_str] = pwms_ent
        max_cut_yhat_dict['simple_' + 'AT_GGCC_ent' + '_' + cut_pos_str] = y_hats_ent

    #A punish aruns
    for cut_pos_str in cut_list :
        cut_pos = int(cut_pos_str)

        y_hats_1 = np.load(file_path + 'simple_' + cut_pos_str + '_punish_aruns_max_class_max_score_punish_aruns_1_images_cuthat.npy')
        opt_index_1 = np.argsort(np.sum(y_hats_1[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_1 = np.load(file_path + 'simple_' + cut_pos_str + '_punish_aruns_max_class_max_score_punish_aruns_1_images_pwm.npy')[opt_index_1, :, :][:5, :, :]

        y_hats_ent = np.load(file_path + 'simple_' + cut_pos_str + '_punish_aruns_ent_max_class_max_score_punish_aruns_ent_1_images_cuthat.npy')
        opt_index_ent = np.argsort(np.sum(y_hats_ent[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_ent = np.load(file_path + 'simple_' + cut_pos_str + '_punish_aruns_ent_max_class_max_score_punish_aruns_ent_1_images_pwm.npy')[opt_index_ent, :, :][:4, :, :]

        max_cut_pwm_dict['simple_' + 'A_aruns' + '_' + cut_pos_str] = pwms_1
        max_cut_yhat_dict['simple_' + 'A_aruns' + '_' + cut_pos_str] = y_hats_1
        max_cut_pwm_dict['simple_' + 'A_aruns_ent' + '_' + cut_pos_str] = pwms_ent
        max_cut_yhat_dict['simple_' + 'A_aruns_ent' + '_' + cut_pos_str] = y_hats_ent

    #A GGCC punish aruns
    for cut_pos_str in cut_list_last :
        cut_pos = int(cut_pos_str)

        y_hats_1 = np.load(file_path + 'simple_' + cut_pos_str + '_GGCC_punish_aruns_max_class_max_score_GGCC_punish_aruns_1_images_cuthat.npy')
        opt_index_1 = np.argsort(np.sum(y_hats_1[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_1 = np.load(file_path + 'simple_' + cut_pos_str + '_GGCC_punish_aruns_max_class_max_score_GGCC_punish_aruns_1_images_pwm.npy')[opt_index_1, :, :][:5, :, :]

        y_hats_ent = np.load(file_path + 'simple_' + cut_pos_str + '_GGCC_punish_aruns_ent_max_class_max_score_GGCC_punish_aruns_ent_1_images_cuthat.npy')
        opt_index_ent = np.argsort(np.sum(y_hats_ent[:, cut_pos-1:cut_pos+2], axis=1), axis=0)[::-1]
        pwms_ent = np.load(file_path + 'simple_' + cut_pos_str + '_GGCC_punish_aruns_ent_max_class_max_score_GGCC_punish_aruns_ent_1_images_pwm.npy')[opt_index_ent, :, :][:4, :, :]

        max_cut_pwm_dict['simple_' + 'A_GGCC_aruns' + '_' + cut_pos_str] = pwms_1
        max_cut_yhat_dict['simple_' + 'A_GGCC_aruns' + '_' + cut_pos_str] = y_hats_1
        max_cut_pwm_dict['simple_' + 'A_GGCC_aruns_ent' + '_' + cut_pos_str] = pwms_ent
        max_cut_yhat_dict['simple_' + 'A_GGCC_aruns_ent' + '_' + cut_pos_str] = y_hats_ent

    return max_cut_pwm_dict, max_cut_yhat_dict
#Max Cut SNV Helper Functions
def mut_map_fold_snvs(df_gene, gene_name, experiment, mode, figsize=(12, 3), delta_column='delta_logodds', mark_pathogenic=False, mark_benign=False, mark_undetermined=False, border_eta=0.085, seq_trim_start=0, seq_trim_end=164, plot_start=0, plot_end=164, cut_downscaling=0.5, pas_downscale_mode='frac', fig_name=None, fig_dpi=300) :
    """Plot a 4 x 164 SNV mutation map (sequence logo + per-base heatmap).

    Builds a matrix of `delta_column` effects indexed by (substituted base,
    position) from `df_gene` (expects columns 'wt_seq', 'master_seq',
    'snv_pos' and `delta_column`), downscales the column at the objective
    cut position, renders a mutability-scaled logo of the reference sequence
    over a RdBu heatmap, and optionally saves the figure.

    Args:
        df_gene: per-variant DataFrame; only rows whose 'wt_seq' matches the
            first row's reference sequence are used.
        gene_name, experiment, mode: experiment identifiers; `experiment[0]`
            is parsed for the objective cut position (assumes it ends in
            '_<position>' — TODO confirm against callers).
        mark_pathogenic, mark_benign, mark_undetermined, pas_downscale_mode:
            accepted for interface compatibility; unused in this body.
        fig_name: if not None, save .png/.svg/.eps under this stem.
    """
    mut_map = np.zeros((4, 164))
    ref_seq = df_gene['wt_seq'].values[0]

    #Row order in mut_map is T, G, C, A (hence the 3-base flip below)
    base_index = {'A' : 0, 'C' : 1, 'G' : 2, 'T' : 3}

    for _, row in df_gene.iterrows() :
        snv_pos = row['snv_pos']
        #Skip variants measured against a different reference sequence
        if row['wt_seq'] != ref_seq :
            continue

        delta_logodds_true = row[delta_column]
        if np.isnan(delta_logodds_true) :
            delta_logodds_true = 0

        base = base_index.get(row['master_seq'][snv_pos], 0)
        mut_map[3-base, snv_pos] = delta_logodds_true

    #Downscale the objective cut position column so it does not dominate the map
    obj_pos = int(experiment[0].split("_")[-1]) + 1
    mut_map[:, obj_pos] = mut_map[:, obj_pos] * cut_downscaling

    #Slice according to seq trim index
    ref_seq = ref_seq[seq_trim_start: seq_trim_end]
    mut_map = mut_map[:, seq_trim_start: seq_trim_end]

    fig = plt.figure(figsize=figsize)
    gs = gridspec.GridSpec(2, 1, height_ratios=[1, 2])
    ax = [plt.subplot(gs[0]), plt.subplot(gs[1])]

    #Letter height encodes total (negated, biased) effect of mutating that position
    bias = np.max(np.sum(mut_map[:, :], axis=0)) / 3.0 + 0.5
    max_score = np.min(np.sum(mut_map[:, :], axis=0)) / 3.0 * -1 + bias
    for i in range(plot_start, plot_end) :
        mutability_score = np.sum(mut_map[:, i]) / 3.0 * -1 + bias
        letterAt(ref_seq[i], i + 0.5, 0, mutability_score, ax[0])

    ax[0].plot([0, mut_map.shape[1]], [bias, bias], color='black', linestyle='--')

    plt.sca(ax[0])
    plt.yticks([0.5, bias, max_score], [round(bias - 0.5, 2), 0, round((max_score - bias) * -1, 2)], fontsize=16)
    plt.xticks(fontsize=16)
    plt.xlim((plot_start, plot_end))
    plt.ylim((0, max_score))
    plt.tight_layout()

    pcm = ax[1].pcolor(mut_map, cmap='RdBu_r', vmin=-np.abs(mut_map).max(), vmax=np.abs(mut_map).max())
    #fig.colorbar(pcm, ax=ax[1])

    plt.sca(ax[1])
    #NOTE(review): the original set base-letter xticks here and immediately
    #cleared them with plt.xticks([], []); the dead first call was removed.
    plt.xticks([], [])
    plt.yticks([0.5, 1.5, 2.5, 3.5], ['T', 'G', 'C', 'A'], fontsize=16)
    plt.axis([plot_start, plot_end, 0, 4])
    plt.gca().xaxis.tick_top()

    plt.tight_layout()

    if fig_name is not None :
        plt.savefig(fig_name + '.png', transparent=True, dpi=fig_dpi)
        plt.savefig(fig_name + '.svg')
        plt.savefig(fig_name + '.eps')

    plt.show()
def struct_map_fold_snvs(df_gene, gene_name, experiment, mode, figsize=(12, 3), delta_column='delta_logodds', mark_pathogenic=False, mark_benign=False, mark_undetermined=False, border_eta=0.085, seq_trim_start=0, seq_trim_end=164, plot_start=0, plot_end=164, cut_downscaling=0.5, pas_downscale_mode='frac', fig_name=None, fig_dpi=300) :
    """Plot a 3 x 164 SNV structure-effect map ('(' / '.' / ')' rows).

    Like `mut_map_fold_snvs`, but aggregates `delta_column` effects by the
    dot-bracket structure symbol at each SNV position of 'master_seq',
    averaging over the variants that share a (symbol, position) cell.

    Args:
        df_gene: per-variant DataFrame; only rows whose 'wt_seq' matches the
            first row's reference sequence are used. 'master_seq' is assumed
            to hold dot-bracket characters at 'snv_pos' — TODO confirm.
        mark_pathogenic, mark_benign, mark_undetermined, pas_downscale_mode:
            accepted for interface compatibility; unused in this body.
        fig_name: if not None, save .png/.svg/.eps under this stem.
    """
    mut_map = np.zeros((3, 164))
    count_map = np.zeros((3, 164))
    ref_seq = df_gene['wt_seq'].values[0]

    #Row order in mut_map is ')', '.', '(' (hence the 2-base flip below)
    struct_index = {'(' : 0, '.' : 1, ')' : 2}

    for _, row in df_gene.iterrows() :
        snv_pos = row['snv_pos']
        #Skip variants measured against a different reference sequence
        if row['wt_seq'] != ref_seq :
            continue

        delta_logodds_true = row[delta_column]
        if np.isnan(delta_logodds_true) :
            delta_logodds_true = 0

        base = struct_index.get(row['master_seq'][snv_pos], 0)
        mut_map[2-base, snv_pos] += delta_logodds_true
        #BUGFIX: previously accumulated delta_logodds_true here, which made the
        #average below collapse every populated cell to 1.0; count occurrences.
        count_map[2-base, snv_pos] += 1

    #Average accumulated effects over the number of contributing variants
    mut_map[count_map > 0] /= count_map[count_map > 0]

    #Downscale the objective cut position column so it does not dominate the map
    obj_pos = int(experiment[0].split("_")[-1]) + 1
    mut_map[:, obj_pos] = mut_map[:, obj_pos] * cut_downscaling

    #Slice according to seq trim index
    ref_seq = ref_seq[seq_trim_start: seq_trim_end]
    mut_map = mut_map[:, seq_trim_start: seq_trim_end]

    fig = plt.figure(figsize=figsize)
    gs = gridspec.GridSpec(2, 1, height_ratios=[1, 2])
    ax = [plt.subplot(gs[0]), plt.subplot(gs[1])]

    #Letter height encodes total (negated, biased) effect of mutating that position
    bias = np.max(np.sum(mut_map[:, :], axis=0)) / 2.0 + 0.5
    max_score = np.min(np.sum(mut_map[:, :], axis=0)) / 2.0 * -1 + bias
    for i in range(plot_start, plot_end) :
        mutability_score = np.sum(mut_map[:, i]) / 2.0 * -1 + bias
        letterAt(ref_seq[i], i + 0.5, 0, mutability_score, ax[0])

    ax[0].plot([0, mut_map.shape[1]], [bias, bias], color='black', linestyle='--')

    plt.sca(ax[0])
    plt.yticks([0.5, bias, max_score], [round(bias - 0.5, 2), 0, round((max_score - bias) * -1, 2)], fontsize=16)
    plt.xticks(fontsize=16)
    plt.xlim((plot_start, plot_end))
    plt.ylim((0, max_score))
    plt.tight_layout()

    pcm = ax[1].pcolor(mut_map, cmap='RdBu_r', vmin=-np.abs(mut_map).max(), vmax=np.abs(mut_map).max())
    #fig.colorbar(pcm, ax=ax[1])

    plt.sca(ax[1])
    #NOTE(review): the original set base-letter xticks here and immediately
    #cleared them with plt.xticks([], []); the dead first call was removed.
    plt.xticks([], [])
    plt.yticks([0.5, 1.5, 2.5], [')', '.', '('], fontsize=16)
    plt.axis([plot_start, plot_end, 0, 3])
    plt.gca().xaxis.tick_top()

    plt.tight_layout()

    if fig_name is not None :
        plt.savefig(fig_name + '.png', transparent=True, dpi=fig_dpi)
        plt.savefig(fig_name + '.svg')
        plt.savefig(fig_name + '.eps')

    plt.show()
#SNV Analysis Helper Functions
def plot_position_delta_scatter(df, min_pred_filter=0.0, sort_pred=True, figsize=(14,6), dot_size=12, dot_alpha=0.5, vmin=-0.5, vmax=0.45, show_stats=True, fig_name=None, plot_start=-50, plot_end=100, fig_dpi=300, annotate=None, bg_df=None, pred_column='delta_logodds_pred', true_column='delta_logodds_true', snv_pos_column='snv_pos') :
fig = plt.figure(figsize=figsize)
keep_index = np.abs(np.ravel(df[pred_column].values)) >= min_pred_filter
df = df.loc[keep_index]
df_indel = df.query("variant == 'indel'")
df = df.query("variant != 'indel'")
annotation_height = 1.0
if 'psi' in true_column :
annotation_height = 0.15
border_eta = 0.00
if bg_df is not None :
snv_pos = np.ravel(bg_df[snv_pos_column].values) - 50
delta_logodds_true = np.ravel(bg_df[true_column].values)
delta_logodds_pred = np.ravel(bg_df[pred_column].values)
sort_index = np.argsort(np.abs(delta_logodds_pred))
snv_pos = snv_pos[sort_index]
delta_logodds_true = delta_logodds_true[sort_index]
delta_logodds_pred = delta_logodds_pred[sort_index]
delta_logodds_true[delta_logodds_true < 0.0] -= annotation_height
plt.scatter(snv_pos, delta_logodds_true, c=delta_logodds_pred, cmap="bwr", vmin=-0.5, vmax=0.45, alpha=0.01, s=12)
snv_pos = np.ravel(df[snv_pos_column].values) - 50
delta_logodds_true = np.ravel(df[true_column].values)
delta_logodds_pred = np.ravel(df[pred_column].values)
r_val, p_val = pearsonr(delta_logodds_true, delta_logodds_pred)
n_points = len(df)
if sort_pred :
sort_index = np.argsort(np.abs(delta_logodds_pred))
snv_pos = snv_pos[sort_index]
delta_logodds_true = delta_logodds_true[sort_index]
delta_logodds_pred = delta_logodds_pred[sort_index]
delta_logodds_true[delta_logodds_true < 0.0] -= annotation_height
ax = plt.gca()
ax.add_patch(Rectangle((-50 + border_eta, -annotation_height + border_eta), 50 - 2.*border_eta, annotation_height - 2.*border_eta, fill=True, facecolor='white', edgecolor='black', lw=4))
ax.add_patch(Rectangle((0 + border_eta, -annotation_height + border_eta), 6 - 2.*border_eta, annotation_height - 2.*border_eta, fill=True, facecolor='darkgreen', edgecolor='black', lw=4))
ax.add_patch(Rectangle((6 + border_eta, -annotation_height + border_eta), 54 - 2.*border_eta, annotation_height - 2.*border_eta, fill=True, facecolor='white', edgecolor='black', lw=4))
ax.add_patch(Rectangle((60 + border_eta, -annotation_height + border_eta), 75 - 2.*border_eta, annotation_height - 2.*border_eta, fill=True, facecolor='white', edgecolor='black', lw=4))
#ax.text(-25, -annotation_height/2., 'USE', horizontalalignment='center', verticalalignment='center', color='black', fontsize=16, weight="bold")
#ax.text(33, -annotation_height/2., 'DSE', horizontalalignment='center', verticalalignment='center', color='black', fontsize=16, weight="bold")
#ax.text(85, -annotation_height/2., 'FDSE', horizontalalignment='center', verticalalignment='center', color='black', fontsize=16, weight="bold")
use_start = plot_start
use_end = 0
if use_end - use_start > 10 :
ax.text(use_start + (use_end - use_start) / 2., -annotation_height/2., 'USE', horizontalalignment='center', verticalalignment='center', color='black', fontsize=16, weight="bold")
dse_start = 6
dse_end = min(60, plot_end)
if dse_end - dse_start > 10 :
ax.text(dse_start + (dse_end - dse_start) / 2., -annotation_height/2., 'DSE', horizontalalignment='center', verticalalignment='center', color='black', fontsize=16, weight="bold")
fdse_start = 60
fdse_end = min(60 + 75, plot_end)
if fdse_end - fdse_start > 10 :
ax.text(fdse_start + (fdse_end - fdse_start) / 2., -annotation_height/2., 'FDSE', horizontalalignment='center', verticalalignment='center', color='black', fontsize=16, weight="bold")
plt.scatter(snv_pos, delta_logodds_true, c=delta_logodds_pred, cmap="bwr", vmin=vmin, vmax=vmax, alpha=dot_alpha, s=dot_size)
#plt.plot([np.min(snv_pos), np.max(snv_pos)], [0, 0], c='darkred', linewidth=2, linestyle='--')
#Plot any indels
if len(df_indel) > 0 and True == False :
snv_pos_indel = np.ravel(df_indel[snv_pos_column].values) - 50
delta_logodds_true_indel = np.ravel(df_indel[true_column].values)
delta_logodds_pred_indel = np.ravel(df_indel[pred_column].values)
plt.scatter(snv_pos_indel, delta_logodds_true_indel, c="black", marker="D", vmin=vmin, vmax=vmax, alpha=dot_alpha, s=dot_size)
if annotate is not None :
annotate_right_list = annotate['annotate_right_list']
annotate_right_down_list = annotate['annotate_right_down_list']
annotate_left_list = annotate['annotate_left_list']
annotate_left_down_list = annotate['annotate_left_down_list']
annotate_once = {}
for index, row in df.iterrows() :
if row['gene'] in annotate_once :
continue
annotate_once[row['gene']] = True
d_logodds_true = row[true_column]
if d_logodds_true < 0.0 :
d_logodds_true -= annotation_height
if row['gene'] in annotate_right_list :
plt.annotate(row['gene'],
xy=(row['snv_pos'] - 50, d_logodds_true), xycoords='data',
xytext=(30, 30), textcoords='offset points', fontsize=16,
arrowprops=dict(arrowstyle="-", color='black', lw=2))
elif row['gene'] in annotate_right_down_list :
plt.annotate(row['gene'],
xy=(row['snv_pos'] - 50, d_logodds_true), xycoords='data',
xytext=(30, -30), textcoords='offset points', fontsize=16,
arrowprops=dict(arrowstyle="-", color='black', lw=2))
elif row['gene'] in annotate_left_list :
plt.annotate(row['gene'],
xy=(row['snv_pos'] - 50, d_logodds_true), xycoords='data',
xytext=(-80, 30), textcoords='offset points', fontsize=16,
arrowprops=dict(arrowstyle="-", color='black', lw=2))
elif row['gene'] in annotate_left_down_list :
plt.annotate(row['gene'],
xy=(row['snv_pos'] - 50, d_logodds_true), xycoords='data',
xytext=(-80, -30), textcoords='offset points', fontsize=16,
arrowprops=dict(arrowstyle="-", color='black', lw=2))
annot_text = 'R^2 = ' + str(round(r_val * r_val, 2))
annot_text += '\nn = ' + str(n_points)
if show_stats :
ax = plt.gca()
ax.text(0.90, 0.80, annot_text, horizontalalignment='right', verticalalignment='bottom', transform=ax.transAxes, color='black', fontsize=16, weight="bold")
if plot_start != -50 or plot_end != 100 :
plt.xticks([plot_start, 0, 6, plot_end], [plot_start, 0, 6, plot_end], fontsize=18)
else :
plt.xticks([-100, -50, -25, 0, 6, 25, 50, 100], [-100, -50, -25, 0, 6, 25, 50, 100], fontsize=18)
if not 'psi' in true_column :
plt.yticks([-7, -5, -3, -1, 0, 2, 4], [-6, -4, -2, 0, 0, 2, 4], fontsize=18)
else :
plt.yticks([-1.0 - annotation_height, -0.5 - annotation_height, 0 - annotation_height, 0, 0.5, 1.0], [-1.0, -0.5, 0, 0, 0.5, 1.0], fontsize=18)
#plt.axis([np.min(snv_pos), np.max(snv_pos), np.min((delta_logodds_true)), np.max((delta_logodds_true))])
plt.axis([-50, 100, -6., 4.])
plt.xlabel('Position relative to pPAS', fontsize=18)
plt.ylabel('Observed Delta pPAS logodds', fontsize=18)
plt.title('Position vs. Delta Usage', fontsize=18)
plt.xlim(plot_start, plot_end)
if not 'psi' in true_column :
plt.ylim(-7, 4)
else :
plt.ylim(-1 - annotation_height, 1)
plt.tight_layout()
if fig_name is not None :
plt.savefig(fig_name + '.png', dpi=fig_dpi, transparent=True)
plt.savefig(fig_name + '.eps')
plt.show()
def mut_map_v2(df, gene_name, experiment, mode, true_column='delta_logodds_true', pred_column='delta_logodds_pred', figsize=(12, 3), mark_pathogenic=False, mark_benign=False, mark_undetermined=False, border_eta=0.085, seq_trim_start=0, seq_trim_end=164, plot_start=0, plot_end=164, pas_downscaling=0.5, pas_downscale_mode='frac', fig_name=None, fig_dpi=300) :
    """Draw a per-gene SNV mutation map: a letter track (ax[0]) whose letter
    heights reflect the average mutational impact at each position, above a
    4 x L heatmap (ax[1]) of delta log-odds per (position, substituted base).

    mode selects what fills the heatmap: 'pred' shows predictions,
    'subtract' shows measured minus predicted, 'double' interleaves
    measured/predicted rows (8-row map), 'both' overlays predictions as
    triangles on the measured map; any other value shows measured values.

    NOTE(review): the dataframe index is sliced like a string
    (index[snv_pos]), so it is assumed to hold the variant sequence — confirm.

    Returns the most negative column-sum of the final map divided by 3
    (the strongest average down-regulatory position).
    """
    # 4 rows = substituted bases; rows end up bottom-up as T, G, C, A
    # (see the 3 - base flip below). 164 = fixed library sequence length.
    mut_map = np.zeros((4, 164))
    mut_map_pred = np.zeros((4, 164))
    df_gene = None
    if experiment is not None :
        df_gene = df.query("gene == '" + gene_name + "' and experiment == '" + experiment + "'")
    else :
        df_gene = df.query("gene == '" + gene_name + "'")
    # First row's wild-type sequence is the reference; rows with a different
    # wt_seq are skipped in the loop below.
    ref_seq = df_gene['wt_seq'].values[0]
    for index, row in df_gene.iterrows() :
        snv_pos = row['snv_pos']
        if row['wt_seq'] != ref_seq :
            continue
        delta_logodds_true = row['delta_logodds_true']
        delta_logodds_pred = row['delta_logodds_pred']
        # Missing measurements/predictions are treated as zero effect.
        if np.isnan(delta_logodds_true) :
            delta_logodds_true = 0
        if np.isnan(delta_logodds_pred) :
            delta_logodds_pred = 0
        base = 0
        if index[snv_pos] == 'A' :
            base = 0
        elif index[snv_pos] == 'C' :
            base = 1
        elif index[snv_pos] == 'G' :
            base = 2
        elif index[snv_pos] == 'T' :
            base = 3
        # Rows are stored bottom-up (T, G, C, A), hence the 3 - base flip.
        mut_map[3-base, snv_pos] = delta_logodds_true
        mut_map_pred[3-base, snv_pos] = delta_logodds_pred
    if mode == 'pred' :
        mut_map[:, :] = mut_map_pred[:, :]
    #Down-scale PAS mutations
    if pas_downscale_mode != 'frac' :
        # Non-'frac' mode: interpret pas_downscaling as a target value and
        # derive the scaling factor from the strongest down-regulatory column.
        #max_val = np.max(np.abs(mut_map[:, 50:50+6]))
        target_val = pas_downscaling
        max_val = np.min(np.sum(mut_map[:, :], axis=0)) / 3.0
        pas_downscaling = target_val / max_val
    # Attenuate the (dominant) PAS hexamer columns at positions 50-55 so they
    # do not visually swamp the rest of the map.
    mut_map[:, 50:50+6] = mut_map[:, 50:50+6] * pas_downscaling
    mut_map_pred[:, 50:50+6] = mut_map_pred[:, 50:50+6] * pas_downscaling
    #Slice according to seq trim index
    ref_seq = ref_seq[seq_trim_start: seq_trim_end]
    mut_map = mut_map[:, seq_trim_start: seq_trim_end]
    mut_map_pred = mut_map_pred[:, seq_trim_start: seq_trim_end]
    if mode != 'double' :
        fig, ax = plt.subplots(2, 1, figsize=figsize)
    else :
        # 'double' mode needs a taller heatmap panel (8 rows instead of 4).
        fig = plt.figure(figsize=figsize)
        gs = gridspec.GridSpec(2, 1, height_ratios=[1, 2])
        ax0 = plt.subplot(gs[0])
        ax1 = plt.subplot(gs[1])
        ax = [ax0, ax1]
    # bias anchors the zero-effect baseline of the letter track; max_score is
    # the height reached by the strongest average down-regulatory position.
    bias = np.max(np.sum(mut_map[:, :], axis=0)) / 3.0 + 0.5
    max_score = np.min(np.sum(mut_map[:, :], axis=0)) / 3.0 * -1 + bias
    for i in range(plot_start, plot_end) :
        # Average effect of the 3 possible substitutions, sign-flipped so
        # down-regulatory positions get tall letters.
        mutability_score = np.sum(mut_map[:, i]) / 3.0 * -1 + bias
        letterAt(ref_seq[i], i + 0.5, 0, mutability_score, ax[0])
    # Dashed line marks the zero-effect baseline.
    ax[0].plot([0, mut_map.shape[1]], [bias, bias], color='black', linestyle='--')
    # Outline clinically annotated variants on the heatmap.
    for index, row in df_gene.iterrows() :
        snv_pos = row['snv_pos'] - seq_trim_start
        if row['wt_seq'][seq_trim_start: seq_trim_end] != ref_seq or snv_pos >= seq_trim_end :
            continue
        base = 0
        if index[row['snv_pos']] == 'A' :
            base = 0
        elif index[row['snv_pos']] == 'C' :
            base = 1
        elif index[row['snv_pos']] == 'G' :
            base = 2
        elif index[row['snv_pos']] == 'T' :
            base = 3
        if row['significance'] in ['Pathogenic', 'Likely pathogenic'] and mark_pathogenic :
            ax[1].add_patch(Rectangle((snv_pos + border_eta, 3 - base + border_eta), 1 - 2.*border_eta, 1 - 2.*border_eta, fill=False, edgecolor='red', lw=4))
        elif row['significance'] in ['Benign', 'Likely benign'] and mark_benign :
            ax[1].add_patch(Rectangle((snv_pos + border_eta, 3 - base + border_eta), 1 - 2.*border_eta, 1 - 2.*border_eta, fill=False, edgecolor='darkgreen', lw=4))
        elif row['significance'] in ['Undetermined'] and mark_undetermined :
            ax[1].add_patch(Rectangle((snv_pos + border_eta, 3 - base + border_eta), 1 - 2.*border_eta, 1 - 2.*border_eta, fill=False, edgecolor='darkblue', lw=4))
    plt.sca(ax[0])
    plt.yticks([0.5, bias, max_score], [round(bias - 0.5, 2), 0, round((max_score - bias) * -1, 2)], fontsize=16)
    plt.xticks(fontsize=16)
    plt.xlim((plot_start, plot_end))
    plt.ylim((0, max_score))
    plt.tight_layout()
    if mode == 'subtract' :
        # Residual map: measured minus predicted effects.
        subtract_map = mut_map - mut_map_pred
        pcm = ax[1].pcolor(subtract_map, cmap='RdBu_r', vmin=-np.abs(mut_map).max(), vmax=np.abs(mut_map).max())
    elif mode == 'double' :
        # Interleave measured (even rows) and predicted (odd rows).
        double_map = np.zeros((8, mut_map.shape[1]))
        double_map[[0, 2, 4, 6], :] = mut_map[:, :]
        double_map[[1, 3, 5, 7], :] = mut_map_pred[:, :]
        pcm = ax[1].pcolor(double_map, cmap='RdBu_r', vmin=-np.abs(double_map).max(), vmax=np.abs(double_map).max())
    else :
        pcm = ax[1].pcolor(mut_map, cmap='RdBu_r', vmin=-np.abs(mut_map).max(), vmax=np.abs(mut_map).max())
    #fig.colorbar(pcm, ax=ax[1])
    if mode == 'both' :
        # Overlay predictions as per-cell triangles colored by the predicted
        # delta log-odds (same diverging colormap as the base heatmap).
        for i in range(mut_map_pred.shape[0]) :
            for j in range(mut_map_pred.shape[1]) :
                verts = [((j, i), (j+1, i), (j+1, i+1))]
                intensities = np.array([mut_map_pred[i, j]])
                c = collections.PolyCollection(verts)
                norm = mpl.colors.Normalize(vmin=-np.abs(mut_map_pred).max(), vmax=np.abs(mut_map_pred).max())
                rgb_vals = cm.ScalarMappable(norm=norm, cmap=cm.get_cmap('RdBu_r')).to_rgba(intensities)
                c.set_facecolors(rgb_vals)
                ax[1].add_collection(c)
    plt.sca(ax[1])
    ref_seq_list = []
    for c in ref_seq :
        ref_seq_list.append(c)
    # Sequence tick labels are set, then immediately cleared (kept as-is).
    plt.xticks(np.arange(len(ref_seq)) + 0.5, ref_seq_list)
    plt.xticks([], [])
    if mode == 'double' :
        plt.yticks([0.5, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5], ['T', 'T', 'G', 'G', 'C', 'C', 'A', 'A'], fontsize=16)
        plt.axis([plot_start, plot_end, 0, 8])
    else :
        plt.yticks([0.5, 1.5, 2.5, 3.5], ['T', 'G', 'C', 'A'], fontsize=16)
        plt.axis([plot_start, plot_end, 0, 4])
    plt.gca().xaxis.tick_top()
    #plt.savefig(name + '.svg', bbox_inches='tight')
    #plt.savefig(name + '.png', bbox_inches='tight')
    plt.tight_layout()
    if fig_name is not None :
        plt.savefig(fig_name + '.png', transparent=True, dpi=fig_dpi)
        plt.savefig(fig_name + '.svg')
        plt.savefig(fig_name + '.eps')
    plt.show()
    return np.min(np.sum(mut_map[:, :], axis=0)) / 3.0
def mut_map_with_cuts(df, gene_name, cut_snvs, mode, column_suffix='', figsize=(12, 6), height_ratios=[6, 2, 2], bg_alpha=0.5, plot_simple_mutmap=True, annotate_folds=True, plot_true_cuts=True, plot_pred_cuts=False, scale_pred_cuts=False, fold_change_from_cut_range=None, ref_var_scales=[0.3, 0.7], border_eta = 0.085, seq_trim_start=0, seq_trim_end=164, plot_start=0, plot_end=164, plot_as_bars=True, pas_downscaling=0.5, fig_name=None, fig_dpi=300) :
    """Three-panel figure for one gene: cleavage-position distributions
    (ax[0]), a reference-sequence letter track with the highlighted SNVs
    (ax[1]), and a 4 x L delta log-odds heatmap (ax[2]).

    cut_snvs is a list of (snv_pos, variant_nt, color) triplets; each one
    gets its own cut-probability curve, a colored variant letter in the
    middle panel and a colored outline in the heatmap. mode selects which
    'delta_logodds_<mode><column_suffix>' column fills the heatmap.
    When scale_pred_cuts is set, predicted variant cut distributions are
    re-anchored on the measured reference distribution (see below).

    NOTE(review): the dataframe index is sliced like a string
    (index[snv_pos], df_pos.index.str.slice(...)), so it is assumed to hold
    the variant sequence — confirm.
    """
    # 4 rows = substituted bases, stored bottom-up as T, G, C, A.
    mut_map = np.zeros((4, 164))
    df_gene = df.query("gene == '" + gene_name + "'")
    # First row's wild-type sequence is the reference; rows with another
    # wt_seq are skipped.
    ref_seq = df_gene['wt_seq'].values[0]
    for index, row in df_gene.iterrows() :
        snv_pos = row['snv_pos']
        if row['wt_seq'] != ref_seq :
            continue
        delta_logodds_true = row['delta_logodds_' + mode + column_suffix]
        # Missing measurements are treated as zero effect.
        if np.isnan(delta_logodds_true) :
            delta_logodds_true = 0
        base = 0
        if index[snv_pos] == 'A' :
            base = 0
        elif index[snv_pos] == 'C' :
            base = 1
        elif index[snv_pos] == 'G' :
            base = 2
        elif index[snv_pos] == 'T' :
            base = 3
        # Rows are stored bottom-up (T, G, C, A), hence the 3 - base flip.
        mut_map[3-base, snv_pos] = delta_logodds_true
    #Down-scale PAS mutations
    # Attenuate the dominant PAS hexamer columns (positions 50-55).
    mut_map[:, 50:50+6] = mut_map[:, 50:50+6] * pas_downscaling
    #Slice according to seq trim index
    ref_seq = ref_seq[seq_trim_start: seq_trim_end]
    mut_map = mut_map[:, seq_trim_start: seq_trim_end]
    fig = plt.figure(figsize=figsize)
    gs = gridspec.GridSpec(3, 1, height_ratios=height_ratios)
    ax0 = plt.subplot(gs[0])
    ax1 = plt.subplot(gs[1])
    ax2 = plt.subplot(gs[2])
    ax = [ax0, ax1, ax2]
    # bias anchors zero effect in the letter track; max_score is the height
    # of the strongest average down-regulatory position.
    bias = np.max(np.sum(mut_map[:, :], axis=0)) / 3.0 + 0.5
    max_score = np.min(np.sum(mut_map[:, :], axis=0)) / 3.0 * -1 + bias
    for i in range(plot_start, plot_end) :
        mutability_score = np.sum(mut_map[:, i]) / 3.0 * -1 + bias
        color = 'black'
        alpha = bg_alpha
        char_height = 1
        for snv_pos, snv_nt, snv_color in cut_snvs :
            if i == snv_pos - seq_trim_start :
                # Highlighted SNV position: shrink the reference letter and
                # draw the colored variant letter stacked on top of it.
                #color = snv_color#None
                #alpha = 1.0
                color = 'black'
                alpha = bg_alpha
                char_height = ref_var_scales[0]
                letterAt(snv_nt, i + 0.5, ref_var_scales[0], ref_var_scales[1], ax[1], color=snv_color, alpha=1.0)
                break
        if not plot_simple_mutmap :
            letterAt(ref_seq[i], i + 0.5, 0, mutability_score, ax[1], color=color, alpha=alpha)
        else :
            letterAt(ref_seq[i], i + 0.5, 0, char_height, ax[1], color=color, alpha=alpha)
    if not plot_simple_mutmap :
        # Dashed zero-effect baseline.
        ax[1].plot([0, mut_map.shape[1]], [bias, bias], color='black', linestyle='--')
    plt.sca(ax[1])
    if not plot_simple_mutmap :
        plt.yticks([0.5, bias, max_score], [round(bias - 0.5, 2), 0, round((max_score - bias) * -1, 2)], fontsize=16)
        plt.ylim((0, max_score))
    else :
        plt.yticks([], [])
        plt.ylim((0, ref_var_scales[0] + ref_var_scales[1]))
        plt.axis('off')
    plt.xlim((plot_start, plot_end))
    plt.tight_layout()
    pcm = ax[2].pcolor(mut_map, cmap='RdBu_r', vmin=-np.abs(mut_map).max(), vmax=np.abs(mut_map).max())
    #fig.colorbar(pcm, ax=ax[1])
    plt.sca(ax[2])
    ref_seq_list = []
    for c in ref_seq :
        ref_seq_list.append(c)
    #plt.xticks(np.arange(len(ref_seq)) + 0.5, ref_seq_list)
    plt.xticks([], [])
    plt.yticks([0.5, 1.5, 2.5, 3.5], ['T', 'G', 'C', 'A'], fontsize=16)
    #plt.gca().xaxis.tick_top()
    #plt.xticks(fontsize=16)
    plt.axis([plot_start, plot_end, 0, 4])
    # Wash out every heatmap cell except the highlighted SNV cells by drawing
    # a semi-transparent white rectangle over it.
    for i in range(plot_start, plot_end) :
        for j in range(0, 4) :
            base = 'A'
            if j == 3 :
                base = 'A'
            elif j == 2 :
                base = 'C'
            elif j == 1 :
                base = 'G'
            elif j == 0 :
                base = 'T'
            is_marked = False
            for snv_pos, snv_nt, _ in cut_snvs :
                if i == snv_pos - seq_trim_start and base == snv_nt :
                    is_marked = True
                    break
            if not is_marked :
                ax[2].add_patch(Rectangle((i, j), 1, 1, fill=True, facecolor='white', alpha=1. - bg_alpha, edgecolor=None))
    # Per-position cleavage probabilities of the reference sequence.
    ref_cut_true = df_gene['cut_prob_true_ref'].values[0][seq_trim_start: seq_trim_end]
    ref_cut_pred = df_gene['cut_prob_pred_ref'].values[0][seq_trim_start: seq_trim_end]
    # Tracks the tallest plotted curve so the y-limit can be set at the end.
    max_y_var_hat = 0
    for snv_pos, snv_nt, snv_color in cut_snvs :
        df_pos = df_gene.query("snv_pos == " + str(snv_pos))
        var_cut_true = df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_true_var'][0][seq_trim_start: seq_trim_end]
        var_cut_pred = df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_pred_var'][0][seq_trim_start: seq_trim_end]
        if scale_pred_cuts :
            # Re-anchor the predicted variant cuts on the measured reference:
            # apply the predicted per-position odds fold-change to the
            # measured reference cut odds, then map back to probabilities.
            ref_pred_logodds = np.zeros(ref_cut_pred.shape)
            var_pred_logodds = np.zeros(var_cut_pred.shape)
            ref_pred_logodds[ref_cut_pred > 0.0] = np.log(ref_cut_pred[ref_cut_pred > 0.0] / (1.0 - ref_cut_pred[ref_cut_pred > 0.0]))
            var_pred_logodds[var_cut_pred > 0.0] = np.log(var_cut_pred[var_cut_pred > 0.0] / (1.0 - var_cut_pred[var_cut_pred > 0.0]))
            pred_fold_change = np.exp(var_pred_logodds - ref_pred_logodds)
            #var_cut_pred = ref_cut_true * pred_fold_change
            ref_cut_true_odds = ref_cut_true / (1. - ref_cut_true)
            var_cut_pred_odds = ref_cut_true_odds * pred_fold_change
            var_cut_pred = var_cut_pred_odds / (1. + var_cut_pred_odds)
        if plot_true_cuts :
            max_y_var_hat = max(max_y_var_hat, np.max(var_cut_true[plot_start:plot_end]))
        if plot_pred_cuts :
            max_y_var_hat = max(max_y_var_hat, np.max(var_cut_pred[plot_start:plot_end]))
        if plot_as_bars :
            if plot_true_cuts :
                ax[0].step(np.arange(plot_end)[plot_start:plot_end] + 1, var_cut_true[plot_start:plot_end], color=snv_color, alpha=0.85, where='mid', linewidth=3)
            if plot_pred_cuts :
                ax[0].step(np.arange(plot_end)[plot_start:plot_end] + 1, var_cut_pred[plot_start:plot_end], color=snv_color, linestyle='--', alpha=0.85, where='mid', linewidth=3)
        else :
            if plot_true_cuts :
                ax[0].plot(np.arange(plot_end)[plot_start:plot_end] + 1, var_cut_true[plot_start:plot_end], color=snv_color, linestyle='-', linewidth=3, alpha=0.7)
            if plot_pred_cuts :
                ax[0].plot(np.arange(plot_end)[plot_start:plot_end] + 1, var_cut_pred[plot_start:plot_end], color=snv_color, linestyle='--', linewidth=3, alpha=0.7)
        #Highlight specific snv in mutation map
        base = 0
        if snv_nt == 'A' :
            base = 0
        elif snv_nt == 'C' :
            base = 1
        elif snv_nt == 'G' :
            base = 2
        elif snv_nt == 'T' :
            base = 3
        #ax[2].add_patch(Rectangle((snv_pos, 3 - base), 1, 1, fill=False, edgecolor=snv_color, lw=4))
        ax[2].add_patch(Rectangle((snv_pos - seq_trim_start + border_eta, 3 - base + border_eta), 1 - 2.*border_eta, 1 - 2.*border_eta, fill=False, edgecolor=snv_color, lw=4))
        #ax[1].add_patch(Rectangle((snv_pos, 0), 1, max_score, fill=False, edgecolor=snv_color, lw=4))
    if plot_true_cuts :
        max_y_var_hat = max(max_y_var_hat, np.max(ref_cut_true[plot_start:plot_end]))
    if plot_pred_cuts and not scale_pred_cuts :
        max_y_var_hat = max(max_y_var_hat, np.max(ref_cut_pred[plot_start:plot_end]))
    #Annotate min/max delta isoform log odds
    min_mutmap_logodds = round((max_score - bias) * -1, 2)
    max_mutmap_logodds = round(bias - 0.5, 2)
    annot_text = 'Min = ' + str(min_mutmap_logodds) + '\nMax = ' + str(max_mutmap_logodds)
    ax[0].text(0.05, 0.80, annot_text,
        horizontalalignment='left', verticalalignment='bottom',
        transform=ax[0].transAxes,
        color='black', fontsize=16, weight="bold")
    snv_i = 0
    for snv_pos, snv_nt, snv_color in cut_snvs :
        if annotate_folds :
            if plot_true_cuts :
                # Measured fold change: either exp(delta log-odds), or
                # recomputed from a window of cut positions when
                # fold_change_from_cut_range is given.
                df_pos = df_gene.query("snv_pos == " + str(snv_pos))
                df_pos = df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]
                fold_change = np.exp(df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['delta_logodds_true' + column_suffix][0])
                if fold_change_from_cut_range :
                    fold_range_start = fold_change_from_cut_range[0]
                    fold_range_end = fold_change_from_cut_range[1]
                    ref_p = np.sum(df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_true_ref'][0][fold_range_start: fold_range_end])
                    var_p = np.sum(df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_true_var'][0][fold_range_start: fold_range_end])
                    fold_change = (var_p / (1. - var_p)) / (ref_p / (1. - ref_p))
                # Down-regulation is shown in red as an inverted (1/x) fold.
                fold_color = 'darkgreen'
                if fold_change < 1. :
                    fold_color = 'red'
                    fold_change = 1. / fold_change
                #fold_color = snv_color
                row_multiplier = 0.1
                row_bias = 0
                if plot_pred_cuts :
                    # Leave room for the predicted row printed below.
                    row_multiplier = 0.2
                ax[0].text(0.70, 0.80 - row_multiplier * snv_i, snv_nt + ':',
                    horizontalalignment='left', verticalalignment='bottom',
                    transform=ax[0].transAxes,
                    color=snv_color, fontsize=16, weight="bold")
                ax[0].text(0.73, 0.80 - row_multiplier * snv_i, 'Fold change = ' + str(round(fold_change, 2)),
                    horizontalalignment='left', verticalalignment='bottom',
                    transform=ax[0].transAxes,
                    color=fold_color, fontsize=16, weight="bold")
            if plot_pred_cuts :
                # Predicted fold change, analogous to the measured one above.
                df_pos = df_gene.query("snv_pos == " + str(snv_pos))
                df_pos = df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]
                fold_change = np.exp(df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['delta_logodds_pred' + column_suffix][0])
                if fold_change_from_cut_range :
                    fold_range_start = fold_change_from_cut_range[0]
                    fold_range_end = fold_change_from_cut_range[1]
                    ref_p = np.sum(df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_pred_ref'][0][fold_range_start: fold_range_end])
                    var_p = np.sum(df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_pred_var'][0][fold_range_start: fold_range_end])
                    if scale_pred_cuts :
                        # Same re-anchoring as for the plotted curves: apply
                        # the predicted odds fold-change to the measured
                        # reference cuts before summing the window.
                        ref_p = np.sum(df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_true_ref'][0][fold_range_start: fold_range_end])
                        ref_cut_true_t = df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_true_ref'][0]#[0: seq_trim_end]
                        ref_cut_pred_t = df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_pred_ref'][0]#[0: seq_trim_end]
                        var_cut_pred_t = df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_pred_var'][0]#[0: seq_trim_end]
                        ref_pred_logodds = np.zeros(ref_cut_pred_t.shape)
                        var_pred_logodds = np.zeros(var_cut_pred_t.shape)
                        ref_pred_logodds[ref_cut_pred_t > 0.0] = np.log(ref_cut_pred_t[ref_cut_pred_t > 0.0] / (1.0 - ref_cut_pred_t[ref_cut_pred_t > 0.0]))
                        var_pred_logodds[var_cut_pred_t > 0.0] = np.log(var_cut_pred_t[var_cut_pred_t > 0.0] / (1.0 - var_cut_pred_t[var_cut_pred_t > 0.0]))
                        pred_fold_change = np.exp(var_pred_logodds - ref_pred_logodds)
                        #var_cut_pred = ref_cut_true * pred_fold_change
                        ref_cut_true_odds = ref_cut_true_t / (1. - ref_cut_true_t)
                        var_cut_pred_odds = ref_cut_true_odds * pred_fold_change
                        var_cut_pred_t = var_cut_pred_odds / (1. + var_cut_pred_odds)
                        var_p = np.sum(var_cut_pred_t[fold_range_start: fold_range_end])
                    fold_change = (var_p / (1. - var_p)) / (ref_p / (1. - ref_p))
                fold_color = 'darkgreen'
                if fold_change < 1. :
                    fold_color = 'red'
                    fold_change = 1. / fold_change
                #fold_color = snv_color
                row_multiplier = 0.1
                row_bias = 0.0
                if plot_true_cuts :
                    # Offset below the measured row printed above.
                    row_multiplier = 0.2
                    row_bias = 0.1
                ax[0].text(0.70, 0.80 - row_multiplier * snv_i - row_bias, snv_nt + ':',
                    horizontalalignment='left', verticalalignment='bottom',
                    transform=ax[0].transAxes,
                    color=snv_color, fontsize=16, weight="bold")
                ax[0].text(0.73, 0.80 - row_multiplier * snv_i - row_bias, 'Predicted change = ' + str(round(fold_change, 2)),
                    horizontalalignment='left', verticalalignment='bottom',
                    transform=ax[0].transAxes,
                    color=fold_color, fontsize=16, weight="bold")
        snv_i += 1
    #Plot reference cut distribution
    if plot_as_bars :
        if plot_true_cuts :
            ax[0].step(np.arange(plot_end)[plot_start:plot_end] + 1, ref_cut_true[plot_start:plot_end], color='black', alpha=0.85, where='mid', linewidth=3)
        if plot_pred_cuts and not scale_pred_cuts :
            ax[0].step(np.arange(plot_end)[plot_start:plot_end] + 1, ref_cut_pred[plot_start:plot_end], color='black', linestyle='--', alpha=0.85, where='mid', linewidth=3)
    else :
        if plot_true_cuts :
            ax[0].plot(np.arange(plot_end)[plot_start:plot_end] + 1, ref_cut_true[plot_start:plot_end], color='black', linestyle='-', linewidth=3, alpha=0.7)
        if plot_pred_cuts and not scale_pred_cuts :
            ax[0].plot(np.arange(plot_end)[plot_start:plot_end] + 1, ref_cut_pred[plot_start:plot_end], color='black', linestyle='--', linewidth=3, alpha=0.7)
    #ax[0].plot([57, 57], [0, max(np.max(ref_cut[:164]), max_y_var_hat)], color='green', linestyle='--', linewidth=3)
    #ax[0].plot([97, 97], [0, max(np.max(ref_cut[:164]), max_y_var_hat)], color='green', linestyle='--', linewidth=3)
    plt.sca(ax[0])
    plt.xlim((plot_start, plot_end))
    plt.yticks(fontsize=16)
    plt.ylim(0, max_y_var_hat * 1.02)
    plt.tick_params(axis='x', which='both', bottom=False, top=False, labelbottom=False)
    plt.tight_layout()
    if fig_name is not None :
        plt.savefig(fig_name + '.png', transparent=True, dpi=fig_dpi)
        plt.savefig(fig_name + '.svg')
        plt.savefig(fig_name + '.eps')
    plt.show()
def append_apadb_isoform_usage(apadb_df, human_variant_df, human_variant_df_filtered) :
    """Join native APADB isoform usage onto two variant dataframes and derive
    scaled usage / delta-PSI columns.

    Parameters
    ----------
    apadb_df : DataFrame with at least 'gene' and 'reads' columns. Not
        modified (the original implementation mutated it in place; a copy is
        now taken internally).
    human_variant_df, human_variant_df_filtered : DataFrames with a 'gene'
        column plus 'delta_logodds_true' and 'delta_logodds_pred' columns.

    Returns
    -------
    (apadb_variant_df, apadb_variant_df_filtered) : the two variant
    dataframes with 'native_usage', 'native_usage_ref',
    'native_usage_true_var', 'native_usage_pred_var', 'delta_psi_true' and
    'delta_psi_pred' columns appended. Genes absent from apadb_df get NaN
    in all derived columns (left join).
    """
    #Add native APADB isoform abundance measurements
    pseudo_count = 1.0
    # Work on a copy so the caller's dataframe is not mutated.
    apadb_df = apadb_df.copy()
    apadb_df['reads'] = apadb_df['reads'] + pseudo_count
    # Keep one row per gene. NOTE(review): the ascending sort means the
    # LOWEST-read entry is kept per duplicated gene — confirm this is intended.
    apadb_df = apadb_df.sort_values(by='reads')
    keep_index = []
    marked_genes = {}
    for index, row in apadb_df.iterrows() :
        if row['gene'] not in marked_genes :
            marked_genes[row['gene']] = True
            keep_index.append(index)
    apadb_df = apadb_df.loc[keep_index]
    # Native usage = this site's reads as a fraction of its gene family's
    # total reads (family = gene name before the first '.').
    apadb_df['gene_fam'] = apadb_df['gene'].apply(lambda x: x.split('.')[0])
    apadb_df['total_reads'] = apadb_df.groupby('gene_fam')['reads'].transform('sum')
    apadb_df['native_usage'] = apadb_df['reads'] / apadb_df['total_reads']
    apadb_df = apadb_df[['gene', 'native_usage']]
    apadb_df = apadb_df.set_index('gene')

    def _append_psi_columns(variant_df) :
        #Add scaled dPSI measure to one variant dataframe
        # Variant usage = sigmoid(logit(native_usage) + delta_logodds).
        native_odds = variant_df['native_usage'] / (1. - variant_df['native_usage'])
        variant_df['native_usage_ref'] = variant_df['native_usage']
        variant_df['native_usage_true_var'] = 1. - 1. / (1. + native_odds * np.exp(variant_df['delta_logodds_true']))
        variant_df['native_usage_pred_var'] = 1. - 1. / (1. + native_odds * np.exp(variant_df['delta_logodds_pred']))
        variant_df['delta_psi_true'] = variant_df['native_usage_true_var'] - variant_df['native_usage_ref']
        variant_df['delta_psi_pred'] = variant_df['native_usage_pred_var'] - variant_df['native_usage_ref']
        return variant_df

    apadb_variant_df = _append_psi_columns(human_variant_df.join(apadb_df, on='gene', how='left'))
    apadb_variant_df_filtered = _append_psi_columns(human_variant_df_filtered.join(apadb_df, on='gene', how='left'))
    return apadb_variant_df, apadb_variant_df_filtered
def plot_count_regions(df, no_denovo=False, plot_frac=False, count_vs_all=True, annotation_height=1.0, significance_level=1.0, delta_threshes=[], delta_linestyles=[], figsize=(8,6), fig_name=None, plot_start=-50, plot_end=100, fig_dpi=300, pred_column='delta_logodds_pred', true_column='delta_logodds_true', snv_pos_column='snv_pos') :
    """Summary plot of how many SNVs (or which fraction of the library)
    exceed each delta log-odds threshold, per region and effect direction.

    For the USE (library positions 0-49) and DSE (positions 56+) regions,
    one horizontal line is drawn per threshold in delta_threshes (styled by
    the matching entry in delta_linestyles): up-regulatory counts above zero,
    down-regulatory counts mirrored below the region-annotation band.
    Coordinates are stored in library space (PAS hexamer at 50-55); the x
    axis is shifted by -50 so it reads relative to the pPAS.

    NOTE(review): the dataframe index is sliced like a string
    (index[snv_pos-5:...]), so it is assumed to hold the variant sequence —
    confirm.
    """
    fig = plt.figure(figsize=figsize)
    # Keep the unfiltered frame: fractions are computed against it.
    df_orig = df.copy()
    df = df.query("delta_p_val < " + str(significance_level))
    df = df.query("variant != 'indel'")
    if no_denovo :
        # Drop SNVs that create or destroy a canonical PAS hexamer anywhere
        # within +/-5 nt of the variant position.
        pas_dict = {
            'AATAAA' : True,
            'ATTAAA' : True,
            'TATAAA' : True,
            'GATAAA' : True,
            'CATAAA' : True,
            'AGTAAA' : True,
            'ACTAAA' : True,
        }
        keep_index = []
        i = 0
        for index, row in df.iterrows() :
            snv_pos = row['snv_pos']
            var_region = index[snv_pos-5:snv_pos+5+1]
            ref_region = row['wt_seq'][snv_pos-5:snv_pos+5+1]
            pas_disturbed = False
            for pas in pas_dict :
                if pas in var_region and pas not in ref_region :
                    pas_disturbed = True
                    break
                elif pas not in var_region and pas in ref_region :
                    pas_disturbed = True
                    break
            if not pas_disturbed :
                keep_index.append(i)
            i += 1
        df = df.iloc[keep_index]
    border_eta = 0.00
    ax = plt.gca()
    # Region annotation band below y=0 (x in pPAS-relative coordinates):
    # USE, PAS hexamer (green), DSE, FDSE.
    ax.add_patch(Rectangle((-50 + border_eta, -annotation_height + border_eta), 50 - 2.*border_eta, annotation_height - 2.*border_eta, fill=True, facecolor='white', edgecolor='black', lw=1.5))
    ax.add_patch(Rectangle((0 + border_eta, -annotation_height + border_eta), 6 - 2.*border_eta, annotation_height - 2.*border_eta, fill=True, facecolor='darkgreen', edgecolor='black', lw=1.5))
    ax.add_patch(Rectangle((6 + border_eta, -annotation_height + border_eta), 54 - 2.*border_eta, annotation_height - 2.*border_eta, fill=True, facecolor='white', edgecolor='black', lw=1.5))
    ax.add_patch(Rectangle((60 + border_eta, -annotation_height + border_eta), 75 - 2.*border_eta, annotation_height - 2.*border_eta, fill=True, facecolor='white', edgecolor='black', lw=1.5))
    # Label a region only when at least 10 nt of it is visible.
    use_start = plot_start
    use_end = 0
    if use_end - use_start > 10 :
        ax.text(use_start + (use_end - use_start) / 2., -annotation_height/2., 'USE', horizontalalignment='center', verticalalignment='center', color='black', fontsize=16, weight="bold")
    dse_start = 6
    dse_end = min(60, plot_end)
    if dse_end - dse_start > 10 :
        ax.text(dse_start + (dse_end - dse_start) / 2., -annotation_height/2., 'DSE', horizontalalignment='center', verticalalignment='center', color='black', fontsize=16, weight="bold")
    #USE upregulatory
    snv_coords = np.arange(50)
    df_sel = df.query("snv_pos >= 0 and snv_pos <= 49 and delta_logodds_true > 0.0")
    ls = []
    for delta_thresh, delta_style in zip(delta_threshes, delta_linestyles) :
        df_above = df_sel.query("delta_logodds_true > " + str(delta_thresh))
        snv_count = len(df_above)
        # Denominator: either same-region SNVs or all up-regulatory SNVs.
        orig_count = len(df_orig.query("snv_pos >= 0 and snv_pos <= 49 and delta_logodds_true > 0.0"))
        if count_vs_all :
            orig_count = len(df_orig.query("delta_logodds_true > 0.0"))
        frac = float(snv_count) / float(orig_count)
        l1 = None
        if plot_frac :
            #print('USE upregulatory, delta > ' + str(delta_thresh) + ', frac = ' + str(frac))
            l1 = plt.plot([snv_coords[0] - 50, snv_coords[-1] - 50], [frac, frac], color='black', linewidth=1.5, linestyle=delta_style, label='Fold Change > ' + str(round(np.exp(delta_thresh), 1)))
        else :
            #print('USE upregulatory, delta > ' + str(delta_thresh) + ', count = ' + str(snv_count))
            l1 = plt.plot([snv_coords[0] - 50, snv_coords[-1] - 50], [snv_count, snv_count], color='black', linewidth=1.5, linestyle=delta_style, label='Fold Change > ' + str(round(np.exp(delta_thresh), 1)))
        # Only this first section feeds the legend (one handle per threshold).
        ls.append(l1[0])
    #USE downregulatory
    snv_coords = np.arange(50)
    df_sel = df.query("snv_pos >= 0 and snv_pos <= 49 and delta_logodds_true < 0.0").sort_values(by='delta_logodds_true', ascending=False)
    for delta_thresh, delta_style in zip(delta_threshes, delta_linestyles) :
        df_above = df_sel.query("delta_logodds_true < -" + str(delta_thresh))
        snv_count = len(df_above)
        orig_count = len(df_orig.query("snv_pos >= 0 and snv_pos <= 49 and delta_logodds_true < 0.0"))
        if count_vs_all :
            orig_count = len(df_orig.query("delta_logodds_true < 0.0"))
        frac = float(snv_count) / float(orig_count)
        # Mirrored below the annotation band.
        if plot_frac :
            #print('USE downregulatory, delta < ' + str(delta_thresh) + ', frac = ' + str(frac))
            plt.plot([snv_coords[0] - 50, snv_coords[-1] - 50], [-frac-annotation_height, -frac-annotation_height], color='black', linewidth=1.5, linestyle=delta_style)
        else :
            #print('USE downregulatory, delta < ' + str(delta_thresh) + ', count = ' + str(snv_count))
            plt.plot([snv_coords[0] - 50, snv_coords[-1] - 50], [-snv_count-annotation_height, -snv_count-annotation_height], color='black', linewidth=1.5, linestyle=delta_style)
    #DSE upregulatory
    snv_coords = np.arange(dse_end) + 56
    df_sel = df.query("snv_pos >= 56 and snv_pos < " + str(snv_coords[-1]) + " and delta_logodds_true > 0.0")
    for delta_thresh, delta_style in zip(delta_threshes, delta_linestyles) :
        df_above = df_sel.query("delta_logodds_true > " + str(delta_thresh))
        snv_count = len(df_above)
        orig_count = len(df_orig.query("snv_pos >= 56 and snv_pos < " + str(snv_coords[-1]) + " and delta_logodds_true > 0.0"))
        if count_vs_all :
            orig_count = len(df_orig.query("delta_logodds_true > 0.0"))
        frac = float(snv_count) / float(orig_count)
        if plot_frac :
            #print('DSE upregulatory, delta > ' + str(delta_thresh) + ', frac = ' + str(frac))
            plt.plot([snv_coords[0] - 50, snv_coords[-1] - 50], [frac, frac], color='black', linewidth=1.5, linestyle=delta_style)
        else :
            #print('DSE upregulatory, delta > ' + str(delta_thresh) + ', count = ' + str(snv_count))
            plt.plot([snv_coords[0] - 50, snv_coords[-1] - 50], [snv_count, snv_count], color='black', linewidth=1.5, linestyle=delta_style)
    #DSE downregulatory
    snv_coords = np.arange(dse_end) + 56
    df_sel = df.query("snv_pos >= 56 and snv_pos < " + str(snv_coords[-1]) + " and delta_logodds_true < 0.0")
    for delta_thresh, delta_style in zip(delta_threshes, delta_linestyles) :
        df_above = df_sel.query("delta_logodds_true < -" + str(delta_thresh))
        snv_count = len(df_above)
        orig_count = len(df_orig.query("snv_pos >= 56 and snv_pos < " + str(snv_coords[-1]) + " and delta_logodds_true < 0.0"))
        if count_vs_all :
            orig_count = len(df_orig.query("delta_logodds_true < 0.0"))
        frac = float(snv_count) / float(orig_count)
        if plot_frac :
            #print('DSE downregulatory, delta < ' + str(delta_thresh) + ', frac = ' + str(frac))
            plt.plot([snv_coords[0] - 50, snv_coords[-1] - 50], [-frac-annotation_height, -frac-annotation_height], color='black', linewidth=1.5, linestyle=delta_style)
        else :
            #print('DSE downregulatory, delta < ' + str(delta_thresh) + ', count = ' + str(snv_count))
            plt.plot([snv_coords[0] - 50, snv_coords[-1] - 50], [-snv_count-annotation_height, -snv_count-annotation_height], color='black', linewidth=1.5, linestyle=delta_style)
    if plot_start != -50 or plot_end != 100 :
        plt.xticks([plot_start, 0, 6, plot_end], [plot_start, 0, 6, plot_end], fontsize=18)
    else :
        plt.xticks([-100, -50, -25, 0, 6, 25, 50, 100], [-100, -50, -25, 0, 6, 25, 50, 100], fontsize=18)
    # Mirrored y tick labels: negative axis values display as positive
    # fractions (the lower half is the down-regulatory mirror).
    plt.yticks([-0.5, -0.4, -0.3, -0.2, -0.1, 0.0, 0.1, 0.2, 0.3], [0.4, 0.3, 0.2, 0.1, 0.0, 0.0, 0.1, 0.2, 0.3], fontsize=16)
    plt.xlabel('Position relative to pPAS', fontsize=18)
    plt.ylabel('Fraction of library', fontsize=18)
    plt.title('Summary Delta Usage', fontsize=18)
    plt.xlim(plot_start, dse_end)
    plt.legend(handles=ls, fontsize=12, loc='upper left')
    plt.tight_layout()
    if fig_name is not None :
        plt.savefig(fig_name + '.png', dpi=fig_dpi, transparent=True)
        plt.savefig(fig_name + '.svg')
        plt.savefig(fig_name + '.eps')
    plt.show()
def plot_perc_positions(df, no_denovo=False, significance_level=1.0, percentiles=[], percentile_colors=[], figsize=(8,6), fig_name=None, plot_start=-50, plot_end=100, fig_dpi=300, pred_column='delta_logodds_pred', true_column='delta_logodds_true', snv_pos_column='snv_pos') :
fig = plt.figure(figsize=figsize)
df = df.query("delta_p_val < " + str(significance_level))
df = df.query("variant != 'indel'")
if no_denovo :
pas_dict = {
'AATAAA' : True,
'ATTAAA' : True,
'TATAAA' : True,
'GATAAA' : True,
'CATAAA' : True,
'AGTAAA' : True,
'ACTAAA' : True,
}
keep_index = []
i = 0
for index, row in df.iterrows() :
snv_pos = row['snv_pos']
var_region = index[snv_pos-5:snv_pos+5+1]
ref_region = row['wt_seq'][snv_pos-5:snv_pos+5+1]
pas_disturbed = False
for pas in pas_dict :
if pas in var_region and pas not in ref_region :
pas_disturbed = True
break
elif pas not in var_region and pas in ref_region :
pas_disturbed = True
break
if not pas_disturbed :
keep_index.append(i)
i += 1
df = df.iloc[keep_index]
annotation_height = 1.0
border_eta = 0.00
ax = plt.gca()
ax.add_patch(Rectangle((-50 + border_eta, -annotation_height + border_eta), 50 - 2.*border_eta, annotation_height - 2.*border_eta, fill=True, facecolor='white', edgecolor='black', lw=1.5))
ax.add_patch(Rectangle((0 + border_eta, -annotation_height + border_eta), 6 - 2.*border_eta, annotation_height - 2.*border_eta, fill=True, facecolor='darkgreen', edgecolor='black', lw=1.5))
ax.add_patch(Rectangle((6 + border_eta, -annotation_height + border_eta), 54 - 2.*border_eta, annotation_height - 2.*border_eta, fill=True, facecolor='white', edgecolor='black', lw=1.5))
ax.add_patch(Rectangle((60 + border_eta, -annotation_height + border_eta), 75 - 2.*border_eta, annotation_height - 2.*border_eta, fill=True, facecolor='white', edgecolor='black', lw=1.5))
use_start = plot_start
use_end = 0
if use_end - use_start > 10 :
ax.text(use_start + (use_end - use_start) / 2., -annotation_height/2., 'USE', horizontalalignment='center', verticalalignment='center', color='black', fontsize=16, weight="bold")
dse_start = 6
dse_end = min(60, plot_end)
if dse_end - dse_start > 10 :
ax.text(dse_start + (dse_end - dse_start) / 2., -annotation_height/2., 'DSE', horizontalalignment='center', verticalalignment='center', color='black', fontsize=16, weight="bold")
ls = []
#USE upregulatory
snv_coords = np.arange(50)
df_sel = df.query("snv_pos >= 0 and snv_pos <= 49 and delta_logodds_true > 0.0").sort_values(by='delta_logodds_true')
for percentile, percentile_color in zip(percentiles, percentile_colors) :
snv_means = np.zeros(snv_coords.shape[0])
snv_counts = np.zeros(snv_coords.shape[0])
unique_snv_pos = df_sel['snv_pos'].unique()
for snv_pos in unique_snv_pos :
df_snv_pos = df_sel.query("snv_pos == " + str(snv_pos))
df_perc = df_snv_pos.sort_values(by='delta_logodds_true').iloc[int(percentile * len(df_snv_pos)):]
snv_means[snv_coords == snv_pos] = np.min(np.ravel(df_perc['delta_logodds_true'].values))
snv_counts[snv_coords == snv_pos] = len(df_perc)
l1 = plt.plot(snv_coords - 50, snv_means, color=percentile_color, linewidth=1.5, label='Percentile >= ' + str(int(percentile * 100)) + '%')
ls.append(l1[0])
#USE downregulatory
snv_coords = np.arange(50)
df_sel = df.query("snv_pos >= 0 and snv_pos <= 49 and delta_logodds_true < 0.0").sort_values(by='delta_logodds_true', ascending=False)
for percentile, percentile_color in zip(percentiles, percentile_colors) :
snv_means = np.zeros(snv_coords.shape[0])
snv_counts = np.zeros(snv_coords.shape[0])
unique_snv_pos = df_sel['snv_pos'].unique()
for snv_pos in unique_snv_pos :
df_snv_pos = df_sel.query("snv_pos == " + str(snv_pos))
df_perc = df_snv_pos.sort_values(by='delta_logodds_true', ascending=False).iloc[int(percentile * len(df_snv_pos)):]
snv_means[snv_coords == snv_pos] = np.max(np.ravel(df_perc['delta_logodds_true'].values))
snv_counts[snv_coords == snv_pos] = len(df_perc)
plt.plot(snv_coords - 50, snv_means - 1, color=percentile_color, linewidth=1.5)
#DSE upregulatory
snv_coords = np.arange(dse_end) + 56
df_sel = df.query("snv_pos >= 56 and snv_pos < " + str(snv_coords[-1]) + " and delta_logodds_true > 0.0")
for percentile, percentile_color in zip(percentiles, percentile_colors) :
snv_means = np.zeros(snv_coords.shape[0])
snv_counts = np.zeros(snv_coords.shape[0])
unique_snv_pos = df_sel['snv_pos'].unique()
for snv_pos in unique_snv_pos :
df_snv_pos = df_sel.query("snv_pos == " + str(snv_pos))
df_perc = df_snv_pos.sort_values(by='delta_logodds_true').iloc[int(percentile * len(df_snv_pos)):]
snv_means[snv_coords == snv_pos] = np.min(np.ravel(df_perc['delta_logodds_true'].values))
snv_counts[snv_coords == snv_pos] = len(df_perc)
plt.plot(snv_coords - 50, snv_means, color=percentile_color, linewidth=1.5)
#DSE downregulatory
snv_coords = np.arange(dse_end) + 56
df_sel = df.query("snv_pos >= 56 and snv_pos < " + str(snv_coords[-1]) + " and delta_logodds_true < 0.0")
for percentile, percentile_color in zip(percentiles, percentile_colors) :
snv_means = np.zeros(snv_coords.shape[0])
snv_counts = np.zeros(snv_coords.shape[0])
unique_snv_pos = df_sel['snv_pos'].unique()
for snv_pos in unique_snv_pos :
df_snv_pos = df_sel.query("snv_pos == " + str(snv_pos))
df_perc = df_snv_pos.sort_values(by='delta_logodds_true', ascending=False).iloc[int(percentile * len(df_snv_pos)):]
snv_means[snv_coords == snv_pos] = np.max(np.ravel(df_perc['delta_logodds_true'].values))
snv_counts[snv_coords == snv_pos] = len(df_perc)
plt.plot(snv_coords - 50, snv_means - 1, color=percentile_color, linewidth=1.5)
if plot_start != -50 or plot_end != 100 :
plt.xticks([plot_start, 0, 6, plot_end], [plot_start, 0, 6, plot_end], fontsize=18)
else :
plt.xticks([-100, -50, -25, 0, 6, 25, 50, 100], [-100, -50, -25, 0, 6, 25, 50, 100], fontsize=18)
plt.yticks([-3, -1, 0, 2], [-2, 0, 0, 2], fontsize=18)
#plt.axis([np.min(snv_pos), np.max(snv_pos), np.min((delta_logodds_true)), np.max((delta_logodds_true))])
plt.axis([-50, 100, -6., 4.])
plt.xlabel('Position relative to pPAS', fontsize=18)
plt.ylabel('Observed Delta pPAS logodds', fontsize=18)
plt.title('Summary Delta Usage', fontsize=18)
plt.xlim(plot_start, dse_end)#plot_end)
plt.ylim(-3, 2)
plt.legend(handles=ls, fontsize=12, loc='upper left')
plt.tight_layout()
if fig_name is not None :
plt.savefig(fig_name + '.png', dpi=fig_dpi, transparent=True)
plt.savefig(fig_name + '.svg')
plt.savefig(fig_name + '.eps')
plt.show()
def plot_individual_tgta_map(wt_seq, tgta_1_df, tgta_2_df, plot_start=None, plot_end=None, vmin=-1, vmax=1, interpolate_missing=False, plot_symmetric_doubles=True) :
    """Plot observed delta-logodds maps for single and double TGTA motif insertions.

    Produces, in order: (1) a 1 x L heatmap of mean observed delta logodds per
    single-insertion position (with the wild-type sequence logo above it when a
    specific sequence is given), (2) a scatter of predicted vs. observed delta
    logodds for the single insertions, (3) an L x L heatmap of mean observed
    delta logodds per double-insertion position pair, and (4) the corresponding
    scatter for double insertions.

    Parameters
    ----------
    wt_seq : the wild-type sequence to restrict to, or the string 'mean' to
        aggregate across all background sequences (no logo is drawn then).
    tgta_1_df, tgta_2_df : dataframes of single / double insertion measurements;
        must contain columns 'wt_seq', 'tgta_pos_1' (and 'tgta_pos_2' for
        doubles), 'delta_logodds_true' and 'delta_logodds_pred'.
    plot_start, plot_end : optional x-axis window; defaults to the observed
        position range.
    vmin, vmax : color scale limits for both heatmaps.
    interpolate_missing : fill unobserved interior positions / cells by
        neighbor averaging instead of crossing them out.
    plot_symmetric_doubles : mirror each double observation across the diagonal
        of the pair matrix.
    """
    # All rows share the same sequence length; take it from the first row.
    wt_seq_len = len(tgta_1_df['wt_seq'].values[0])
    tgta_1_seq_df = tgta_1_df
    tgta_2_seq_df = tgta_2_df
    # 'mean' aggregates across backgrounds; otherwise restrict both dataframes
    # to the requested wild-type sequence.
    if wt_seq != 'mean' :
        tgta_1_seq_df = tgta_1_df.query("wt_seq == '" + wt_seq + "'")
        tgta_2_seq_df = tgta_2_df.query("wt_seq == '" + wt_seq + "'")
    # Running min/max of observed positions (singles, and each double slot).
    min_pos = 1000
    max_pos = 0
    min_pos_1 = 1000
    max_pos_1 = 0
    min_pos_2 = 1000
    max_pos_2 = 0
    # Sum / count accumulators for per-position (vector) and per-pair (matrix)
    # delta logodds; converted to means after accumulation.
    delta_vec = np.zeros((1, wt_seq_len))
    delta_mat = np.zeros((wt_seq_len, wt_seq_len))
    count_vec = np.zeros((1, wt_seq_len))
    count_mat = np.zeros((wt_seq_len, wt_seq_len))
    # Track positions and position pairs with no measurements so they can be
    # crossed out or interpolated later.
    unmapped_pos = {}
    for i in range(0, delta_vec.shape[1]) :
        unmapped_pos[i] = True
    unmapped_double_pos = {}
    for i in range(0, delta_vec.shape[1]) :
        j_end = delta_vec.shape[1]
        if not plot_symmetric_doubles :
            j_end = i
        for j in range(0, j_end) :
            unmapped_double_pos[str(i) + '_' + str(j)] = True
    # Accumulate single-insertion observations per position.
    for index, row in tgta_1_seq_df.iterrows() :
        pos = row['tgta_pos_1']
        if pos < min_pos :
            min_pos = pos
        if pos > max_pos :
            max_pos = pos
        delta_logodds_true = row['delta_logodds_true']
        delta_vec[0, pos] += delta_logodds_true
        count_vec[0, pos] += 1.
        unmapped_pos[pos] = False
    # Turn sums into means wherever at least one observation landed.
    delta_vec[count_vec > 0] = delta_vec[count_vec > 0] / count_vec[count_vec > 0]
    if interpolate_missing :
        # Fill interior gaps with the average of the two flanking positions.
        # NOTE(review): a flanking position may itself be unmapped (still 0) —
        # confirm this is acceptable for sparsely sampled data.
        for pos in unmapped_pos :
            if unmapped_pos[pos] == True and pos > min_pos and pos < max_pos :
                delta_vec[0, pos] = (delta_vec[0, pos-1] + delta_vec[0, pos+1]) / 2.
    #Plot overlayed sequence
    fig = None
    if wt_seq != 'mean' :
        # Two-row layout: sequence logo on top, heat vector below.
        fig = plt.figure(figsize=(12, 1.5))
        gs = gridspec.GridSpec(2, 1, height_ratios=[1, 2])
        ax0 = plt.subplot(gs[0])
        ax1 = plt.subplot(gs[1])
        ax = [ax0, ax1]
        # Draw the wild-type letters over the observed position range.
        for j in range(min_pos, max_pos) :
            letterAt(wt_seq[j], j + 0.5, 0, 1, ax[0])
        plt.sca(ax[0])
        if plot_start is not None and plot_end is not None :
            plt.xlim((plot_start, plot_end))
        else :
            plt.xlim((min_pos, max_pos))
        plt.ylim((0, 1))
        plt.xticks([], [])
        plt.yticks([], [])
        plt.axis('off')
    else :
        fig = plt.figure(figsize=(12, 1))
        ax = [None, plt.gca()]
    # 1 x L heatmap of mean single-insertion delta logodds.
    ax[1].pcolor(delta_vec, cmap='bwr', vmin=vmin, vmax=vmax)
    if not interpolate_missing :
        # Cross out unobserved positions with a dashed X.
        for pos in unmapped_pos :
            if unmapped_pos[pos] == True and pos >= min_pos and pos <= max_pos :
                ax[1].plot([pos, pos+1], [0, 1], color='black', alpha=0.5, linewidth=2, linestyle='--')
                ax[1].plot([pos+1, pos], [0, 1], color='black', alpha=0.5, linewidth=2, linestyle='--')
    plt.sca(ax[1])
    # Tick labels re-center the axis so 0 marks sequence position 50
    # (presumably the PAS start — TODO confirm against the library design).
    if plot_start is not None and plot_end is not None :
        plt.xticks(np.array([plot_start, 50, 56, plot_end - 1]) + 0.5, [plot_start - 50, 0, 6, plot_end - 1 - 50], fontsize=18)
        plt.xlim((plot_start, plot_end))
    else :
        plt.xticks(np.array([min_pos, 50, 56, max_pos - 1]) + 0.5, [min_pos - 50, 0, 6, max_pos - 1 - 50], fontsize=18)
        plt.xlim((min_pos, max_pos))
    plt.yticks([], [])
    #plt.tight_layout()
    plt.show()
    #Scatter of observed vs. predicted logodds
    r_val, _ = pearsonr(tgta_1_seq_df['delta_logodds_pred'], tgta_1_seq_df['delta_logodds_true'])
    f = plt.figure(figsize=(4, 4))
    plt.scatter(tgta_1_seq_df['delta_logodds_pred'], tgta_1_seq_df['delta_logodds_true'], alpha=0.5, s=4, c='black')
    annot_text = 'R^2 = ' + str(round(r_val * r_val, 2))
    annot_text += '\nn = ' + str(len(tgta_1_seq_df))
    ax = plt.gca()
    ax.text(0.05, 0.95, annot_text, horizontalalignment='left', verticalalignment='top', transform=ax.transAxes, color='black', fontsize=16, weight="bold")
    plt.xticks(fontsize=18)
    plt.yticks(fontsize=18)
    plt.xlabel('Predicted Delta pPAS Logodds', fontsize=16)
    plt.ylabel('Observed Delta pPAS Logodds', fontsize=16)
    plt.title('Single TGTA', fontsize=18)
    plt.tight_layout()
    plt.show()
    # Nothing more to plot if there are no double-insertion measurements.
    if len(tgta_2_seq_df) == 0 :
        return
    # Accumulate double-insertion observations per position pair, optionally
    # mirrored across the diagonal.
    for index, row in tgta_2_seq_df.iterrows() :
        pos1 = row['tgta_pos_1']
        pos2 = row['tgta_pos_2']
        delta_logodds_true = row['delta_logodds_true']
        delta_mat[pos1, pos2] += delta_logodds_true
        count_mat[pos1, pos2] += 1.
        if plot_symmetric_doubles :
            delta_mat[pos2, pos1] += delta_logodds_true
            count_mat[pos2, pos1] += 1.
        if pos1 < min_pos_1 :
            min_pos_1 = pos1
        if pos1 > max_pos_1 :
            max_pos_1 = pos1
        if pos2 < min_pos_2 :
            min_pos_2 = pos2
        if pos2 > max_pos_2 :
            max_pos_2 = pos2
        unmapped_double_pos[str(pos1) + '_' + str(pos2)] = False
        unmapped_double_pos[str(pos2) + '_' + str(pos1)] = False
    delta_mat[count_mat > 0] = delta_mat[count_mat > 0] / count_mat[count_mat > 0]
    if interpolate_missing :
        for pos in unmapped_pos :
            if unmapped_pos[pos] == True and pos > min_pos and pos < max_pos :
                delta_vec[0, pos] = (delta_vec[0, pos-1] + delta_vec[0, pos+1]) / 2.
        # Fill interior matrix gaps with the mean of the 8 surrounding cells.
        for pos_str in unmapped_double_pos :
            pos1, pos2 = [int(str_part) for str_part in pos_str.split('_')]
            if unmapped_double_pos[pos_str] == True and (pos1 > min_pos_1 and pos1 < max_pos_1) and (pos2 > min_pos_2 and pos2 < max_pos_2) :
                delta_mat[pos1, pos2] = delta_mat[pos1-1, pos2-1] + delta_mat[pos1-1, pos2] + delta_mat[pos1-1, pos2+1]
                delta_mat[pos1, pos2] += delta_mat[pos1, pos2-1] + delta_mat[pos1, pos2+1]
                delta_mat[pos1, pos2] += delta_mat[pos1+1, pos2-1] + delta_mat[pos1+1, pos2] + delta_mat[pos1+1, pos2+1]
                delta_mat[pos1, pos2] /= 8.
    fig = None
    if wt_seq != 'mean' :
        # Logo above the pair matrix when a specific sequence was requested.
        fig = plt.figure(figsize=(8, 8.5))
        gs = gridspec.GridSpec(2, 1, height_ratios=[1, 16])
        ax0 = plt.subplot(gs[0])
        ax1 = plt.subplot(gs[1])
        ax = [ax0, ax1]
        for j in range(min_pos_1, max_pos_1) :
            letterAt(wt_seq[j], j + 0.5, 0, 1, ax[0])
        plt.sca(ax[0])
        if plot_start is not None and plot_end is not None :
            plt.xlim((plot_start, plot_end))
        else :
            plt.xlim((min_pos_1, max_pos_1))
        plt.ylim((0, 1))
        plt.xticks([], [])
        plt.yticks([], [])
        plt.axis('off')
    else :
        fig = plt.figure(figsize=(8, 8))
        ax = [None, plt.gca()]
    # L x L heatmap of mean double-insertion delta logodds.
    ax[1].pcolor(delta_mat, cmap='bwr', vmin=vmin, vmax=vmax)
    if not interpolate_missing :
        # Cross out unobserved cells with a solid X.
        for pos_str in unmapped_double_pos :
            pos1, pos2 = [int(str_part) for str_part in pos_str.split('_')]
            if unmapped_double_pos[pos_str] == True and (pos1 >= min_pos_1 and pos1 <= max_pos_1) and (pos2 >= min_pos_2 and pos2 <= max_pos_2) :
                ax[1].plot([pos1, pos1+1], [pos2, pos2+1], color='black', alpha=0.5, linewidth=2)
                ax[1].plot([pos1, pos1+1], [pos2+1, pos2], color='black', alpha=0.5, linewidth=2)
    plt.sca(ax[1])
    if plot_start is not None and plot_end is not None :
        plt.xticks(np.array([plot_start, 50, 56, plot_end-1]) + 0.5, [plot_start - 50, 0, 6, plot_end-1 - 50], fontsize=18)
        plt.xlim((plot_start, plot_end))
    else :
        plt.xticks(np.array([min_pos_1, 50, 56, max_pos_1-1]) + 0.5, np.array([min_pos_1, 50, 56, max_pos_1-1]) - 50, fontsize=18)
        plt.xlim((min_pos_1, max_pos_1))
    plt.yticks(np.array([min_pos_2, 50, 56, max_pos_2-1]) + 0.5, np.array([min_pos_2, 50, 56, max_pos_2-1]) - 50, fontsize=18)
    plt.ylim((min_pos_2, max_pos_2))
    plt.tight_layout()
    plt.show()
    #Scatter of observed vs. predicted logodds
    r_val, _ = pearsonr(tgta_2_seq_df['delta_logodds_pred'], tgta_2_seq_df['delta_logodds_true'])
    f = plt.figure(figsize=(4, 4))
    plt.scatter(tgta_2_seq_df['delta_logodds_pred'], tgta_2_seq_df['delta_logodds_true'], alpha=0.5, s=4, c='black')
    annot_text = 'R^2 = ' + str(round(r_val * r_val, 2))
    annot_text += '\nn = ' + str(len(tgta_2_seq_df))
    ax = plt.gca()
    ax.text(0.05, 0.95, annot_text, horizontalalignment='left', verticalalignment='top', transform=ax.transAxes, color='black', fontsize=16, weight="bold")
    plt.xticks(fontsize=18)
    plt.yticks(fontsize=18)
    plt.xlabel('Predicted Delta pPAS Logodds', fontsize=16)
    plt.ylabel('Observed Delta pPAS Logodds', fontsize=16)
    plt.title('Double TGTA', fontsize=18)
    plt.tight_layout()
    plt.show()
def plot_individual_tgta_map_nonlin(wt_seq, tgta_1_df, tgta_2_df, plot_start=None, plot_end=None, vmin=-1, vmax=1, interpolate_missing=False, plot_symmetric_doubles=True) :
    """Plot the non-additive (epistatic) component of double TGTA insertions.

    Like plot_individual_tgta_map's pair heatmap, but each double-insertion
    observation is reduced to its deviation from additivity:
    observed_double - (observed_single_1 + observed_single_2),
    where the single-insertion values are looked up from tgta_1_df by the
    corresponding single-insertion variant sequence.

    Parameters mirror plot_individual_tgta_map. Assumes tgta_1_df is indexed
    by variant sequence so that .loc[seq] resolves a single-insertion row —
    TODO confirm against the caller.
    """
    # All rows share the same sequence length; take it from the first row.
    wt_seq_len = len(tgta_1_df['wt_seq'].values[0])
    tgta_1_seq_df = tgta_1_df
    tgta_2_seq_df = tgta_2_df
    # 'mean' aggregates across backgrounds; otherwise restrict both dataframes
    # to the requested wild-type sequence.
    if wt_seq != 'mean' :
        tgta_1_seq_df = tgta_1_df.query("wt_seq == '" + wt_seq + "'")
        tgta_2_seq_df = tgta_2_df.query("wt_seq == '" + wt_seq + "'")
    # Epistasis needs double-insertion data; bail out early if there is none.
    if len(tgta_2_seq_df) == 0 :
        return
    # Running min/max of observed positions (singles, and each double slot).
    min_pos = 1000
    max_pos = 0
    min_pos_1 = 1000
    max_pos_1 = 0
    min_pos_2 = 1000
    max_pos_2 = 0
    # Sum / count accumulators; delta_vec holds single-insertion means (not
    # plotted in this variant), delta_mat holds the epistasis terms.
    delta_vec = np.zeros((1, wt_seq_len))
    delta_mat = np.zeros((wt_seq_len, wt_seq_len))
    count_vec = np.zeros((1, wt_seq_len))
    count_mat = np.zeros((wt_seq_len, wt_seq_len))
    # Track positions and position pairs with no measurements.
    unmapped_pos = {}
    for i in range(0, delta_vec.shape[1]) :
        unmapped_pos[i] = True
    unmapped_double_pos = {}
    for i in range(0, delta_vec.shape[1]) :
        j_end = delta_vec.shape[1]
        if not plot_symmetric_doubles :
            j_end = i
        for j in range(0, j_end) :
            unmapped_double_pos[str(i) + '_' + str(j)] = True
    # Accumulate single-insertion observations per position.
    for index, row in tgta_1_seq_df.iterrows() :
        pos = row['tgta_pos_1']
        if pos < min_pos :
            min_pos = pos
        if pos > max_pos :
            max_pos = pos
        delta_logodds_true = row['delta_logodds_true']
        delta_vec[0, pos] += delta_logodds_true
        count_vec[0, pos] += 1.
        unmapped_pos[pos] = False
    delta_vec[count_vec > 0] = delta_vec[count_vec > 0] / count_vec[count_vec > 0]
    if interpolate_missing :
        # Fill interior single-position gaps by neighbor averaging.
        for pos in unmapped_pos :
            if unmapped_pos[pos] == True and pos > min_pos and pos < max_pos :
                delta_vec[0, pos] = (delta_vec[0, pos-1] + delta_vec[0, pos+1]) / 2.
    # Accumulate the non-additive component per position pair.
    for index, row in tgta_2_seq_df.iterrows() :
        pos1 = row['tgta_pos_1']
        pos2 = row['tgta_pos_2']
        delta_logodds_true = row['delta_logodds_true']
        seq_var = index  # (unused) double-insertion variant sequence
        # Reconstruct each single-insertion variant by splicing 'TGTA' into
        # the wild-type sequence at the corresponding position.
        seq_ref_tgta1 = wt_seq[:pos1] + 'TGTA' + wt_seq[pos1+4:]
        seq_ref_tgta2 = wt_seq[:pos2] + 'TGTA' + wt_seq[pos2+4:]
        delta_logodds_true_tgta1 = tgta_1_seq_df.loc[seq_ref_tgta1]['delta_logodds_true']
        delta_logodds_true_tgta2 = tgta_1_seq_df.loc[seq_ref_tgta2]['delta_logodds_true']
        # Epistasis term: observed double effect minus sum of single effects.
        delta_mat[pos1, pos2] += (delta_logodds_true - (delta_logodds_true_tgta1 + delta_logodds_true_tgta2))
        count_mat[pos1, pos2] += 1.
        if plot_symmetric_doubles :
            delta_mat[pos2, pos1] += (delta_logodds_true - (delta_logodds_true_tgta1 + delta_logodds_true_tgta2))
            count_mat[pos2, pos1] += 1.
        if pos1 < min_pos_1 :
            min_pos_1 = pos1
        if pos1 > max_pos_1 :
            max_pos_1 = pos1
        if pos2 < min_pos_2 :
            min_pos_2 = pos2
        if pos2 > max_pos_2 :
            max_pos_2 = pos2
        unmapped_double_pos[str(pos1) + '_' + str(pos2)] = False
        unmapped_double_pos[str(pos2) + '_' + str(pos1)] = False
    delta_mat[count_mat > 0] = delta_mat[count_mat > 0] / count_mat[count_mat > 0]
    if interpolate_missing :
        for pos in unmapped_pos :
            if unmapped_pos[pos] == True and pos > min_pos and pos < max_pos :
                delta_vec[0, pos] = (delta_vec[0, pos-1] + delta_vec[0, pos+1]) / 2.
        # Fill interior matrix gaps with the mean of the 8 surrounding cells.
        for pos_str in unmapped_double_pos :
            pos1, pos2 = [int(str_part) for str_part in pos_str.split('_')]
            if unmapped_double_pos[pos_str] == True and (pos1 > min_pos_1 and pos1 < max_pos_1) and (pos2 > min_pos_2 and pos2 < max_pos_2) :
                delta_mat[pos1, pos2] = delta_mat[pos1-1, pos2-1] + delta_mat[pos1-1, pos2] + delta_mat[pos1-1, pos2+1]
                delta_mat[pos1, pos2] += delta_mat[pos1, pos2-1] + delta_mat[pos1, pos2+1]
                delta_mat[pos1, pos2] += delta_mat[pos1+1, pos2-1] + delta_mat[pos1+1, pos2] + delta_mat[pos1+1, pos2+1]
                delta_mat[pos1, pos2] /= 8.
    fig = None
    if wt_seq != 'mean' :
        # Logo above the epistasis matrix when a specific sequence was requested.
        fig = plt.figure(figsize=(8, 8.5))
        gs = gridspec.GridSpec(2, 1, height_ratios=[1, 16])
        ax0 = plt.subplot(gs[0])
        ax1 = plt.subplot(gs[1])
        ax = [ax0, ax1]
        for j in range(min_pos_1, max_pos_1) :
            letterAt(wt_seq[j], j + 0.5, 0, 1, ax[0])
        plt.sca(ax[0])
        if plot_start is not None and plot_end is not None :
            plt.xlim((plot_start, plot_end))
        else :
            plt.xlim((min_pos_1, max_pos_1))
        plt.ylim((0, 1))
        plt.xticks([], [])
        plt.yticks([], [])
        plt.axis('off')
    else :
        fig = plt.figure(figsize=(8, 8))
        ax = [None, plt.gca()]
    # L x L heatmap of the mean non-additive component.
    ax[1].pcolor(delta_mat, cmap='bwr', vmin=vmin, vmax=vmax)
    if not interpolate_missing :
        # Cross out unobserved cells with a solid X.
        for pos_str in unmapped_double_pos :
            pos1, pos2 = [int(str_part) for str_part in pos_str.split('_')]
            if unmapped_double_pos[pos_str] == True and (pos1 >= min_pos_1 and pos1 <= max_pos_1) and (pos2 >= min_pos_2 and pos2 <= max_pos_2) :
                ax[1].plot([pos1, pos1+1], [pos2, pos2+1], color='black', alpha=0.5, linewidth=2)
                ax[1].plot([pos1, pos1+1], [pos2+1, pos2], color='black', alpha=0.5, linewidth=2)
    plt.sca(ax[1])
    # Ticks re-center the axes so 0 marks sequence position 50 (presumably the
    # PAS start — TODO confirm against the library design).
    if plot_start is not None and plot_end is not None :
        plt.xticks(np.array([plot_start, 50, 56, plot_end-1]) + 0.5, [plot_start - 50, 0, 6, plot_end-1 - 50], fontsize=18)
        plt.xlim((plot_start, plot_end))
    else :
        plt.xticks(np.array([min_pos_1, 50, 56, max_pos_1-1]) + 0.5, np.array([min_pos_1, 50, 56, max_pos_1-1]) - 50, fontsize=18)
        plt.xlim((min_pos_1, max_pos_1))
    plt.yticks(np.array([min_pos_2, 50, 56, max_pos_2-1]) + 0.5, np.array([min_pos_2, 50, 56, max_pos_2-1]) - 50, fontsize=18)
    plt.ylim((min_pos_2, max_pos_2))
    plt.tight_layout()
    plt.show()
def append_6mer_delta_logodds_scores(var_df, mer6_weights_use, mer6_weights_pas, mer6_weights_dse) :
    """Append a hexamer-model delta score column to a SNV variant dataframe.

    For each variant row, sums the weights of the 6-mers overlapping the SNV
    position in the variant sequence and subtracts the weights of the 6-mers
    overlapping the same position in the reference sequence. Which weight
    table is used depends on the region the SNV falls in: USE (snv_pos < 50),
    PAS (50 <= snv_pos <= 56) or DSE (snv_pos > 56).

    Parameters
    ----------
    var_df : pandas.DataFrame with columns 'snv_pos', 'master_seq' (variant
        sequence) and 'wt_seq' (reference sequence).
    mer6_weights_use, mer6_weights_pas, mer6_weights_dse : sequences of 4096
        weights, indexed by hexamer rank in lexicographic A < C < G < T order.

    Returns
    -------
    var_df with a new 'delta_6mer_score' column (the dataframe is also
    mutated in place).
    """
    from itertools import product

    bases = ['A', 'C', 'G', 'T']
    # Rank every hexamer in lexicographic order (A < C < G < T). product()
    # yields tuples in exactly the order the original six nested loops did.
    mer6_dict = {''.join(mer) : i for i, mer in enumerate(product(bases, repeat=6))}

    delta_6mer_scores = []
    for _, row in var_df.iterrows() :
        snv_pos = row['snv_pos']
        var_seq = row['master_seq']
        ref_seq = row['wt_seq']

        # Pick the positional weight table for the region the SNV lies in.
        mer6_weights = mer6_weights_use
        if snv_pos >= 50 and snv_pos <= 56 :
            mer6_weights = mer6_weights_pas
        elif snv_pos > 56 :
            mer6_weights = mer6_weights_dse

        delta_score = 0
        # Slide a 6-nt window across offsets -5..4 relative to the SNV;
        # windows truncated by the sequence end (len != 6) are skipped.
        # Windows at offsets 1..4 do not cover the SNV and cancel between
        # variant and reference for a single-nucleotide difference.
        for offset in range(-6 + 1, 6 - 1) :
            var_motif = var_seq[snv_pos+offset:snv_pos+offset + 6]
            if len(var_motif) == 6 :
                delta_score += mer6_weights[mer6_dict[var_motif]]
            ref_motif = ref_seq[snv_pos+offset:snv_pos+offset + 6]
            if len(ref_motif) == 6 :
                delta_score -= mer6_weights[mer6_dict[ref_motif]]

        delta_6mer_scores.append(delta_score)

    var_df['delta_6mer_score'] = delta_6mer_scores
    return var_df
def plot_gain_loss_of_motifs(df_var, motifs, name_prefix='', plot_logos=False, save_figs=False) :
    """Split SNVs into gain-of-motif / loss-of-motif sets and plot both.

    A variant is a 'gain' of a motif when the motif occurs in one of the
    windows overlapping the SNV in the variant sequence but in none of the
    corresponding reference windows, and a 'loss' in the opposite case.
    For each set, plots neural-net and hexamer-model delta scatters, prints
    summary agreement counts, and (optionally) mutation-map logos for DSE
    variants.

    Parameters
    ----------
    df_var : variant dataframe with columns 'snv_pos', 'master_seq', 'wt_seq',
        'delta_logodds_pred', 'delta_logodds_true', 'delta_6mer_score',
        'gene'.
    motifs : iterable of motif strings to test for gain/loss.
    name_prefix : prefix used when building saved figure names.
    plot_logos : additionally render per-variant mutation maps (relies on the
        module-level dataframe seq_predicted_isoform_df_delta).
    save_figs : save the scatter figures under derived file names.

    Returns
    -------
    (gain_of_motif_df, loss_of_motif_df) : the two subsets, indexed by
    'master_seq'. A row matched by several motifs can appear more than once.
    """
    gain_of_motif_index = []
    loss_of_motif_index = []
    # Classify every row against every motif; positional indices accumulate
    # across motifs (duplicates possible if a row matches several motifs).
    for motif in motifs :
        i = 0
        for _, row in df_var.iterrows() :
            snv_pos = row['snv_pos']
            var_seq = row['master_seq']
            ref_seq = row['wt_seq']
            motif_len = len(motif)
            # Count all motif-length windows overlapping the SNV position in
            # the variant and reference sequences.
            var_motif_dict = {}
            ref_motif_dict = {}
            for offset in range(-motif_len + 1, motif_len - 1) :
                var_motif = var_seq[snv_pos+offset:snv_pos+offset + motif_len]
                if len(var_motif) == motif_len :
                    if var_motif not in var_motif_dict :
                        var_motif_dict[var_motif] = 0
                    var_motif_dict[var_motif] += 1
                ref_motif = ref_seq[snv_pos+offset:snv_pos+offset + motif_len]
                if len(ref_motif) == motif_len :
                    if ref_motif not in ref_motif_dict :
                        ref_motif_dict[ref_motif] = 0
                    ref_motif_dict[ref_motif] += 1
            # Gain: motif created by the SNV; loss: motif destroyed by it.
            if motif in var_motif_dict and motif not in ref_motif_dict :
                gain_of_motif_index.append(i)
            elif motif not in var_motif_dict and motif in ref_motif_dict :
                loss_of_motif_index.append(i)
            i += 1
    gain_of_motif_df = df_var.iloc[gain_of_motif_index].copy().set_index('master_seq')
    loss_of_motif_df = df_var.iloc[loss_of_motif_index].copy().set_index('master_seq')
    # Gain-of-motif scatters: neural net predictions, then hexamer model.
    print("Gain of Motif (Neural Net model)")
    fig_name = None
    if save_figs :
        fig_name = 'gain_of_' + name_prefix + '_neural_net'
    plot_position_delta_scatter(gain_of_motif_df, min_pred_filter=0.0, figsize=(12, 6), fig_name=fig_name, fig_dpi=150, annotate=None)
    print("Gain of Motif (Hexamer model)")
    fig_name = None
    if save_figs :
        fig_name = 'gain_of_' + name_prefix + '_hexamer_model'
    plot_position_delta_scatter(gain_of_motif_df, min_pred_filter=0.0, figsize=(12, 6), pred_column='delta_6mer_score', fig_name=fig_name, fig_dpi=150, annotate=None)
    # Summary counts: sign agreement between predictions/observations, split
    # by region (USE: snv_pos < 50, DSE: snv_pos >= 56).
    print('')
    print('# Gain of ' + str(motifs) + ' = ' + str(len(gain_of_motif_df)))
    print("# Neural net / Observation agreement = " + str(len(np.nonzero(np.sign(gain_of_motif_df['delta_logodds_pred']) == np.sign(gain_of_motif_df['delta_logodds_true']))[0])))
    print("# Neural net / 6-mer model agreement = " + str(len(np.nonzero(np.sign(gain_of_motif_df['delta_logodds_pred']) == np.sign(gain_of_motif_df['delta_6mer_score']))[0])))
    print('## Gain of motif in USE = ' + str(len(gain_of_motif_df.query("snv_pos < 50"))))
    print('### Gain of motif, Loss of function = ' + str(len(gain_of_motif_df.query("snv_pos < 50 and delta_logodds_true < 0.0"))))
    print('### Gain of motif, Gain of function = ' + str(len(gain_of_motif_df.query("snv_pos < 50 and delta_logodds_true > 0.0"))))
    print("### Neural net / 6-mer model agreement = " + str(len(np.nonzero(np.sign(gain_of_motif_df.query("snv_pos < 50")['delta_logodds_pred']) == np.sign(gain_of_motif_df.query("snv_pos < 50")['delta_6mer_score']))[0])))
    print('## Gain of motif in DSE = ' + str(len(gain_of_motif_df.query("snv_pos >= 56"))))
    print('### Gain of motif, Loss of function = ' + str(len(gain_of_motif_df.query("snv_pos >= 56 and delta_logodds_true < 0.0"))))
    print('### Gain of motif, Gain of function = ' + str(len(gain_of_motif_df.query("snv_pos >= 56 and delta_logodds_true > 0.0"))))
    print("### Neural net / 6-mer model agreement = " + str(len(np.nonzero(np.sign(gain_of_motif_df.query("snv_pos >= 56")['delta_logodds_pred']) == np.sign(gain_of_motif_df.query("snv_pos >= 56")['delta_6mer_score']))[0])))
    # Loss-of-motif scatters and the mirror-image summary.
    print("Loss of Motif (Neural Net model)")
    fig_name = None
    if save_figs :
        fig_name = 'loss_of_' + name_prefix + '_neural_net'
    plot_position_delta_scatter(loss_of_motif_df, min_pred_filter=0.0, figsize=(12, 6), fig_name=fig_name, fig_dpi=150, annotate=None)
    print("Loss of Motif (Hexamer model)")
    fig_name = None
    if save_figs :
        fig_name = 'loss_of_' + name_prefix + '_hexamer_model'
    plot_position_delta_scatter(loss_of_motif_df, min_pred_filter=0.0, figsize=(12, 6), pred_column='delta_6mer_score', fig_name=fig_name, fig_dpi=150, annotate=None)
    print('')
    print('# Loss of ' + str(motifs) + ' = ' + str(len(loss_of_motif_df)))
    print("# Neural net / Observation agreement = " + str(len(np.nonzero(np.sign(loss_of_motif_df['delta_logodds_pred']) == np.sign(loss_of_motif_df['delta_logodds_true']))[0])))
    print("# Neural net / 6-mer model agreement = " + str(len(np.nonzero(np.sign(loss_of_motif_df['delta_logodds_pred']) == np.sign(loss_of_motif_df['delta_6mer_score']))[0])))
    print('## Loss of motif in USE = ' + str(len(loss_of_motif_df.query("snv_pos < 50"))))
    print('### Loss of motif, Loss of function = ' + str(len(loss_of_motif_df.query("snv_pos < 50 and delta_logodds_true < 0.0"))))
    print('### Loss of motif, Gain of function = ' + str(len(loss_of_motif_df.query("snv_pos < 50 and delta_logodds_true > 0.0"))))
    print("### Neural net / 6-mer model agreement = " + str(len(np.nonzero(np.sign(loss_of_motif_df.query("snv_pos < 50")['delta_logodds_pred']) == np.sign(loss_of_motif_df.query("snv_pos < 50")['delta_6mer_score']))[0])))
    print('## Loss of motif in DSE = ' + str(len(loss_of_motif_df.query("snv_pos >= 56"))))
    print('### Loss of motif, Loss of function = ' + str(len(loss_of_motif_df.query("snv_pos >= 56 and delta_logodds_true < 0.0"))))
    print('### Loss of motif, Gain of function = ' + str(len(loss_of_motif_df.query("snv_pos >= 56 and delta_logodds_true > 0.0"))))
    print("### Neural net / 6-mer model agreement = " + str(len(np.nonzero(np.sign(loss_of_motif_df.query("snv_pos >= 56")['delta_logodds_pred']) == np.sign(loss_of_motif_df.query("snv_pos >= 56")['delta_6mer_score']))[0])))
    if plot_logos :
        # Per-variant mutation maps for DSE gain-of-function variants, most
        # up-regulatory first. NOTE(review): reads the module-level dataframe
        # seq_predicted_isoform_df_delta, which must be defined at call time.
        print("Plotting Gain of Motif logos in sorted order.")
        df_sel = gain_of_motif_df.query("snv_pos >= 57 and delta_logodds_true > 0.0").sort_values(by='delta_logodds_true', ascending=False)
        seq_start = 48
        seq_end = 130
        for index, row in df_sel.iterrows() :
            gene_name = row['gene']
            snv_pos = row['snv_pos']
            # index is the variant sequence; read the substituted nucleotide.
            snv_nt = index[snv_pos]
            wt_seq = row['wt_seq']
            print('Gene = ' + gene_name)
            print('WT seq = ' + wt_seq)
            print('SNV pos = ' + str(snv_pos))
            print('SNV nt = ' + snv_nt)
            mut_map_with_cuts(
                seq_predicted_isoform_df_delta.query("wt_seq == '" + wt_seq + "'"),
                gene_name,
                [(snv_pos, snv_nt, 'darkgreen' if row['delta_logodds_true'] > 0 else 'red')],
                mode='true',
                column_suffix='',
                figsize=(14, 7),
                height_ratios=[6, 2, 2],
                bg_alpha=0.999,
                plot_simple_mutmap=True,
                annotate_folds=True,
                plot_true_cuts=True,
                plot_pred_cuts=True,
                scale_pred_cuts=True,
                fold_change_from_cut_range=[60, 100],
                ref_var_scales=[0.5, 1.0],
                border_eta = 0.06,
                seq_trim_start=seq_start, seq_trim_end=seq_end,
                plot_start=0, plot_end=seq_end-seq_start,
                plot_as_bars=False,
                pas_downscaling=0.5,
                fig_name=None,
                fig_dpi=150
            )
        # Same maps for DSE loss-of-motif variants, most down-regulatory first.
        print("Plotting Loss of Motif logos in sorted order.")
        df_sel = loss_of_motif_df.query("snv_pos >= 57").sort_values(by='delta_logodds_true')
        seq_start = 48
        seq_end = 130
        for index, row in df_sel.iterrows() :
            gene_name = row['gene']
            snv_pos = row['snv_pos']
            snv_nt = index[snv_pos]
            wt_seq = row['wt_seq']
            print('Gene = ' + gene_name)
            print('WT seq = ' + wt_seq)
            print('SNV pos = ' + str(snv_pos))
            print('SNV nt = ' + snv_nt)
            mut_map_with_cuts(
                seq_predicted_isoform_df_delta.query("wt_seq == '" + wt_seq + "'"),
                gene_name,
                [(snv_pos, snv_nt, 'darkgreen' if row['delta_logodds_true'] > 0 else 'red')],
                mode='true',
                column_suffix='',
                figsize=(14, 7),
                height_ratios=[6, 2, 2],
                bg_alpha=0.999,
                plot_simple_mutmap=True,
                annotate_folds=True,
                plot_true_cuts=True,
                plot_pred_cuts=True,
                scale_pred_cuts=True,
                fold_change_from_cut_range=[60, 100],
                ref_var_scales=[0.5, 1.0],
                border_eta = 0.06,
                seq_trim_start=seq_start, seq_trim_end=seq_end,
                plot_start=0, plot_end=seq_end-seq_start,
                plot_as_bars=False,
                pas_downscaling=0.5,
                fig_name=None,
                fig_dpi=150
            )
    return gain_of_motif_df, loss_of_motif_df
def plot_variant_selection(human_variant_df, gene_names, snv_list, wt_seq_list, experiment_name, fold_change_from_cut_range=None, seq_start=48, seq_end=100, save_figs=False) :
    """Render a mutation map with cut profiles for each selected human variant.

    Iterates over parallel lists of gene names, SNV tuples and wild-type
    sequences, prints a short summary (including the ClinVar id looked up for
    the first SNV of each entry) and delegates the drawing to
    mut_map_with_cuts.

    Parameters
    ----------
    human_variant_df : variant dataframe; must contain 'master_seq', 'wt_seq',
        'gene', 'snv_pos' and 'clinvar_id' columns.
    gene_names, snv_list, wt_seq_list : parallel lists — one gene name, one
        list of (position, nucleotide, color) SNV tuples and one wild-type
        sequence (or None for no filtering) per variant to plot.
    experiment_name : prefix for saved figure names.
    fold_change_from_cut_range : forwarded to mut_map_with_cuts.
    seq_start, seq_end : sequence trim window forwarded to mut_map_with_cuts.
    save_figs : derive and pass a figure file name when True.
    """
    for gene_name, snvs, specific_seq in zip(gene_names, snv_list, wt_seq_list) :
        indexed_df = human_variant_df.set_index('master_seq')
        # Optionally restrict to a single wild-type background.
        df_sel = indexed_df if specific_seq is None else indexed_df.query("wt_seq == '" + specific_seq + "'")

        first_pos, first_nt = snvs[0][0], snvs[0][1]
        print('Gene = ' + gene_name)
        print('WT seq = ' + specific_seq)
        print('SNV pos = ' + str(first_pos))
        print('SNV nt = ' + str(first_nt))
        # Find the row whose variant sequence carries the SNV nucleotide at
        # the SNV position, for this gene, and report its ClinVar id.
        nt_matches = df_sel.index.str.slice(first_pos, first_pos + 1) == first_nt
        clinvar_id = df_sel.loc[nt_matches].query("gene == '" + gene_name + "' and snv_pos == " + str(first_pos))['clinvar_id'].values[0]
        print('ClinVar id = ' + str(clinvar_id))

        fig_name = (experiment_name + '_' + gene_name + '_' + str(first_pos) + '_' + str(first_nt) + '_Both') if save_figs else None

        mut_map_with_cuts(
            df_sel,
            gene_name,
            snvs,
            mode='true',
            column_suffix='',
            figsize=(14, 7),
            height_ratios=[6, 2, 2],
            bg_alpha=0.999,
            plot_simple_mutmap=True,
            annotate_folds=True,
            plot_true_cuts=True,
            plot_pred_cuts=True,
            scale_pred_cuts=True,
            fold_change_from_cut_range=fold_change_from_cut_range,
            ref_var_scales=[0.5, 1.0],
            border_eta = 0.06,
            seq_trim_start=seq_start, seq_trim_end=seq_end,
            plot_start=0, plot_end=seq_end-seq_start,
            plot_as_bars=False,
            pas_downscaling=0.5,
            fig_name=fig_name,
            fig_dpi=150
        )
def mut_map_with_cuts_and_fold(df, gene_name, mfe_ref, mfe_var, mfe_struct_ref, mfe_struct_var, cut_snvs, mode, column_suffix='', figsize=(12, 6), fold_height=0.5, height_ratios=[6, 2, 2], bg_alpha=0.5, plot_simple_mutmap=True, annotate_folds=True, plot_true_cuts=True, plot_pred_cuts=False, scale_pred_cuts=False, fold_change_from_cut_range=None, ref_var_scales=[0.3, 0.7], border_eta = 0.085, seq_trim_start=0, seq_trim_end=164, plot_start=0, plot_end=164, plot_as_bars=True, pas_downscaling=0.5, fig_name=None, fig_dpi=300) :
mut_map = np.zeros((4, 164))
df_gene = df.query("gene == '" + gene_name + "'")
ref_seq = df_gene['wt_seq'].values[0]
for index, row in df_gene.iterrows() :
snv_pos = row['snv_pos']
if row['wt_seq'] != ref_seq :
continue
delta_logodds_true = row['delta_logodds_' + mode + column_suffix]
if np.isnan(delta_logodds_true) :
delta_logodds_true = 0
base = 0
if index[snv_pos] == 'A' :
base = 0
elif index[snv_pos] == 'C' :
base = 1
elif index[snv_pos] == 'G' :
base = 2
elif index[snv_pos] == 'T' :
base = 3
mut_map[3-base, snv_pos] = delta_logodds_true
#Down-scale PAS mutations
mut_map[:, 50:50+6] = mut_map[:, 50:50+6] * pas_downscaling
#Slice according to seq trim index
ref_seq = ref_seq[seq_trim_start: seq_trim_end]
mut_map = mut_map[:, seq_trim_start: seq_trim_end]
mfe_struct_ref = mfe_struct_ref[seq_trim_start: seq_trim_end]
mfe_struct_var = mfe_struct_var[seq_trim_start: seq_trim_end]
fig = plt.figure(figsize=figsize)
gs = gridspec.GridSpec(3, 1, height_ratios=height_ratios)
ax0 = plt.subplot(gs[0])
ax1 = plt.subplot(gs[1])
ax2 = plt.subplot(gs[2])
ax = [ax0, ax1, ax2]
bias = np.max(np.sum(mut_map[:, :], axis=0)) / 3.0 + 0.5
max_score = np.min(np.sum(mut_map[:, :], axis=0)) / 3.0 * -1 + bias
for i in range(plot_start, plot_end) :
mutability_score = np.sum(mut_map[:, i]) / 3.0 * -1 + bias
color = 'black'
alpha = bg_alpha
char_height = 1
for snv_pos, snv_nt, snv_color in cut_snvs :
if i == snv_pos - seq_trim_start :
#color = snv_color#None
#alpha = 1.0
color = 'black'
alpha = bg_alpha
char_height = ref_var_scales[0]
letterAt(snv_nt, i + 0.5, ref_var_scales[0], ref_var_scales[1], ax[1], color=snv_color, alpha=1.0)
break
if not plot_simple_mutmap :
letterAt(ref_seq[i], i + 0.5, 0, mutability_score, ax[1], color=color, alpha=alpha)
else :
letterAt(ref_seq[i], i + 0.5, 0, char_height, ax[1], color=color, alpha=alpha)
if mfe_struct_ref[i] != 'X' :
letterAt(mfe_struct_ref[i], i + 0.5, -fold_height, fold_height-0.05, ax[1], color='black')
if mfe_struct_var[i] != 'X' :
letterAt(mfe_struct_var[i], i + 0.5, -2*fold_height-0.05, fold_height-0.05, ax[1], color='black')
annot_text = 'MFE = ' + str(round(mfe_ref, 1))
#logo_ax[2].annotate(annot_text, xy=(56, -fold_height/2), xycoords='data', xytext=(-30, 0), ha='right', fontsize=10, weight="bold", color='black', textcoords='offset points', arrowprops=dict(headlength=8, headwidth=8, shrink=0.15, width=1.5, color='black'))
ax[1].text(55-seq_trim_start, -fold_height/2 -0.05, annot_text, horizontalalignment='right', verticalalignment='center', color='black', fontsize=12, weight="bold")
annot_text = 'MFE = ' + str(round(mfe_var, 1))
#logo_ax[2].annotate(annot_text, xy=(56, -fold_height/2), xycoords='data', xytext=(-30, 0), ha='right', fontsize=10, weight="bold", color='black', textcoords='offset points', arrowprops=dict(headlength=8, headwidth=8, shrink=0.15, width=1.5, color='black'))
ax[1].text(55-seq_trim_start, -fold_height -fold_height/2 -0.05-0.05, annot_text, horizontalalignment='right', verticalalignment='center', color='black', fontsize=12, weight="bold")
if not plot_simple_mutmap :
ax[1].plot([0, mut_map.shape[1]], [bias, bias], color='black', linestyle='--')
plt.sca(ax[1])
if not plot_simple_mutmap :
plt.yticks([0.5, bias, max_score], [round(bias - 0.5, 2), 0, round((max_score - bias) * -1, 2)], fontsize=16)
plt.ylim((-2*fold_height-0.175, max_score))
else :
plt.yticks([], [])
plt.ylim((-2*fold_height-0.175, ref_var_scales[0] + ref_var_scales[1]))
plt.axis('off')
plt.xlim((plot_start, plot_end))
plt.tight_layout()
pcm = ax[2].pcolor(mut_map, cmap='RdBu_r', vmin=-np.abs(mut_map).max(), vmax=np.abs(mut_map).max())
#fig.colorbar(pcm, ax=ax[1])
plt.sca(ax[2])
ref_seq_list = []
for c in ref_seq :
ref_seq_list.append(c)
#plt.xticks(np.arange(len(ref_seq)) + 0.5, ref_seq_list)
plt.xticks([], [])
plt.yticks([0.5, 1.5, 2.5, 3.5], ['T', 'G', 'C', 'A'], fontsize=16)
#plt.gca().xaxis.tick_top()
#plt.xticks(fontsize=16)
plt.axis([plot_start, plot_end, 0, 4])
for i in range(plot_start, plot_end) :
for j in range(0, 4) :
base = 'A'
if j == 3 :
base = 'A'
elif j == 2 :
base = 'C'
elif j == 1 :
base = 'G'
elif j == 0 :
base = 'T'
is_marked = False
for snv_pos, snv_nt, _ in cut_snvs :
if i == snv_pos - seq_trim_start and base == snv_nt :
is_marked = True
break
if not is_marked :
ax[2].add_patch(Rectangle((i, j), 1, 1, fill=True, facecolor='white', alpha=1. - bg_alpha, edgecolor=None))
ref_cut_true = df_gene['cut_prob_true_ref'].values[0][seq_trim_start: seq_trim_end]
ref_cut_pred = df_gene['cut_prob_pred_ref'].values[0][seq_trim_start: seq_trim_end]
max_y_var_hat = 0
for snv_pos, snv_nt, snv_color in cut_snvs :
df_pos = df_gene.query("snv_pos == " + str(snv_pos))
var_cut_true = df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_true_var'][0][seq_trim_start: seq_trim_end]
var_cut_pred = df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_pred_var'][0][seq_trim_start: seq_trim_end]
if scale_pred_cuts :
ref_pred_logodds = np.zeros(ref_cut_pred.shape)
var_pred_logodds = np.zeros(var_cut_pred.shape)
ref_pred_logodds[ref_cut_pred > 0.0] = np.log(ref_cut_pred[ref_cut_pred > 0.0] / (1.0 - ref_cut_pred[ref_cut_pred > 0.0]))
var_pred_logodds[var_cut_pred > 0.0] = np.log(var_cut_pred[var_cut_pred > 0.0] / (1.0 - var_cut_pred[var_cut_pred > 0.0]))
pred_fold_change = np.exp(var_pred_logodds - ref_pred_logodds)
#var_cut_pred = ref_cut_true * pred_fold_change
ref_cut_true_odds = ref_cut_true / (1. - ref_cut_true)
var_cut_pred_odds = ref_cut_true_odds * pred_fold_change
var_cut_pred = var_cut_pred_odds / (1. + var_cut_pred_odds)
if plot_true_cuts :
max_y_var_hat = max(max_y_var_hat, np.max(var_cut_true[plot_start:plot_end]))
if plot_pred_cuts :
max_y_var_hat = max(max_y_var_hat, np.max(var_cut_pred[plot_start:plot_end]))
if plot_as_bars :
if plot_true_cuts :
ax[0].step(np.arange(plot_end)[plot_start:plot_end] + 1, var_cut_true[plot_start:plot_end], color=snv_color, alpha=0.85, where='mid', linewidth=3)
if plot_pred_cuts :
ax[0].step(np.arange(plot_end)[plot_start:plot_end] + 1, var_cut_pred[plot_start:plot_end], color=snv_color, linestyle='--', alpha=0.85, where='mid', linewidth=3)
else :
if plot_true_cuts :
ax[0].plot(np.arange(plot_end)[plot_start:plot_end] + 1, var_cut_true[plot_start:plot_end], color=snv_color, linestyle='-', linewidth=3, alpha=0.7)
if plot_pred_cuts :
ax[0].plot(np.arange(plot_end)[plot_start:plot_end] + 1, var_cut_pred[plot_start:plot_end], color=snv_color, linestyle='--', linewidth=3, alpha=0.7)
#Highlight specific snv in mutation map
base = 0
if snv_nt == 'A' :
base = 0
elif snv_nt == 'C' :
base = 1
elif snv_nt == 'G' :
base = 2
elif snv_nt == 'T' :
base = 3
#ax[2].add_patch(Rectangle((snv_pos, 3 - base), 1, 1, fill=False, edgecolor=snv_color, lw=4))
ax[2].add_patch(Rectangle((snv_pos - seq_trim_start + border_eta, 3 - base + border_eta), 1 - 2.*border_eta, 1 - 2.*border_eta, fill=False, edgecolor=snv_color, lw=4))
#ax[1].add_patch(Rectangle((snv_pos, 0), 1, max_score, fill=False, edgecolor=snv_color, lw=4))
if plot_true_cuts :
max_y_var_hat = max(max_y_var_hat, np.max(ref_cut_true[plot_start:plot_end]))
if plot_pred_cuts and not scale_pred_cuts :
max_y_var_hat = max(max_y_var_hat, np.max(ref_cut_pred[plot_start:plot_end]))
#Annotate min/max delta isoform log odds
min_mutmap_logodds = round((max_score - bias) * -1, 2)
max_mutmap_logodds = round(bias - 0.5, 2)
annot_text = 'Min = ' + str(min_mutmap_logodds) + '\nMax = ' + str(max_mutmap_logodds)
ax[0].text(0.05, 0.80, annot_text,
horizontalalignment='left', verticalalignment='bottom',
transform=ax[0].transAxes,
color='black', fontsize=16, weight="bold")
snv_i = 0
for snv_pos, snv_nt, snv_color in cut_snvs :
if annotate_folds :
if plot_true_cuts :
df_pos = df_gene.query("snv_pos == " + str(snv_pos))
df_pos = df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]
fold_change = np.exp(df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['delta_logodds_true' + column_suffix][0])
if fold_change_from_cut_range :
fold_range_start = fold_change_from_cut_range[0]
fold_range_end = fold_change_from_cut_range[1]
ref_p = np.sum(df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_true_ref'][0][fold_range_start: fold_range_end])
var_p = np.sum(df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_true_var'][0][fold_range_start: fold_range_end])
fold_change = (var_p / (1. - var_p)) / (ref_p / (1. - ref_p))
fold_color = 'darkgreen'
if fold_change < 1. :
fold_color = 'red'
fold_change = 1. / fold_change
#fold_color = snv_color
row_multiplier = 0.1
row_bias = 0
if plot_pred_cuts :
row_multiplier = 0.2
ax[0].text(0.70, 0.80 - row_multiplier * snv_i, snv_nt + ':',
horizontalalignment='left', verticalalignment='bottom',
transform=ax[0].transAxes,
color=snv_color, fontsize=16, weight="bold")
ax[0].text(0.73, 0.80 - row_multiplier * snv_i, 'Fold change = ' + str(round(fold_change, 2)),
horizontalalignment='left', verticalalignment='bottom',
transform=ax[0].transAxes,
color=fold_color, fontsize=16, weight="bold")
if plot_pred_cuts :
df_pos = df_gene.query("snv_pos == " + str(snv_pos))
df_pos = df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]
fold_change = np.exp(df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['delta_logodds_pred' + column_suffix][0])
if fold_change_from_cut_range :
fold_range_start = fold_change_from_cut_range[0]
fold_range_end = fold_change_from_cut_range[1]
ref_p = np.sum(df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_pred_ref'][0][fold_range_start: fold_range_end])
var_p = np.sum(df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_pred_var'][0][fold_range_start: fold_range_end])
if scale_pred_cuts :
ref_p = np.sum(df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_true_ref'][0][fold_range_start: fold_range_end])
ref_cut_true_t = df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_true_ref'][0]#[0: seq_trim_end]
ref_cut_pred_t = df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_pred_ref'][0]#[0: seq_trim_end]
var_cut_pred_t = df_pos[df_pos.index.str.slice(snv_pos, snv_pos + 1) == snv_nt]['cut_prob_pred_var'][0]#[0: seq_trim_end]
ref_pred_logodds = np.zeros(ref_cut_pred_t.shape)
var_pred_logodds = np.zeros(var_cut_pred_t.shape)
ref_pred_logodds[ref_cut_pred_t > 0.0] = np.log(ref_cut_pred_t[ref_cut_pred_t > 0.0] / (1.0 - ref_cut_pred_t[ref_cut_pred_t > 0.0]))
var_pred_logodds[var_cut_pred_t > 0.0] = np.log(var_cut_pred_t[var_cut_pred_t > 0.0] / (1.0 - var_cut_pred_t[var_cut_pred_t > 0.0]))
pred_fold_change = np.exp(var_pred_logodds - ref_pred_logodds)
#var_cut_pred = ref_cut_true * pred_fold_change
ref_cut_true_odds = ref_cut_true_t / (1. - ref_cut_true_t)
var_cut_pred_odds = ref_cut_true_odds * pred_fold_change
var_cut_pred_t = var_cut_pred_odds / (1. + var_cut_pred_odds)
var_p = np.sum(var_cut_pred_t[fold_range_start: fold_range_end])
fold_change = (var_p / (1. - var_p)) / (ref_p / (1. - ref_p))
fold_color = 'darkgreen'
if fold_change < 1. :
fold_color = 'red'
fold_change = 1. / fold_change
#fold_color = snv_color
row_multiplier = 0.1
row_bias = 0.0
if plot_true_cuts :
row_multiplier = 0.2
row_bias = 0.1
ax[0].text(0.70, 0.80 - row_multiplier * snv_i - row_bias, snv_nt + ':',
horizontalalignment='left', verticalalignment='bottom',
transform=ax[0].transAxes,
color=snv_color, fontsize=16, weight="bold")
ax[0].text(0.73, 0.80 - row_multiplier * snv_i - row_bias, 'Predicted change = ' + str(round(fold_change, 2)),
horizontalalignment='left', verticalalignment='bottom',
transform=ax[0].transAxes,
color=fold_color, fontsize=16, weight="bold")
snv_i += 1
#Plot reference cut distribution
if plot_as_bars :
if plot_true_cuts :
ax[0].step(np.arange(plot_end)[plot_start:plot_end] + 1, ref_cut_true[plot_start:plot_end], color='black', alpha=0.85, where='mid', linewidth=3)
if plot_pred_cuts and not scale_pred_cuts :
ax[0].step(np.arange(plot_end)[plot_start:plot_end] + 1, ref_cut_pred[plot_start:plot_end], color='black', linestyle='--', alpha=0.85, where='mid', linewidth=3)
else :
if plot_true_cuts :
ax[0].plot(np.arange(plot_end)[plot_start:plot_end] + 1, ref_cut_true[plot_start:plot_end], color='black', linestyle='-', linewidth=3, alpha=0.7)
if plot_pred_cuts and not scale_pred_cuts :
ax[0].plot(np.arange(plot_end)[plot_start:plot_end] + 1, ref_cut_pred[plot_start:plot_end], color='black', linestyle='--', linewidth=3, alpha=0.7)
#ax[0].plot([57, 57], [0, max(np.max(ref_cut[:164]), max_y_var_hat)], color='green', linestyle='--', linewidth=3)
#ax[0].plot([97, 97], [0, max(np.max(ref_cut[:164]), max_y_var_hat)], color='green', linestyle='--', linewidth=3)
plt.sca(ax[0])
plt.xlim((plot_start, plot_end))
plt.yticks(fontsize=16)
plt.ylim(0, max_y_var_hat * 1.02)
plt.tick_params(axis='x', which='both', bottom=False, top=False, labelbottom=False)
plt.tight_layout()
if fig_name is not None :
plt.savefig(fig_name + '.png', transparent=True, dpi=fig_dpi)
plt.savefig(fig_name + '.svg')
plt.savefig(fig_name + '.eps')
plt.show()
| 46.841278
| 691
| 0.606204
| 29,061
| 193,595
| 3.694092
| 0.024466
| 0.01632
| 0.012482
| 0.017475
| 0.911452
| 0.892598
| 0.873624
| 0.858282
| 0.839969
| 0.830393
| 0
| 0.039007
| 0.255255
| 193,595
| 4,132
| 692
| 46.852614
| 0.705581
| 0.039283
| 0
| 0.713275
| 0
| 0
| 0.1336
| 0.065586
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010108
| false
| 0
| 0.005391
| 0
| 0.021226
| 0.014488
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ac4a90003594973ff6e0d58f8fc0dd40bc4a7308
| 3,865
|
py
|
Python
|
test/test_timespan.py
|
Uberspace/libestg3b
|
3f544002c655aa70521069bdf1b1e141fb38bd87
|
[
"MIT"
] | 5
|
2018-11-05T12:46:49.000Z
|
2020-01-06T03:11:10.000Z
|
test/test_timespan.py
|
Uberspace/libestg3b
|
3f544002c655aa70521069bdf1b1e141fb38bd87
|
[
"MIT"
] | 21
|
2018-09-18T10:27:14.000Z
|
2018-09-22T18:54:38.000Z
|
test/test_timespan.py
|
Uberspace/libestg3b
|
3f544002c655aa70521069bdf1b1e141fb38bd87
|
[
"MIT"
] | null | null | null |
import datetime as DT
import pytest
from libestg3b.estg3b import Timespan
@pytest.mark.parametrize("t1,t2,overlaps", [
    # different days, fully disjoint
    [
        Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 8)),
        Timespan(DT.datetime(2018, 10, 3, 5), DT.datetime(2018, 10, 3, 8)),
        False,
    ],
    # identical spans
    [
        Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 8)),
        Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 8)),
        True,
    ],
    # touching at the boundary: t1 ends exactly when t2 starts
    [
        Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 8)),
        Timespan(DT.datetime(2018, 10, 2, 8), DT.datetime(2018, 10, 2, 9)),
        True,
    ],
    # partial overlap
    [
        Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 8)),
        Timespan(DT.datetime(2018, 10, 2, 7), DT.datetime(2018, 10, 2, 9)),
        True,
    ],
    # same day, gap of one hour between the spans
    # (the original had this exact case listed twice; the duplicate is removed)
    [
        Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 8)),
        Timespan(DT.datetime(2018, 10, 2, 9), DT.datetime(2018, 10, 2, 10)),
        False,
    ],
])
def test_timespan_overlaps(t1, t2, overlaps):
    """overlaps() must be symmetric, so assert in both directions."""
    assert t1.overlaps(t2) == overlaps
    assert t2.overlaps(t1) == overlaps
@pytest.mark.parametrize("t1,t2,tr", [
    # adjacent spans merge into one continuous span
    (
        Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 8)),
        Timespan(DT.datetime(2018, 10, 2, 8), DT.datetime(2018, 10, 2, 10)),
        Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 10)),
    ),
    # partially overlapping spans merge into their envelope
    (
        Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 8)),
        Timespan(DT.datetime(2018, 10, 2, 7), DT.datetime(2018, 10, 2, 10)),
        Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 10)),
    ),
    # a span fully contained in the other leaves the outer span unchanged
    (
        Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 8)),
        Timespan(DT.datetime(2018, 10, 2, 6), DT.datetime(2018, 10, 2, 7)),
        Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 8)),
    ),
])
def test_timespan_merge_with(t1, t2, tr):
    """merge_with() is commutative; verify both argument orders."""
    assert t1.merge_with(t2) == tr
    assert t2.merge_with(t1) == tr
def test_timespan_merge_with_non_overlapping():
    """merge_with() must reject timespans that do not overlap at all."""
    t1 = Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 8))
    t2 = Timespan(DT.datetime(2018, 10, 2, 9), DT.datetime(2018, 10, 2, 10))
    # `match` checks the exception *message*; the original `'overlapping' in
    # str(exc)` stringified the ExceptionInfo wrapper, not the exception value
    # (pytest documents `str(excinfo.value)` / `match=` for this).
    with pytest.raises(Exception, match='overlapping'):
        t1.merge_with(t2)
@pytest.mark.parametrize("ts_in,ts_out", [
    # out-of-order but adjacent spans collapse into a single span
    (
        [
            Timespan(DT.datetime(2018, 10, 2, 8), DT.datetime(2018, 10, 2, 10)),
            Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 8)),
        ],
        [
            Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 10)),
        ],
    ),
    # disjoint spans are kept separate
    (
        [
            Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 8)),
            Timespan(DT.datetime(2018, 10, 2, 9), DT.datetime(2018, 10, 2, 10)),
        ],
        [
            Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 8)),
            Timespan(DT.datetime(2018, 10, 2, 9), DT.datetime(2018, 10, 2, 10)),
        ],
    ),
    # mixed input: 5-8, 8-9 and 9-10 chain together; 11-12 stays apart
    (
        [
            Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 8)),
            Timespan(DT.datetime(2018, 10, 2, 9), DT.datetime(2018, 10, 2, 10)),
            Timespan(DT.datetime(2018, 10, 2, 8), DT.datetime(2018, 10, 2, 9)),
            Timespan(DT.datetime(2018, 10, 2, 11), DT.datetime(2018, 10, 2, 12)),
        ],
        [
            Timespan(DT.datetime(2018, 10, 2, 5), DT.datetime(2018, 10, 2, 10)),
            Timespan(DT.datetime(2018, 10, 2, 11), DT.datetime(2018, 10, 2, 12)),
        ],
    ),
])
def test_timespan_union(ts_in, ts_out):
    """Timespan.union merges chainable spans and returns the expected list."""
    assert Timespan.union(ts_in) == ts_out
| 34.508929
| 81
| 0.550582
| 580
| 3,865
| 3.632759
| 0.074138
| 0.341718
| 0.478405
| 0.546749
| 0.839108
| 0.783579
| 0.754153
| 0.754153
| 0.754153
| 0.754153
| 0
| 0.215789
| 0.262613
| 3,865
| 111
| 82
| 34.81982
| 0.523509
| 0
| 0
| 0.545455
| 0
| 0
| 0.011643
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 1
| 0.040404
| false
| 0
| 0.030303
| 0
| 0.070707
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
ac7cbcf11845ffa5f374a854ccfeda9afc9d6f24
| 2,650
|
py
|
Python
|
tests/test_git_check_remote.py
|
pradyunsg/release-helper
|
3c338d920d8803db13ca25fdef85c753b9612261
|
[
"MIT"
] | 1
|
2022-02-23T19:23:10.000Z
|
2022-02-23T19:23:10.000Z
|
tests/test_git_check_remote.py
|
pradyunsg/release-helper
|
3c338d920d8803db13ca25fdef85c753b9612261
|
[
"MIT"
] | null | null | null |
tests/test_git_check_remote.py
|
pradyunsg/release-helper
|
3c338d920d8803db13ca25fdef85c753b9612261
|
[
"MIT"
] | null | null | null |
from release_helper.commands.git_check_remote import cmd
def test_fails_on_no_urls(runner, git):
    """Invoking without any URL arguments is a usage error: exit code 2."""
    res = runner.invoke(cmd, ["upstream"])
    assert res.exit_code == 2
    assert res.stdout == ""
    assert res.stderr == "FATAL: Got no URLs.\n"
def test_fails_on_missing_remote(runner, git):
    """A remote that was never configured makes git fail: exit code 3."""
    res = runner.invoke(cmd, ["upstream", "https://pradyunsg.me/repo.git"])
    assert res.exit_code == 3
    assert res.stdout == ""
    assert res.stderr.endswith("FATAL: git did not exit cleanly.\n")
def test_fails_on_single_url_that_does_not_match(runner, git):
    """A remote whose only URL differs from the expected one: exit code 1."""
    git("remote", "add", "upstream", "https://pradyunsg.me/not-repo.git")
    res = runner.invoke(cmd, ["upstream", "https://pradyunsg.me/repo.git"])
    expected_stderr = (
        "git remote 'upstream' does not include any URL from:\n"
        "- https://pradyunsg.me/repo.git\n"
    )
    assert res.exit_code == 1
    assert res.stdout == ""
    assert res.stderr == expected_stderr
def test_fails_on_multiple_url_that_do_not_match(runner, git):
    """None of several candidate URLs matching the remote: exit code 1."""
    git("remote", "add", "upstream", "https://pradyunsg.me/not-repo.git")
    args = [
        "upstream",
        "https://pradyunsg.me/repo.git",
        "https://pradyunsg.me/repo-alt.git",
    ]
    res = runner.invoke(cmd, args)
    expected_stderr = (
        "git remote 'upstream' does not include any URL from:\n"
        "- https://pradyunsg.me/repo.git\n"
        "- https://pradyunsg.me/repo-alt.git\n"
    )
    assert res.exit_code == 1
    assert res.stdout == ""
    assert res.stderr == expected_stderr
def test_succeeds_on_multiple_urls_match(runner, git):
    """Matching any one of several candidate URLs succeeds silently."""
    git("remote", "add", "upstream", "https://pradyunsg.me/repo.git")
    args = [
        "upstream",
        "https://pradyunsg.me/repo.git",
        "https://pradyunsg.me/repo-alt.git",
    ]
    res = runner.invoke(cmd, args)
    assert res.exit_code == 0
    assert res.stdout == ""
    assert res.stderr == ""
def test_succeeds_on_single_url_match(runner, git):
    """An exact single-URL match succeeds with no output at all."""
    git("remote", "add", "upstream", "https://pradyunsg.me/repo.git")
    res = runner.invoke(cmd, ["upstream", "https://pradyunsg.me/repo.git"])
    assert res.exit_code == 0
    assert res.stdout == ""
    assert res.stderr == ""
def test_succeeds_on_single_url_match_from_multiple(runner, git):
    """Matching any one of the remote's several URLs is sufficient."""
    git("remote", "add", "upstream", "https://pradyunsg.me/repo.git")
    git("remote", "set-url", "--add", "upstream", "https://pradyunsg.me/another.git")
    res = runner.invoke(cmd, ["upstream", "https://pradyunsg.me/repo.git"])
    assert res.exit_code == 0
    assert res.stdout == ""
    assert res.stderr == ""
| 29.444444
| 85
| 0.629434
| 342
| 2,650
| 4.72807
| 0.157895
| 0.155844
| 0.168213
| 0.17316
| 0.84663
| 0.802721
| 0.742115
| 0.714904
| 0.714904
| 0.714904
| 0
| 0.00333
| 0.206792
| 2,650
| 89
| 86
| 29.775281
| 0.765937
| 0
| 0
| 0.666667
| 0
| 0
| 0.323774
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.111111
| false
| 0
| 0.015873
| 0
| 0.126984
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3bdb1483811ad34af5591c9cb6670e9d78dd9009
| 129,280
|
py
|
Python
|
votecheck/__init__.py
|
BenWirus/ZombieVoters
|
cdd902cdcfe3211541ab9622fcefea905e75ce3e
|
[
"MIT"
] | 36
|
2020-11-06T03:02:54.000Z
|
2020-11-12T02:14:43.000Z
|
votecheck/__init__.py
|
BenWirus/ZombieVoters
|
cdd902cdcfe3211541ab9622fcefea905e75ce3e
|
[
"MIT"
] | 8
|
2020-11-06T04:27:22.000Z
|
2020-12-03T01:27:12.000Z
|
votecheck/__init__.py
|
BenWirus/ZombieVoters
|
cdd902cdcfe3211541ab9622fcefea905e75ce3e
|
[
"MIT"
] | 15
|
2020-11-06T02:05:48.000Z
|
2021-09-03T23:38:40.000Z
|
from time import sleep
from fake_useragent import UserAgent
from proxyscrape import create_collector
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
import requests
import random
import json
import pathlib
import math
def random_sleep(min_sec: float = 0, max_sec: float = 1) -> None:
    """Sleep for a random duration between *min_sec* and *max_sec* seconds.

    The duration is rounded to two decimal places. Annotations generalized to
    float: `random.uniform` accepts and returns floats, so int-only hints were
    misleadingly narrow (ints still work, so callers are unaffected).
    """
    sleep(round(random.uniform(min_sec, max_sec), 2))
def is_registered(html: str) -> bool:
    """Return True if the registration-status page *html* reports success.

    Matches the literal marker text the page renders; the `in` expression
    already yields a bool, so the if/return-True/return-False ladder is gone.
    """
    return 'Yes, you are registered!' in html
def has_voted(html: str) -> bool:
    """Return True if *html* contains the 'Ballot received' marker.

    Returns the membership test directly instead of the redundant
    if/return-True/return-False pattern of the original.
    """
    return 'Ballot received' in html
def read_json_file(file: str):
    """Deserialize and return the JSON content of *file*."""
    with open(file) as fh:
        data = json.load(fh)
    return data
def write_json_file(file: str, data) -> None:
    """Serialize *data* as JSON into *file*, overwriting existing content."""
    with open(file, 'w') as fh:
        json.dump(data, fh)
def get_proxy(collector: create_collector, config: dict):
    """Fetch one proxy from *collector* matching the configured criteria.

    Criteria come from config['proxy']: allowed country codes ('locations')
    and the anonymity requirement ('anonymous').
    """
    criteria = {
        'code': config['proxy']['locations'],
        'anonymous': config['proxy']['anonymous'],
    }
    return collector.get_proxy(criteria)
def format_proxies_for_requests(http, https):
    """Build a requests-style proxies mapping from two proxy records.

    Both entries use the plain 'http://' scheme prefix — including the
    'https' key — exactly as the original did.
    """
    http_url = 'http://{}:{}'.format(http.host, http.port)
    https_url = 'http://{}:{}'.format(https.host, https.port)
    return {'http': http_url, 'https': https_url}
def save_results(results, state, county, zip_code, death_year, birth_year) -> None:
    """Persist *results* as JSON under the per-query output directory.

    Creates the directory tree if needed, then writes one file per
    birth-year bucket (path layout comes from get_result_file_path).
    """
    out_dir = get_result_file_path_dir(state, county, zip_code, death_year)
    pathlib.Path(out_dir).mkdir(parents=True, exist_ok=True)
    out_file = get_result_file_path(state, county, zip_code, death_year, birth_year)
    with open(out_file, 'w') as fh:
        json.dump(results, fh)
    print('saved results...', end=' ')
def read_results(state, county, zip_code, death_year, birth_year):
    """Load previously saved results; return [] when no result file exists."""
    path = pathlib.Path(
        get_result_file_path(state, county, zip_code, death_year, birth_year)
    )
    if not path.is_file():
        return []
    with open(path, 'r') as fh:
        return json.load(fh)
def get_result_file_path(state, county, zip_code, death_year, birth_year):
    """Return the JSON result-file path for one birth-year bucket."""
    base = get_result_file_path_dir(state, county, zip_code, death_year)
    return '{}{}.json'.format(base, birth_year)
def get_result_file_path_dir(state, county, zip_code, death_year):
    """Return the output directory (with trailing slash) for one query bucket."""
    return './output/deaths/{}/{}/{}/{}/'.format(state, county, zip_code, death_year)
def calculate_pagination(per_page, total):
    """Return [{'page': p, 'offset': p * per_page}, ...] covering *total* items.

    Always includes page 0 (offset 0), even when *total* is zero — same as
    the original seed entry.
    """
    page_count = math.ceil(float(total) / per_page)
    return [
        {'page': page, 'offset': page * per_page}
        for page in range(max(page_count, 1))
    ]
def send_http_post(url, payload, headers, proxies):
    """POST *payload* to *url* via *proxies* with retries; return the response.

    Uses a fresh requests.Session mounted with a retry adapter: up to 10
    attempts with exponential backoff on HTTP 500/502/503/504, for GET and
    POST. TLS certificate verification is disabled (verify=False), so the
    corresponding urllib3 warning is silenced up front.
    """
    # Silence InsecureRequestWarning spam caused by verify=False below.
    requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)
    with requests.Session() as s:
        retries = Retry(
            total=10,
            backoff_factor=0.5,
            status_forcelist=[500, 502, 503, 504],
            # NOTE(review): `method_whitelist` was renamed `allowed_methods`
            # in urllib3 1.26 and removed in 2.0 — confirm the pinned urllib3
            # version still accepts this keyword.
            method_whitelist=frozenset(['GET', 'POST'])
        )
        # Mount the same retry policy for both schemes.
        s.mount('http://', HTTPAdapter(max_retries=retries))
        s.mount('https://', HTTPAdapter(max_retries=retries))
        response = s.post(
            url,
            data=payload,
            proxies=proxies,
            headers=headers,
            timeout=5.0,     # seconds; applies per attempt, not overall
            verify=False     # deliberate: scraping through untrusted proxies
        )
        return response
def get_user_agent():
print('Obtaining a random user agent...')
try:
ua = UserAgent(verify_ssl=False)
ua.update()
return ua.random
except Exception:
print('Failed to obtain a useragent, using fallback method...')
return random.choice([
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/37.0.2062.94 Chrome/37.0.2062.94 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/600.8.9 (KHTML, like Gecko) Version/8.0.8 Safari/600.8.9',
'Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12H321 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.10240',
'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/600.7.12 (KHTML, like Gecko) Version/8.0.7 Safari/600.7.12',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.8.9 (KHTML, like Gecko) Version/7.1.8 Safari/537.85.17',
'Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12H143 Safari/600.1.4',
'Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12F69 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.1; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0)',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0)',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 5.1; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/600.6.3 (KHTML, like Gecko) Version/8.0.6 Safari/600.6.3',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/600.5.17 (KHTML, like Gecko) Version/8.0.5 Safari/600.5.17',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12H321 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (iPad; CPU OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D257 Safari/9537.53',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/6.0)',
'Mozilla/5.0 (Windows NT 6.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (X11; CrOS x86_64 7077.134.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.156 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.7.12 (KHTML, like Gecko) Version/7.1.7 Safari/537.85.16',
'Mozilla/5.0 (Windows NT 6.0; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (iPad; CPU OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B466 Safari/600.1.4',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/600.3.18 (KHTML, like Gecko) Version/8.0.3 Safari/600.3.18',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; Win64; x64; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_1_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B440 Safari/600.1.4',
'Mozilla/5.0 (Linux; U; Android 4.0.3; en-us; KFTT Build/IML74K) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12D508 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (iPad; CPU OS 7_1_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D201 Safari/9537.53',
'Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFTHWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.6.3 (KHTML, like Gecko) Version/7.1.6 Safari/537.85.15',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/600.4.10 (KHTML, like Gecko) Version/8.0.4 Safari/600.4.10',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.78.2 (KHTML, like Gecko) Version/7.0.6 Safari/537.78.2',
'Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12H321 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B410 Safari/600.1.4',
'Mozilla/5.0 (iPad; CPU OS 7_0_4 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11B554a Safari/9537.53',
'Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; TNJB; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; ARM; Trident/7.0; Touch; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MDDCJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.0; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.2; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12H143 Safari/600.1.4',
'Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFASWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12H321 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12F70 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MATBJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; U; Android 4.0.4; en-us; KFJWI Build/IMM76D) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 7_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D167 Safari/9537.53',
'Mozilla/5.0 (X11; CrOS armv7l 7077.134.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.156 Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64; rv:34.0) Gecko/20100101 Firefox/34.0',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/600.1.25 (KHTML, like Gecko) Version/8.0 Safari/600.1.25',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/600.2.5 (KHTML, like Gecko) Version/8.0.2 Safari/600.2.5',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/600.1.25 (KHTML, like Gecko) Version/8.0 Safari/600.1.25',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11) AppleWebKit/601.1.56 (KHTML, like Gecko) Version/9.0 Safari/601.1.56',
'Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFSOWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 5_1_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9B206 Safari/7534.48.3',
'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_1_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B435 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.10240',
'Mozilla/5.0 (Windows NT 6.3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; LCJB; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MDDRJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFAPWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; Touch; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; LCJB; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; U; Android 4.0.3; en-us; KFOT Build/IML74K) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 6_1_3 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10B329 Safari/8536.25',
'Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFARWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; ASU2JS; rv:11.0) like Gecko',
'Mozilla/5.0 (iPad; CPU OS 8_0_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12A405 Safari/600.1.4',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.77.4 (KHTML, like Gecko) Version/7.0.5 Safari/537.77.4',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; yie11; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MALNJS; rv:11.0) like Gecko',
'Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/8.0.57838 Mobile/12H321 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (Windows NT 10.0; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MAGWJS; rv:11.0) like Gecko',
'Mozilla/5.0 (X11; Linux x86_64; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.5.17 (KHTML, like Gecko) Version/7.1.5 Safari/537.85.14',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; TNJB; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; NP06; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36 OPR/31.0.1889.174',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/600.4.8 (KHTML, like Gecko) Version/8.0.3 Safari/600.4.8',
'Mozilla/5.0 (iPad; CPU OS 7_0_6 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11B651 Safari/9537.53',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.3.18 (KHTML, like Gecko) Version/7.1.3 Safari/537.85.12',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko; Google Web Preview) Chrome/27.0.1453 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_0 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12A365 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4049.US Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12H143 Safari/600.1.4',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12H321',
'Mozilla/5.0 (iPad; CPU OS 7_0_3 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11B511 Safari/9537.53',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.1.17 (KHTML, like Gecko) Version/7.1 Safari/537.85.10',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.2.5 (KHTML, like Gecko) Version/7.1.2 Safari/537.85.11',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; ASU2JS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.0.1) Gecko/2008070208 Firefox/3.0.1',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:41.0) Gecko/20100101 Firefox/41.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MDDCJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.34 (KHTML, like Gecko) Qt/4.8.5 Safari/534.34',
'Mozilla/5.0 (iPhone; CPU iPhone OS 7_0 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11A465 Safari/9537.53 BingPreview/1.0b',
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12H143 Safari/600.1.4',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (X11; CrOS x86_64 7262.52.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.86 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MDDCJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/600.4.10 (KHTML, like Gecko) Version/7.1.4 Safari/537.85.13',
'Mozilla/5.0 (Unknown; Linux x86_64) AppleWebKit/538.1 (KHTML, like Gecko) PhantomJS/2.0.0 Safari/538.1',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MALNJS; rv:11.0) like Gecko',
'Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12F69 Safari/600.1.4',
'Mozilla/5.0 (Android; Tablet; rv:40.0) Gecko/40.0 Firefox/40.0',
'Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D257 Safari/9537.53',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/600.2.5 (KHTML, like Gecko) Version/8.0.2 Safari/600.2.5',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_4) AppleWebKit/536.30.1 (KHTML, like Gecko) Version/6.0.5 Safari/536.30.1',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFSAWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.104 AOL/9.8 AOLBuild/4346.13.US Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MAAU; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:35.0) Gecko/20100101 Firefox/35.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.74.9 (KHTML, like Gecko) Version/7.0.2 Safari/537.74.9',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 7_0_2 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11A501 Safari/9537.53',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MAARJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 7_0 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11A465 Safari/9537.53',
'Mozilla/5.0 (Windows NT 10.0; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12F69 Safari/600.1.4',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.78.2 (KHTML, like Gecko) Version/7.0.6 Safari/537.78.2',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:36.0) Gecko/20100101 Firefox/36.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MASMJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36 OPR/31.0.1889.174',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; FunWebProducts; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MAARJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; BOIE9;ENUS; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; Android 4.4.2; SM-T230NU Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; EIE10;ENUSWOL; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 5.1; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (Linux; U; Android 4.0.4; en-us; KFJWA Build/IMM76D) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36 OPR/31.0.1889.174',
'Mozilla/5.0 (Linux; Android 4.0.4; BNTV600 Build/IMM76L) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.111 Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_1_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B440 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.101 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; yie9; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; Android 5.0.2; SM-T530NU Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 9_0 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13A4325c Safari/601.1',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B466 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.89 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/7.0)',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:33.0) Gecko/20100101 Firefox/33.0',
'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12D508 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/44.0.2403.67 Mobile/12H321 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.2; WOW64; Trident/7.0; .NET4.0E; .NET4.0C)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.81 Safari/537.36',
'Mozilla/5.0 (PlayStation 4 2.57) AppleWebKit/537.73 (KHTML, like Gecko)',
'Mozilla/5.0 (Windows NT 6.1; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (Linux; Android 5.0; SM-G900V Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Linux; Android 5.1.1; Nexus 7 Build/LMY48I) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.111 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; LCJB; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/6.0; Touch)',
'Mozilla/5.0 (Linux; Android 5.0.2; SM-T800 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.111 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MASMJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; TNJB; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/537.75.14',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.89 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; ASJB; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0.1; SAMSUNG SCH-I545 4G Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36 OPR/31.0.1889.174',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.114 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; EIE10;ENUSMSN; rv:11.0) like Gecko',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12H321 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; MATBJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:30.0) Gecko/20100101 Firefox/30.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MASAJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; rv:41.0) Gecko/20100101 Firefox/41.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MALC; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4049.US Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:41.0) Gecko/20100101 Firefox/41.0',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.24 (KHTML, like Gecko) Chrome/33.0.0.0 Safari/534.24',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; MDDCJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36 SE 2.X MetaSr 1.0',
'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; yie10; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; Android 5.0; SAMSUNG-SM-G900A Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36',
'Mozilla/5.0 (Linux; U; Android 4.0.3; en-gb; KFTT Build/IML74K) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/8.0)',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; TNJB; rv:11.0) like Gecko',
'Mozilla/5.0 (X11; CrOS x86_64 7077.111.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.0.4; BNTV400 Build/IMM76L) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.111 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; rv:37.0) Gecko/20100101 Firefox/37.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36 LBBROWSER',
'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:41.0) Gecko/20100101 Firefox/41.0',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.76 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0; SAMSUNG SM-G900P Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.104 AOL/9.8 AOLBuild/4346.18.US Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3; GWX:QUALIFIED)',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.107 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; MDDCJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.104 AOL/9.8 AOLBuild/4346.13.US Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4043.US Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:23.0) Gecko/20100101 Firefox/23.0',
'Mozilla/5.0 (Windows NT 5.1; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.13 Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/44.0.2403.89 Chrome/44.0.2403.89 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 6_0_1 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A523 Safari/8536.25',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MANM; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.6.2000 Chrome/30.0.1599.101 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/8.0.57838 Mobile/12H143 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:32.0) Gecko/20100101 Firefox/32.0',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; MDDRJS)',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.22 Safari/537.36',
'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MATBJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Gecko/20100101 Firefox/37.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.104 AOL/9.8 AOLBuild/4346.13.US Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (X11; Linux x86_64; U; en-us) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (X11; CrOS x86_64 6946.86.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.91 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; TNJB; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; MDDRJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.104 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/8.0.57838 Mobile/12F69 Safari/600.1.4',
'Mozilla/5.0 (iPhone; CPU iPhone OS 7_1_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D201 Safari/9537.53',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; GIL 3.5; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:41.0) Gecko/20100101 Firefox/41.0',
'Mozilla/5.0 (Linux; U; Android 4.4.2; en-us; LG-V410/V41010d Build/KOT49I.V41010d) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.1599.103 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/537.75.14',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B411 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; MATBJS; rv:11.0) like Gecko',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/534.34 (KHTML, like Gecko) Qt/4.8.1 Safari/534.34',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; USPortal; rv:11.0) like Gecko',
'Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12H143',
'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:40.0) Gecko/20100101 Firefox/40.0.2 Waterfox/40.0.2',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; SMJB; rv:11.0) like Gecko',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; CMDTDF; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (iPad; CPU OS 6_1_2 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10B146 Safari/8536.25',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36',
'Mozilla/5.0 (MSIE 9.0; Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; TNJB; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 Safari/537.36 SE 2.X MetaSr 1.0',
'Mozilla/5.0 (X11; FC Linux i686; rv:24.0) Gecko/20100101 Firefox/24.0',
'Mozilla/5.0 (X11; CrOS armv7l 7262.52.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.86 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MASAJS; rv:11.0) like Gecko',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; MS-RTC LM 8; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; yie11; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36 Edge/12.10532',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; BOIE9;ENUSMSE; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.2; WOW64; rv:29.0) Gecko/20100101 Firefox/29.0',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E; InfoPath.3)',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:29.0) Gecko/20100101 Firefox/29.0',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3)',
'Mozilla/5.0 (Linux; Android 4.4.2; SM-T320 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/44.0.2403.67 Mobile/12H143 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.143 Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12H321 Safari/600.1.4',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E; 360SE)',
'Mozilla/5.0 (Linux; Android 5.0.2; LG-V410/V41020c Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/34.0.1847.118 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.81 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) GSA/7.0.55539 Mobile/11D257 Safari/9537.53',
'Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12F69',
'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.13 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFTHWA Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (Android; Mobile; rv:40.0) Gecko/40.0 Firefox/40.0',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36 SE 2.X MetaSr 1.0',
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4043.US Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.2; SM-P600 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.99 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; rv:35.0) Gecko/20100101 Firefox/35.0',
'Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5355d Safari/8536.25',
'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.22 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E; 360SE)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; LCJB; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36',
'Mozilla/5.0 (X11; CrOS x86_64 6812.88.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.153 Safari/537.36',
'Mozilla/5.0 (X11; Linux i686; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; ASU2JS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.65 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.154 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.13 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10) AppleWebKit/537.16 (KHTML, like Gecko) Version/8.0 Safari/537.16',
'Mozilla/5.0 (Windows NT 6.1; rv:34.0) Gecko/20100101 Firefox/34.0',
'Mozilla/5.0 (Linux; Android 5.0; SAMSUNG SM-N900V 4G Build/LRX21V) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.3; KFTHWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/44.1.81 like Chrome/44.0.2403.128 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; CMDTDF; .NET4.0C; .NET4.0E; GWX:QUALIFIED)',
'Mozilla/5.0 (iPad; CPU OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/11D257 Safari/9537.53',
'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.6.1000 Chrome/30.0.1599.101 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.2; GT-P5210 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.99 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MDDSJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; Android 4.4.2; QTAQZ3 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.2; QMV7B Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MATBJS; rv:11.0) like Gecko',
'Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/6.0.51363 Mobile/12H321 Safari/600.1.4',
'Mozilla/5.0 (iPad; CPU OS 8_1_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B436 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.2 Safari/530.19.1',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12H321',
'Mozilla/5.0 (Linux; U; Android 4.0.3; en-ca; KFTT Build/IML74K) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1; rv:30.0) Gecko/20100101 Firefox/30.0',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:40.0) Gecko/20100101 Firefox/40.0.2 Waterfox/40.0.2',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; LCJB; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; NISSC; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.111 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9) AppleWebKit/537.71 (KHTML, like Gecko) Version/7.0 Safari/537.71',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; MALC; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.0.9895 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MSBrowserIE; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; Android 5.0.1; SAMSUNG SM-N910V 4G Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.76 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.2; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Linux; Android 5.0.2; SAMSUNG SM-T530NU Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/3.2 Chrome/38.0.2125.102 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.89 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.65 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; LCJB; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.0; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (Linux; Android 5.0.2; SM-T700 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0.1; SAMSUNG-SM-N910A Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; ASU2JS; rv:11.0) like Gecko',
'Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:28.0) Gecko/20100101 Firefox/28.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:29.0) Gecko/20120101 Firefox/29.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.8 (.NET CLR 3.5.30729)',
'Mozilla/5.0 (X11; CrOS x86_64 7077.95.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.90 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.6.1000 Chrome/30.0.1599.101 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36 LBBROWSER',
'Mozilla/5.0 (Windows NT 6.1; rv:36.0) Gecko/20100101 Firefox/36.0',
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/7.0)',
'Mozilla/5.0 (iPad; CPU OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12B466 Safari/600.1.4',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.2; Win64; x64; Trident/6.0; .NET4.0E; .NET4.0C; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727)',
'Mozilla/5.0 (Linux; Android 5.0.2; VK810 4G Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.76.4 (KHTML, like Gecko) Version/7.0.4 Safari/537.76.4',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; SMJB; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; MDDCJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.131 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; BOIE9;ENUS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36 SE 2.X MetaSr 1.0',
'Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/6.0.51363 Mobile/12H143 Safari/600.1.4',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (Windows NT 5.1; rv:41.0) Gecko/20100101 Firefox/41.0',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3)',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.76 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2503.0 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11) AppleWebKit/601.1.50 (KHTML, like Gecko) Version/9.0 Safari/601.1.50',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3; GWX:RESERVED)',
'Mozilla/5.0 (iPad; CPU OS 6_1 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10B141 Safari/8536.25',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/601.1.56 (KHTML, like Gecko) Version/9.0 Safari/601.1.56',
'Mozilla/5.0 (Linux; Android 5.1.1; Nexus 7 Build/LMY47V) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_1_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12B440 Safari/600.1.4',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534+ (KHTML, like Gecko) MsnBot-Media /1.0b',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/7.0)',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.3; WOW64; Trident/7.0)',
'Mozilla/5.0 (Linux; Android 5.1.1; SM-G920V Build/LMY47X) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; ASU2JS; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4049.US Safari/537.36',
'Mozilla/5.0 (X11; CrOS x86_64 6680.78.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.102 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.2; SM-T520 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.59 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.6.2000 Chrome/30.0.1599.101 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.111 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; MAARJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; MALNJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; Android 4.4.2; SM-T900 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36',
'Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)',
'Mozilla/5.0 (Windows NT 6.2; WOW64; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.94 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12D508 Safari/600.1.4',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:36.0) Gecko/20100101 Firefox/36.0',
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2503.0 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.1.2; GT-N8013 Build/JZO54K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFAPWA Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MALCJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; rv:30.0) Gecko/20100101 Firefox/30.0',
'Mozilla/5.0 (Linux; Android 5.0.1; SM-N910V Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_1_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B436 Safari/600.1.4',
'Mozilla/5.0 (iPad; CPU OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12B466 Safari/600.1.4',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_0_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12A405 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.59 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:37.0) Gecko/20100101 Firefox/37.0',
'Mozilla/5.0 (Linux; Android 4.4.2; SM-T310 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.45 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.1.1; Nexus 10 Build/LMY48I) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; TNJB; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.107 Safari/537.36',
'Mozilla/5.0 (X11; CrOS x86_64 7077.123.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E; 360SE)',
'Mozilla/5.0 (Linux; Android 4.4.2; QMV7A Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 7_0_4 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11B554a Safari/9537.53',
'Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0; SAMSUNG-SM-N900A Build/LRX21V) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.4; XT1080 Build/SU6-7.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MAARJS; rv:11.0) like Gecko',
'Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/6.0.51363 Mobile/12F69 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; MALNJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.6.2000 Chrome/30.0.1599.101 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; ASJB; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.73.11 (KHTML, like Gecko) Version/7.0.1 Safari/537.73.11',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/7.0; TNJB; 1ButtonTaskbar)',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'Mozilla/5.0 (Windows Phone 8.1; ARM; Trident/7.0; Touch; rv:11.0; IEMobile/11.0; NOKIA; Lumia 635) like Gecko',
'Mozilla/5.0 (iPad; CPU OS 5_0_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9A405 Safari/7534.48.3',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:35.0) Gecko/20100101 Firefox/35.0',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.101 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.1.1; SAMSUNG SM-N910P Build/LMY47X) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; rv:33.0) Gecko/20100101 Firefox/33.0',
'Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12H321 [Pinterest/iOS]',
'Mozilla/5.0 (Linux; Android 5.0.1; LGLK430 Build/LRX21Y) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/38.0.2125.102 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12H321 Safari',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/8.0; 1ButtonTaskbar)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; NP08; NP08; MAAU; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 5.1; rv:37.0) Gecko/20100101 Firefox/37.0',
'Mozilla/5.0 (Linux; Android 4.4.2; SM-T217S Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; EIE10;ENUSMSE; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.2; WOW64; rv:33.0) Gecko/20100101 Firefox/33.0',
'Mozilla/5.0 (Windows NT 5.1; rv:35.0) Gecko/20100101 Firefox/35.0',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.111 Safari/537.36',
'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:37.0) Gecko/20100101 Firefox/37.0',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.76 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36 LBBROWSER',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.1; XT1254 Build/SU3TL-39) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.13 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.2; Win64; x64; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_1_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12B440 Safari/600.1.4',
'Mozilla/5.0 (MSIE 10.0; Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/44.0.2403.67 Mobile/12F69 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36 OPR/31.0.1889.174',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0.1; SAMSUNG-SGH-I337 Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.3; KFASWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/44.1.81 like Chrome/44.0.2403.128 Safari/537.36',
'Mozilla/5.0 (X11; CrOS armv7l 7077.111.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.67 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A403 Safari/8536.25',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.114 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:36.0) Gecko/20100101 Firefox/36.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0.2; SAMSUNG SM-T800 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/3.0 Chrome/38.0.2125.102 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0; SM-G900V Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.133 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; MAGWJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; MALNJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; Win64; x64; Trident/7.0; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; ATT-IE11; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.103 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36 OPR/31.0.1889.174',
'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36 SE 2.X MetaSr 1.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7) AppleWebKit/534.48.3 (KHTML, like Gecko) Version/5.1 Safari/534.48.3',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.2; WOW64; Trident/7.0; .NET4.0E; .NET4.0C; .NET CLR 3.5.30729; .NET CLR 2.0.50727; .NET CLR 3.0.30729)',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.13 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.114 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; rv:32.0) Gecko/20100101 Firefox/32.0',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/8.0.57838 Mobile/12H321 Safari/600.1.4',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_2 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12D508 Safari/600.1.4',
'Mozilla/5.0 (iPhone; CPU iPhone OS 7_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Version/7.0 Mobile/11D167 Safari/9537.53',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0; MSN 9.0;MSN 9.1;MSN 9.6;MSN 10.0;MSN 10.2;MSN 10.5;MSN 11;MSN 11.5; MSNbMSNI; MSNmen-us; MSNcOTH) like Gecko',
'Mozilla/5.0 (Windows NT 5.1; rv:36.0) Gecko/20100101 Firefox/36.0',
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.0.9895 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/7.0; 1ButtonTaskbar)',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.102 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 YaBrowser/15.7.2357.2877 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:27.0) Gecko/20100101 Firefox/27.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; BOIE9;ENUSMSNIP; rv:11.0) like Gecko',
'Mozilla/5.0 AppleWebKit/999.0 (KHTML, like Gecko) Chrome/99.0 Safari/999.0',
'Mozilla/5.0 (X11; OpenBSD amd64; rv:28.0) Gecko/20100101 Firefox/28.0',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/538.1 (KHTML, like Gecko) PhantomJS/2.0.0 Safari/538.1',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; MAGWJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; Android 4.4.2; GT-N5110 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12B410 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:25.7) Gecko/20150824 Firefox/31.9 PaleMoon/25.7.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 9_0 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13A4325c Safari/601.1',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E; MS-RTC LM 8; InfoPath.3)',
'Mozilla/5.0 (Linux; Android 4.4.2; RCT6203W46 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.0.0 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.101 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (Windows NT 6.3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; Tablet PC 2.0)',
'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; EIE10;ENUSWOL; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; Android 4.4.4; en-us; SAMSUNG SM-N910T Build/KTU84P) AppleWebKit/537.36 (KHTML, like Gecko) Version/2.0 Chrome/34.0.1847.76 Mobile Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.2; RCT6203W46 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Linux; U; Android 4.0.4; en-ca; KFJWI Build/IMM76D) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/34.0.1847.116 Chrome/34.0.1847.116 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.22 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.137 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.45 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; rv:27.0) Gecko/20100101 Firefox/27.0',
'Mozilla/5.0 (Linux; Android 4.4.2; RCT6773W22 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; ASJB; ASJB; MAAU; rv:11.0) like Gecko',
'Mozilla/5.0 (iPad; U; CPU OS 3_2 like Mac OS X; en-us) AppleWebKit/531.21.10 (KHTML, like Gecko) Version/4.0.4 Mobile/7B367 Safari/531.21.10',
'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.7) Gecko/20150824 Firefox/31.9 PaleMoon/25.7.0',
'Mozilla/5.0 (Linux; Android 5.0; SAMSUNG-SM-G870A Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.3; KFSOWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/44.1.81 like Chrome/44.0.2403.128 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.2)',
'Mozilla/5.0 (Windows NT 5.2; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.0.9895 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4049.US Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; EIE10;ENUSMCM; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; Android 5.1.1; SAMSUNG SM-G920P Build/LMY47X) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/3.2 Chrome/38.0.2125.102 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.107 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/600.8.9 (KHTML, like Gecko)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:35.0) Gecko/20100101 Firefox/35.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MALCJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.2; rv:29.0) Gecko/20100101 Firefox/29.0 /29.0',
'Mozilla/5.0 (Linux; Android 5.0.2; SM-T550 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4049.US Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 Safari/537.36 SE 2.X MetaSr 1.0',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Linux; U; Android 4.0.3; en-gb; KFOT Build/IML74K) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0.2; SM-P900 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.1.1; Nexus 9 Build/LMY48I) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.2; SM-T530NU Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (X11; Linux i686; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.143 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.1.1; SM-T330NU Build/LMY47X) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.7.1000 Chrome/30.0.1599.101 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:34.0) Gecko/20100101 Firefox/34.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:35.0) Gecko/20100101 Firefox/35.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.104 Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36',
'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:35.0) Gecko/20100101 Firefox/35.0',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.22 Safari/537.36',
'Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-CN) AppleWebKit/530.19.2 (KHTML, like Gecko) Version/4.0.2 Safari/530.19.1',
'Mozilla/5.0 (Android; Tablet; rv:34.0) Gecko/34.0 Firefox/34.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MALCJS; rv:11.0) like Gecko',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)',
'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (iPad; CPU OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) GSA/8.0.57838 Mobile/11D257 Safari/9537.53',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.146 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; yie10; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; Ubuntu 14.04) AppleWebKit/537.36 Chromium/35.0.1870.2 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; yie11; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36 SE 2.X MetaSr 1.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 Safari/537.36 SE 2.X MetaSr 1.0',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; WOW64; Trident/8.0; TNJB; 1ButtonTaskbar)',
'Mozilla/5.0 (Linux; Android 4.4.2; RCT6773W22 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.0.0 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2503.0 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0; SAMSUNG-SM-G900A Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36',
'Mozilla/5.0 (Windows; U; Windows NT 6.1; zh-CN; rv:1.9.0.8) Gecko/2009032609 Firefox/3.0.8 (.NET CLR 3.5.30729)',
'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.65 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.7.1000 Chrome/30.0.1599.101 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; NP08; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.2; SM-T210R Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; rv:40.0) Gecko/20100101 Firefox/40.0.2 Waterfox/40.0.2',
'Mozilla/5.0 (Linux; Android 5.0; SAMSUNG SM-N900P Build/LRX21V) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.104 AOL/9.8 AOLBuild/4346.18.US Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.22 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0.2; SM-T350 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; ASU2JS; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; Android 5.0.2; SM-T530NU Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.133 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.154 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/7.0; 1ButtonTaskbar)',
'Mozilla/5.0 (Linux; Android 5.0.2; SAMSUNG-SM-G920A Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/3.0 Chrome/38.0.2125.102 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2503.0 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E; 360SE)',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MAAU; MAAU; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64; rv:38.0) Gecko/20100101 Firefox/38.0 Iceweasel/38.2.1',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; MANM; MANM; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:37.0) Gecko/20100101 Firefox/37.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534+ (KHTML, like Gecko) BingPreview/1.0b',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.93 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.81 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.94 AOL/9.7 AOLBuild/4343.4049.US Safari/537.36',
'Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.104 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.107 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.2; QTAQZ3 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.135 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12H321 OverDrive Media Console/3.3.1',
'Mozilla/5.0 (iPad; CPU OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) Mobile/11D257',
'Mozilla/5.0 (iPad; CPU OS 7_1_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) GSA/7.0.55539 Mobile/11D201 Safari/9537.53',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0.1; SCH-I545 Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0; SM-G900P Build/LRX21T) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_0 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12A365 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 5.1; rv:34.0) Gecko/20100101 Firefox/34.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:31.0) Gecko/20100101 Firefox/31.0',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; MDDCJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36',
'Mozilla/5.0 (iPad;U;CPU OS 5_1_1 like Mac OS X; zh-cn)AppleWebKit/534.46.0(KHTML, like Gecko)CriOS/19.0.1084.60 Mobile/9B206 Safari/7534.48.3',
'Mozilla/5.0 (Linux; Android 4.4.3; KFAPWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/44.1.81 like Chrome/44.0.2403.128 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 7_1_1 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/11D201 Safari/9537.53',
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.124 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/43.0.2357.61 Mobile/12H321 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MAMIJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0.1; VS985 4G Build/LRX21Y) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1; rv:33.0) Gecko/20100101 Firefox/33.0',
'Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/45.0.2454.68 Mobile/12H143 Safari/600.1.4',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.0; WOW64; rv:39.0) Gecko/20100101 Firefox/39.0',
'Mozilla/5.0 (Linux; Android 5.0.2; LG-V410/V41020b Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/34.0.1847.118 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2503.0 Safari/537.36',
'Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_1_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12B435 Safari/600.1.4',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36',
'Mozilla/5.0 (X11; Linux x86_64; rv:28.0) Gecko/20100101 Firefox/28.0',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:36.0) Gecko/20100101 Firefox/36.0',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; InfoPath.3; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.2; WOW64; rv:40.0) Gecko/20100101 Firefox/40.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; MDDRJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.6.2000 Chrome/30.0.1599.101 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.3; WOW64; Trident/6.0)',
'Mozilla/5.0 (Linux; Android 5.1.1; SAMSUNG SM-G920T Build/LMY47X) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/3.2 Chrome/38.0.2125.102 Mobile Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3; MS-RTC LM 8)',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2503.0 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.91 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.3; KFTHWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/34.0.0.0 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.3; KFSAWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/44.1.81 like Chrome/44.0.2403.128 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1; rv:32.0) Gecko/20100101 Firefox/32.0',
'Mozilla/5.0 (Linux; Android 4.4.2; SM-T230NU Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.133 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.2.2; SM-T110 Build/JDQ39) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0.1; SAMSUNG SM-N910T Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Win64; x64; Trident/7.0)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:33.0) Gecko/20100101 Firefox/33.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.154 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.99 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.2; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.89 Safari/537.36',
'Mozilla/5.0 (X11; CrOS armv7l 6946.86.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.94 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:38.0) Gecko/20100101 Firefox/38.0 SeaMonkey/2.35',
'Mozilla/5.0 (Linux; Android 4.4.2; SM-T330NU Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 6_0_1 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A8426 Safari/8536.25',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0.2; LG-V410 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36 TheWorld 6',
'Mozilla/5.0 (iPad; CPU OS 8_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12B410 Safari/600.1.4',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.107 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/600.2.5 (KHTML, like Gecko) Version/8.0 Safari/600.1.25',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; EIE10;ENUSWOL)',
'Mozilla/5.0 (iPad; CPU OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/43.0.2357.61 Mobile/12H143 Safari/600.1.4',
'Mozilla/5.0 (iPad; CPU OS 8_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) CriOS/43.0.2357.61 Mobile/12F69 Safari/600.1.4',
'Mozilla/5.0 (Linux; Android 4.4.2; SM-T237P Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.152 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; ATT; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.90 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0.2; SM-T800 Build/LRX22G) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.133 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; EIE10;ENUSMSN; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; MATBJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.107 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36',
'Mozilla/5.0 (Linux; U; Android 4.4.2; en-us; LGMS323 Build/KOT49I.MS32310c) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.1599.103 Mobile Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.81 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.101 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; EIE11;ENUSMSN; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.4.6.1000 Chrome/30.0.1599.101 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; rv:29.0) Gecko/20100101 Firefox/29.0',
'Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.114 Safari/537.36 Puffin/4.5.0IT',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.131 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; yie8; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; U; Android 4.4.3; en-gb; KFTHWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; FunWebProducts; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2505.0 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; MALNJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; BOIE9;ENUSSEM; rv:11.0) like Gecko',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; Win64; x64; Trident/6.0; Touch; WebView/1.0)',
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 5_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9B176 Safari/7534.48.3',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.157 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:33.0) Gecko/20100101 Firefox/33.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.85 Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.0.1; SAMSUNG SPH-L720 Build/LRX22C) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; Trident/7.0; yie9; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.143 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.89 Safari/537.36',
'Mozilla/5.0 (Linux; U; Android 4.4.3; en-us; KFSAWA Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:37.0) Gecko/20100101 Firefox/37.0',
'Mozilla/5.0 (compatible; Windows NT 6.1; Catchpoint) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.81 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/29.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:32.0) Gecko/20100101 Firefox/32.0',
'Mozilla/5.0 (Windows NT 6.0; rv:38.0) Gecko/20100101 Firefox/38.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.4; Z970 Build/KTU84P) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/30.0.0.0 Mobile Safari/537.36',
'Mozilla/5.0 (Linux; Android 5.1.1; Nexus 5 Build/LMY48I) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Mobile Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/534.55.3 (KHTML, like Gecko) Version/5.1.3 Safari/534.53.10',
'Mozilla/5.0 (X11; CrOS armv7l 6812.88.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.153 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2227.1 Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 6_1_3 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10B329 Safari/8536.25',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; MAARJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:36.0) Gecko/20100101 Firefox/36.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; )',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; MASAJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; MAARJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.101 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.101 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.13+ (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10; rv:33.0) Gecko/20100101 Firefox/33.0',
'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.134 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.63 Safari/537.36 SE 2.X MetaSr 1.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 BIDUBrowser/7.6 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; MASMJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 10.0; Trident/7.0; Touch; rv:11.0) like Gecko',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET4.0C; .NET4.0E; 360SE)',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; InfoPath.3; .NET4.0C; .NET4.0E; MS-RTC LM 8)',
'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; Touch; MAGWJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; Android 5.1.1; SAMSUNG SM-G925T Build/LMY47X) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/3.2 Chrome/38.0.2125.102 Mobile Safari/537.36',
'Mozilla/5.0 (X11; CrOS x86_64 6457.107.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; 360SE)',
'Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4.17.9 (KHTML, like Gecko) Version/5.1 Mobile/9B206 Safari/7534.48.3',
'Mozilla/5.0 (Linux; Android 4.2.2; GT-P5113 Build/JDQ39) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (X11; Linux i686; rv:24.0) Gecko/20100101 Firefox/24.0 DejaClick/2.5.0.11',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.154 Safari/537.36 LBBROWSER',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 Safari/537.36 SE 2.X MetaSr 1.0',
'Mozilla/5.0 (Linux; Android 4.4.3; KFARWI Build/KTU84M) AppleWebKit/537.36 (KHTML, like Gecko) Silk/44.1.81 like Chrome/44.0.2403.128 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.117 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/8.0.57838 Mobile/12B466 Safari/600.1.4',
'Mozilla/5.0 (Unknown; Linux i686) AppleWebKit/534.34 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/534.34',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; NP08; MAAU; NP08; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; Android 4.4.2; LG-V410 Build/KOT49I.V41010d) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 Safari/537.36 SE 2.X MetaSr 1.0',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3)',
'Mozilla/5.0 (Windows NT 6.1; rv:28.0) Gecko/20100101 Firefox/28.0',
'Mozilla/5.0 (X11; CrOS x86_64 6946.70.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.132 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.3; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:33.0) Gecko/20100101 Firefox/33.0',
'Mozilla/5.0 (iPod touch; CPU iPhone OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Version/8.0 Mobile/12H321 Safari/600.1.4',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:38.0) Gecko/20100101 IceDragon/38.0.5 Firefox/38.0.5',
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; managedpc; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.116 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; Touch; MASMJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.111 Safari/537.36',
'Mozilla/5.0 (Linux; U; Android 4.0.3; en-ca; KFOT Build/IML74K) AppleWebKit/537.36 (KHTML, like Gecko) Silk/3.68 like Chrome/39.0.2171.93 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.2.2; Le Pan TC802A Build/JDQ39) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 7_1_2 like Mac OS X) AppleWebKit/537.51.2 (KHTML, like Gecko) GSA/6.0.51363 Mobile/11D257 Safari/9537.53',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36 LBBROWSER',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:37.0) Gecko/20100101 Firefox/37.0',
'Mozilla/5.0 (Windows NT 6.2; ARM; Trident/7.0; Touch; rv:11.0; WPDesktop; Lumia 1520) like Gecko',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.65 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:42.0) Gecko/20100101 Firefox/42.0',
'Mozilla/5.0 (iPhone; CPU iPhone OS 7_0_6 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Version/7.0 Mobile/11B651 Safari/9537.53',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; .NET4.0C; .NET4.0E)',
'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET4.0C; .NET4.0E; 360SE)',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.103 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.125 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.3; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:34.0) Gecko/20100101 Firefox/34.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.76 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.87 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; PRU_IE; rv:11.0) like Gecko',
'Mozilla/5.0 (X11; Linux i686) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/37.0.2062.120 Chrome/37.0.2062.120 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 8_4_1 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12H321 [FBAN/FBIOS;FBAV/38.0.0.6.79;FBBV/14316658;FBDV/iPad4,1;FBMD/iPad;FBSN/iPhone OS;FBSV/8.4.1;FBSS/2; FBCR/;FBID/tablet;FBLC/en_US;FBOP/1]',
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.155 Safari/537.36 OPR/31.0.1889.174',
'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; NP02; rv:11.0) like Gecko',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.111 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Win64; x64; Trident/4.0; .NET CLR 2.0.50727; SLCC2; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (X11; CrOS x86_64 6946.63.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:37.0) Gecko/20100101 Firefox/37.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.115 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.0.9895 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.4.4; Nexus 7 Build/KTU84P) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.84 Safari/537.36',
'Mozilla/5.0 (Linux; Android 4.2.2; QMV7B Build/JDQ39) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.114 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; Touch; MASMJS; rv:11.0) like Gecko',
'Mozilla/5.0 (compatible; MSIE 10.0; AOL 9.7; AOLBuild 4343.1028; Windows NT 6.1; WOW64; Trident/7.0)',
'Mozilla/5.0 (Linux; U; Android 4.0.3; en-us) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/31.0.1650.59 Mobile Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; Trident/7.0; Touch; TNJB; rv:11.0) like Gecko',
'Mozilla/5.0 (iPad; CPU OS 8_1_3 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) Mobile/12B466',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; Active Content Browser)',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.130 Safari/537.36',
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; InfoPath.3)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.81 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; Win64; x64; Trident/6.0; WebView/1.0)',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/44.0.2403.89 Safari/537.36',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/40.0.2214.91 Safari/537.36',
'Mozilla/5.0 (iPad; U; CPU OS 5_0 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9A334 Safari/7534.48.3',
'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.135 Safari/537.36',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.130 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) coc_coc_browser/50.0.125 Chrome/44.0.2403.125 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET4.0C; .NET4.0E)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/43.0.2357.124 Safari/537.36',
'Mozilla/5.0 (Windows NT 6.3; Win64; x64; Trident/7.0; MAARJS; rv:11.0) like Gecko',
'Mozilla/5.0 (Linux; Android 5.0; SAMSUNG SM-N900T Build/LRX21V) AppleWebKit/537.36 (KHTML, like Gecko) SamsungBrowser/2.1 Chrome/34.0.1847.76 Mobile Safari/537.36',
'Mozilla/5.0 (iPhone; CPU iPhone OS 8_4 like Mac OS X) AppleWebKit/600.1.4 (KHTML, like Gecko) GSA/7.0.55539 Mobile/12H143 Safari/600.1.4'
])
| 114.813499
| 252
| 0.648352
| 23,702
| 129,280
| 3.512826
| 0.027804
| 0.024165
| 0.103338
| 0.123839
| 0.952331
| 0.949592
| 0.945004
| 0.941809
| 0.931589
| 0.919254
| 0
| 0.226048
| 0.193356
| 129,280
| 1,125
| 253
| 114.915556
| 0.572366
| 0.000951
| 0
| 0.005479
| 0
| 0.912329
| 0.849792
| 0.005149
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012785
| false
| 0
| 0.009132
| 0.001826
| 0.034703
| 0.00274
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3bfd507ed10505ad3e37e328f7f1c0187a8253c8
| 3,522
|
py
|
Python
|
tests/test_gvae.py
|
koukyo1994/weakly-supervised-disentangled-representations
|
e6010cc0b980f580fb2e9afd1336f1b14a471530
|
[
"MIT"
] | 2
|
2020-11-09T18:44:10.000Z
|
2021-07-16T16:44:26.000Z
|
tests/test_gvae.py
|
koukyo1994/weakly-supervised-disentangled-representations
|
e6010cc0b980f580fb2e9afd1336f1b14a471530
|
[
"MIT"
] | null | null | null |
tests/test_gvae.py
|
koukyo1994/weakly-supervised-disentangled-representations
|
e6010cc0b980f580fb2e9afd1336f1b14a471530
|
[
"MIT"
] | null | null | null |
import os
import sys
import pytest
import torch.utils.data as torchdata
sys.path.append(os.getcwd())
os.environ["DISENTANGLEMENT_LIB_DATA"] = "./data"
from disentanglement_lib.data.ground_truth.named_data import get_named_ground_truth_data # noqa
from dataset.pytorch import WeaklySupervisedDataset # noqa
from models import GroupVAE # noqa
@pytest.fixture
def data_generator():
    """Provide the dsprites ground-truth data generator used by all GroupVAE tests."""
    return get_named_ground_truth_data("dsprites_full")
def test_gvae_aggregate_label_multi(data_generator):
    """Smoke-test GroupVAE forward pass and loss with aggregation='label', multi-label mode."""
    dataset = WeaklySupervisedDataset(data_generator, seed=0, k=1, return_index=False)
    x, label = next(iter(torchdata.DataLoader(dataset, batch_size=10)))
    model = GroupVAE(input_shape=[1, 64, 64], beta=16.0, aggregation="label", label_mode="multi")
    rec0, rec1, mu0, logvar0, mu1, logvar1 = model(x, label)
    # Both reconstructions share one shape, mirroring the paired input images.
    assert rec0.size() == rec1.size()
    for out_dim, in_dim in enumerate((0, 2, 3, 4)):
        assert rec0.size(out_dim) == x.size(in_dim)
    # All latent statistics share a single shape, one row per batch element.
    for stats in (mu1, logvar0, logvar1):
        assert mu0.size() == stats.size()
    assert mu0.size(0) == rec0.size(0)
    loss, recons_error, kld = model.loss_fn(
        x, rec0, rec1, mu0, logvar0, mu1, logvar1)
    # Total loss is the beta-weighted ELBO with beta fixed at 16.
    assert loss == recons_error + 16.0 * kld
def test_gvae_aggregate_label_single(data_generator):
    """Smoke-test GroupVAE forward pass and loss with aggregation='label', single-label mode."""
    # return_index=True: the single-label variant consumes factor indices.
    dataset = WeaklySupervisedDataset(data_generator, seed=0, k=1, return_index=True)
    x, label = next(iter(torchdata.DataLoader(dataset, batch_size=10)))
    model = GroupVAE(input_shape=[1, 64, 64], beta=16.0, aggregation="label", label_mode="single")
    rec0, rec1, mu0, logvar0, mu1, logvar1 = model(x, label)
    # Both reconstructions share one shape, mirroring the paired input images.
    assert rec0.size() == rec1.size()
    for out_dim, in_dim in enumerate((0, 2, 3, 4)):
        assert rec0.size(out_dim) == x.size(in_dim)
    # All latent statistics share a single shape, one row per batch element.
    for stats in (mu1, logvar0, logvar1):
        assert mu0.size() == stats.size()
    assert mu0.size(0) == rec0.size(0)
    loss, recons_error, kld = model.loss_fn(
        x, rec0, rec1, mu0, logvar0, mu1, logvar1)
    # Total loss is the beta-weighted ELBO with beta fixed at 16.
    assert loss == recons_error + 16.0 * kld
def test_gvae_aggregate_argmax(data_generator):
    """Smoke-test GroupVAE forward pass and loss with argmax aggregation."""
    dataset = WeaklySupervisedDataset(data_generator, seed=0, k=1, return_index=False)
    x, label = next(iter(torchdata.DataLoader(dataset, batch_size=10)))
    model = GroupVAE(input_shape=[1, 64, 64], beta=16.0, aggregation="argmax")
    rec0, rec1, mu0, logvar0, mu1, logvar1 = model(x, label)
    # Both reconstructions share one shape, mirroring the paired input images.
    assert rec0.size() == rec1.size()
    for out_dim, in_dim in enumerate((0, 2, 3, 4)):
        assert rec0.size(out_dim) == x.size(in_dim)
    # All latent statistics share a single shape, one row per batch element.
    for stats in (mu1, logvar0, logvar1):
        assert mu0.size() == stats.size()
    assert mu0.size(0) == rec0.size(0)
    loss, recons_error, kld = model.loss_fn(
        x, rec0, rec1, mu0, logvar0, mu1, logvar1)
    # Total loss is the beta-weighted ELBO with beta fixed at 16.
    assert loss == recons_error + 16.0 * kld
| 34.871287
| 98
| 0.704145
| 531
| 3,522
| 4.425612
| 0.13936
| 0.070213
| 0.137872
| 0.153191
| 0.849362
| 0.817021
| 0.817021
| 0.817021
| 0.817021
| 0.817021
| 0
| 0.052129
| 0.166667
| 3,522
| 100
| 99
| 35.22
| 0.748552
| 0.003975
| 0
| 0.714286
| 0
| 0
| 0.019977
| 0.006849
| 0
| 0
| 0
| 0
| 0.428571
| 1
| 0.057143
| false
| 0
| 0.1
| 0
| 0.171429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ce25a6171b380f0c4312d85e2e101210b0c4be19
| 5,810
|
py
|
Python
|
test/test_packet.py
|
mcdeoliveira/ctrl
|
6c6062c6d1e9902178500abcd10be6ac0bcf043d
|
[
"Apache-2.0"
] | 12
|
2017-06-20T13:20:40.000Z
|
2021-01-18T00:12:10.000Z
|
test/test_packet.py
|
mcdeoliveira/beaglebone
|
6c6062c6d1e9902178500abcd10be6ac0bcf043d
|
[
"Apache-2.0"
] | 2
|
2017-06-12T15:17:24.000Z
|
2018-01-30T18:22:19.000Z
|
test/test_packet.py
|
mcdeoliveira/beaglebone
|
6c6062c6d1e9902178500abcd10be6ac0bcf043d
|
[
"Apache-2.0"
] | 4
|
2017-09-25T12:19:19.000Z
|
2019-01-31T21:46:24.000Z
|
import struct
import numpy
import io
import pickle
import pyctrl.packet as packet
def testA():
    """Round-trip the bare single-character 'A' packet type."""
    # Packing prepends the type code byte to the payload byte.
    assert packet.pack('A', 'C') == b'AC'
    assert packet.pack('A', 'B') == b'AB'
    assert packet.pack('A', 'C') != b'AB'
    # Unpacking a stream recovers the (type, payload) pair.
    for raw, expected in ((b'AC', ('A', 'C')), (b'AB', ('A', 'B'))):
        assert packet.unpack_stream(io.BytesIO(raw)) == expected
    assert packet.unpack_stream(io.BytesIO(b'AB')) != ('A', 'C')
def testC():
    """Round-trip the bare single-character 'C' packet type."""
    # Packing prepends the type code byte to the payload byte.
    assert packet.pack('C', 'C') == b'CC'
    assert packet.pack('C', 'B') == b'CB'
    assert packet.pack('C', 'C') != b'CB'
    # Unpacking a stream recovers the (type, payload) pair.
    for raw, expected in ((b'CC', ('C', 'C')), (b'CB', ('C', 'B'))):
        assert packet.unpack_stream(io.BytesIO(raw)) == expected
    assert packet.unpack_stream(io.BytesIO(b'CB')) != ('C', 'C')
def testS():
    """Check the length-prefixed string packet type 'S'."""
    # Wire layout: type byte, uint32 length, then the UTF-8 payload.
    wire = struct.pack('<cI3s', b'S', 3, b'abc')
    assert packet.pack('S', 'abc') == wire
    assert packet.pack('S', 'abcd') != wire
    assert packet.unpack_stream(io.BytesIO(wire)) == ('S', 'abc')
    assert packet.unpack_stream(io.BytesIO(wire)) != ('S', 'abcd')
def testIFD():
    """Check the scalar packet types: int 'I', float32 'F', double 'D'."""
    # Integers travel as '<ci>'.
    wire_i = struct.pack('<ci', b'I', 3)
    assert packet.pack('I', 3) == wire_i
    assert packet.pack('I', 3) != struct.pack('<ci', b'I', 4)
    assert packet.unpack_stream(io.BytesIO(wire_i)) == ('I', 3)
    assert packet.unpack_stream(io.BytesIO(struct.pack('<ci', b'I', 4))) != ('I', 3)
    # Single-precision floats are truncated to float32 on the wire,
    # so the unpacked value is compared as numpy.float32.
    assert packet.pack('F', 3.3) == struct.pack('<cf', b'F', 3.3)
    assert packet.pack('F', 3.3) != struct.pack('<cf', b'F', 4.3)
    wire_f = struct.pack('<cf', b'F', numpy.float32(3.3))
    assert packet.unpack_stream(io.BytesIO(wire_f)) == ('F', numpy.float32(3.3))
    assert packet.unpack_stream(io.BytesIO(struct.pack('<cf', b'F', 4.3))) != ('F', 3.3)
    # Doubles keep full precision via '<cd>'.
    assert packet.pack('D', 3.3) == struct.pack('<cd', b'D', 3.3)
    assert packet.pack('D', 3.3) != struct.pack('<cd', b'D', 4.3)
    assert packet.unpack_stream(io.BytesIO(struct.pack('<cd', b'D', 3.3))) == ('D', 3.3)
    assert packet.unpack_stream(io.BytesIO(struct.pack('<cd', b'D', 4.3))) != ('D', 3.3)
def testV():
    """Round-trip numeric vector ('V') packets of int, float32 and double elements.

    Fixes: the original shadowed the builtin ``type`` with the unpacked
    packet code, and repeated the same pack/unpack sequence four times;
    the checks are factored into a local helper.
    """
    def _roundtrip(vector, fmt, elem_code, values):
        # Expected wire layout: 'V', element type code, uint32 count, payload.
        wire = struct.pack(fmt, b'V', elem_code, len(values), *values)
        assert packet.pack('V', vector) == wire
        ptype, decoded = packet.unpack_stream(io.BytesIO(wire))
        assert ptype == 'V'
        assert numpy.all(decoded == vector)

    # Integer vectors, including a negative component.
    _roundtrip(numpy.array((1, 2, 3), int), '<ccIiii', b'I', (1, 2, 3))
    _roundtrip(numpy.array((1, -2, 3), int), '<ccIiii', b'I', (1, -2, 3))
    # Single-precision vector.
    _roundtrip(numpy.array((1.3, -2, 3), numpy.float32), '<ccIfff', b'F', (1.3, -2, 3))
    # Double-precision vector.
    _roundtrip(numpy.array((1.3, -2, 3), float), '<ccIddd', b'D', (1.3, -2, 3))
def testM():
    """Round-trip 2x3 matrix ('M') packets of int, float32 and double elements.

    Fixes: ``numpy.float`` was deprecated in NumPy 1.20 and removed in 1.24,
    so the original double-precision case raises AttributeError on modern
    NumPy — the builtin ``float`` is the same dtype. Also stops shadowing
    the builtin ``type`` and factors the repeated pack/unpack sequence into
    a local helper.
    """
    def _roundtrip(matrix, fmt, elem_code, flat):
        # Expected wire layout: 'M', uint32 row count, then the flattened
        # elements framed as a 'V' vector packet.
        wire = struct.pack(fmt, b'M', 2, b'V', elem_code, len(flat), *flat)
        assert packet.pack('M', matrix) == wire
        ptype, decoded = packet.unpack_stream(io.BytesIO(wire))
        assert ptype == 'M'
        assert numpy.all(decoded == matrix)

    # Integer matrices, including negative components.
    _roundtrip(numpy.array(((1, 2, 3), (3, 4, 5)), int),
               '<cIccIiiiiii', b'I', (1, 2, 3, 3, 4, 5))
    _roundtrip(numpy.array(((1, -2, 3), (3, 4, -5)), int),
               '<cIccIiiiiii', b'I', (1, -2, 3, 3, 4, -5))
    # Single-precision matrix.
    _roundtrip(numpy.array(((1.3, -2, 3), (0, -1, 2.5)), numpy.float32),
               '<cIccIffffff', b'F', (1.3, -2, 3, 0, -1, 2.5))
    # Double-precision matrix (builtin float == float64).
    _roundtrip(numpy.array(((1.3, -2, 3), (0, -1, 2.5)), float),
               '<cIccIdddddd', b'D', (1.3, -2, 3, 0, -1, 2.5))
def testP():
    """Round-trip an arbitrary object through the pickled 'P' packet type.

    Fixes: ``numpy.float`` was removed in NumPy 1.24 (builtin ``float`` is
    the same dtype), and the unpacked code no longer shadows the builtin
    ``type``.
    """
    original = numpy.array(((1.3, -2, 3), (0, -1, 2.5)), float)
    payload = packet.pack('P', original)
    ptype, decoded = packet.unpack_stream(io.BytesIO(payload))
    assert ptype == 'P'
    assert numpy.all(decoded == original)
def testKR():
    """Round-trip keyword ('K') and positional ('R') argument packets."""
    cases = (
        ('K', {'a': 1, 'b': 2}),   # kwargs travel as a dict
        ('R', ('a', 1, 'b', 2)),   # return/positional values travel as a tuple
    )
    for code, args in cases:
        payload = packet.pack(code, args)
        ptype, recovered = packet.unpack_stream(io.BytesIO(payload))
        assert ptype == code
        assert recovered == args
if __name__ == "__main__":
    # Run the full suite when invoked directly (outside pytest).
    for case in (testA, testC, testS, testIFD, testV, testM, testP, testKR):
        case()
| 32.640449
| 113
| 0.552496
| 949
| 5,810
| 3.347734
| 0.082192
| 0.135977
| 0.141643
| 0.157381
| 0.876298
| 0.860875
| 0.824363
| 0.75543
| 0.725527
| 0.678628
| 0
| 0.04734
| 0.207401
| 5,810
| 177
| 114
| 32.824859
| 0.642562
| 0.015318
| 0
| 0.322314
| 0
| 0
| 0.067987
| 0
| 0
| 0
| 0
| 0
| 0.479339
| 1
| 0.066116
| false
| 0
| 0.041322
| 0
| 0.107438
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
023c7003725f54c9d11bdefcc97842ecf6eb0e12
| 5,076
|
py
|
Python
|
tests/test_Linalg.py
|
MaxInGaussian/GomPlex
|
fae90f7cca778ce9ba5a9a1d3048f3d78de2e289
|
[
"BSD-3-Clause"
] | 8
|
2018-09-04T12:38:14.000Z
|
2021-12-23T04:06:23.000Z
|
tests/test_Linalg.py
|
MaxInGaussian/GomPlex
|
fae90f7cca778ce9ba5a9a1d3048f3d78de2e289
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_Linalg.py
|
MaxInGaussian/GomPlex
|
fae90f7cca778ce9ba5a9a1d3048f3d78de2e289
|
[
"BSD-3-Clause"
] | 2
|
2019-12-09T09:11:52.000Z
|
2021-01-31T10:55:29.000Z
|
################################################################################
# Github: https://github.com/MaxInGaussian/GomPlex
# Author: Max W. Y. Lam (maxingaussian@gmail.com)
################################################################################
from sys import path
path.append("../")
import numpy as np
from scipy import linalg
from timeit import Timer
from GomPlex import *

time_reps = 1
N, M, K, D = 1000, 500, 10, 30
X = .5*np.random.rand(N, D)
spectral_freqs = np.random.randn(D, K)
X_nfft = X.dot(spectral_freqs)
x = X_nfft.ravel()
f = np.repeat(np.sin(-20*np.pi*np.mean(X_nfft, 1)), K)+0j
f_hat = np.random.randn(M)+0j
noise = 1e-2
y = f+np.random.randn(N*K)*np.sqrt(noise)

def _compare(label, baseline, candidate):
    """Run the numpy baseline and our fast candidate once, print the
    l0/l1/l2 error between their results, then time each.

    label     -- name printed in the section header ("test of <label>")
    baseline  -- zero-argument callable: slow numpy reference
    candidate -- zero-argument callable: fast implementation under test
    """
    print()
    print('test of ' + label)
    ref = baseline()
    fast = candidate()
    err = np.abs(ref - fast)
    print('approx l0 error:', np.max(err))
    print('approx l1 error:', np.mean(err))
    print('approx l2 error:', np.sqrt(np.mean(err**2)))
    timer = Timer(baseline)
    print('numpy needs  ', timer.timeit(time_reps)/time_reps, 's')
    timer = Timer(candidate)
    print('our algo needs', timer.timeit(time_reps)/time_reps, 's')

# The eight sections below used to be eight copy-pasted blocks; each pairs
# a numpy reference implementation with the fast GomPlex equivalent.
_compare('nfft', lambda: ndft(x, f_hat, M), lambda: nfft(x, f_hat, M))
_compare('adj_nfft', lambda: adj_ndft(x, f, M), lambda: adj_nfft(x, f, M))
_compare('solve_Phi_algo_1',
         lambda: numpy_solve_Phi(f, x, M), lambda: solve_Phi_algo_1(f, x, M))
_compare('solve_Phi_algo_2',
         lambda: numpy_solve_Phi(f, x, M), lambda: solve_Phi_algo_2(f, x, M))
_compare('solve_A_tilde_algo_1',
         lambda: numpy_solve_A_tilde(f_hat, x, M),
         lambda: solve_A_tilde_algo_1(f_hat, x, M))
_compare('solve_A_tilde_algo_2',
         lambda: numpy_solve_A_tilde(f_hat, x, M),
         lambda: solve_A_tilde_algo_2(f_hat, x, M))
_compare('solve_A_algo_1',
         lambda: numpy_solve_A(y, x, M, noise),
         lambda: solve_A_algo_1(y, x, M, noise))
_compare('solve_A_algo_2',
         lambda: numpy_solve_A(y, x, M, noise),
         lambda: solve_A_algo_2(y, x, M, noise))
| 39.65625
| 80
| 0.695626
| 954
| 5,076
| 3.474843
| 0.084906
| 0.079638
| 0.072398
| 0.053092
| 0.871795
| 0.849472
| 0.838612
| 0.838612
| 0.831674
| 0.830468
| 0
| 0.015251
| 0.095745
| 5,076
| 127
| 81
| 39.968504
| 0.706972
| 0.01911
| 0
| 0.519231
| 0
| 0
| 0.167048
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.048077
| 0
| 0.048077
| 0.538462
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
023fd06b059f100d8f27b42e5b57a947ae6a5e1b
| 114
|
py
|
Python
|
torchdiffeq/__init__.py
|
gaozhihan/torchdiffeq
|
414781617d595ba01cc3f23382e25ab890f4ca66
|
[
"MIT"
] | 1
|
2022-02-22T12:22:48.000Z
|
2022-02-22T12:22:48.000Z
|
torchdiffeq/__init__.py
|
gaozhihan/torchdiffeq
|
414781617d595ba01cc3f23382e25ab890f4ca66
|
[
"MIT"
] | null | null | null |
torchdiffeq/__init__.py
|
gaozhihan/torchdiffeq
|
414781617d595ba01cc3f23382e25ab890f4ca66
|
[
"MIT"
] | null | null | null |
# Public API of the package: ODE integrators with and without the adjoint
# method, plus the covariance-propagating variant from the covar module.
from ._impl import odeint
from ._impl import odeint_adjoint
from .covar import odeint_covar
# Package version string.
__version__ = "0.1.1"
| 22.8
| 33
| 0.798246
| 18
| 114
| 4.611111
| 0.5
| 0.433735
| 0.337349
| 0.481928
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030303
| 0.131579
| 114
| 4
| 34
| 28.5
| 0.808081
| 0
| 0
| 0
| 0
| 0
| 0.04386
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5a177c70518c69d6f3aa18eb96ab4913d997df22
| 9,403
|
py
|
Python
|
3rd_party/RSeQC-2.6.4/lib/qcmodule/annoGene.py
|
genomecuration/JAM
|
8b834d3efe32f79c48887c2797005619ac2c3a1d
|
[
"BSD-3-Clause"
] | null | null | null |
3rd_party/RSeQC-2.6.4/lib/qcmodule/annoGene.py
|
genomecuration/JAM
|
8b834d3efe32f79c48887c2797005619ac2c3a1d
|
[
"BSD-3-Clause"
] | null | null | null |
3rd_party/RSeQC-2.6.4/lib/qcmodule/annoGene.py
|
genomecuration/JAM
|
8b834d3efe32f79c48887c2797005619ac2c3a1d
|
[
"BSD-3-Clause"
] | null | null | null |
import collections
import sys  # was missing: the error paths below write to sys.stderr

from bx.intervals import *
from qcmodule import BED
'''Compare given bed entry to reference gene model'''
def getCDSExonFromFile(bedfile):
    '''Only Extract CDS exon regions from input bed file (must be 12-column).

    Returns a list of [chrom:strand, exon_start, exon_end] triples: each
    block of each record, clipped to the record's [cdsStart, cdsEnd]
    interval; blocks entirely outside the CDS are skipped.
    '''
    ret_lst = []
    for f in open(bedfile, 'r'):
        f = f.strip().split()
        chrom = f[0]
        chrom_start = int(f[1])
        strand = f[5]
        cdsStart = int(f[6])
        cdsEnd = int(f[7])
        blockSizes = [int(i) for i in f[10].strip(',').split(',')]
        # blockStarts in BED are relative to chromStart; make them absolute.
        blockStarts = [chrom_start + int(i) for i in f[11].strip(',').split(',')]
        # Key output rows by "chrom:strand" so strands never merge downstream.
        chrom = chrom + ':' + strand
        for base, offset in zip(blockStarts, blockSizes):
            if (base + offset) < cdsStart:
                continue
            if base > cdsEnd:
                continue
            exon_start = max(base, cdsStart)
            exon_end = min(base + offset, cdsEnd)
            ret_lst.append([chrom, exon_start, exon_end])
    return ret_lst
def getUTRExonFromFile(bedfile, utr=35):
    '''Only Extract UTR regions from input bed file (must be 12-column). output is 6-column bed format.
    When utr=35 [default], extract both 5' and 3' UTR. When utr=3, only extract 3' UTR. When utr=5,
    only extract 5' UTR'''
    ret_lst = []
    for line in open(bedfile, 'r'):
        # Skip comment/track/browser header lines.
        if line.startswith('#'): continue
        if line.startswith('track'): continue
        if line.startswith('browser'): continue
        fields = line.rstrip('\r\n').split()
        chrom = fields[0]
        strand = fields[5]
        txStart = int(fields[1])
        txEnd = int(fields[2])
        cdsStart = int(fields[6])
        cdsEnd = int(fields[7])
        # Comprehensions instead of map(): on Python 3 map() returns a
        # single-use iterator, and the original consumed exon_start twice
        # (once to build exon_end, once in the zip below), yielding wrong
        # results. Values are identical under Python 2.
        exon_start = [txStart + int(i) for i in fields[11].rstrip(',').split(',')]
        exon_size = [int(i) for i in fields[10].rstrip(',').split(',')]
        exon_end = [s + z for s, z in zip(exon_start, exon_size)]
        chrom = chrom + ':' + strand
        if utr == 35 or utr == 5:
            # Exon portions upstream of cdsStart are 5' UTR.
            for st, end in zip(exon_start, exon_end):
                if st < cdsStart:
                    utr_st = st
                    utr_end = min(end, cdsStart)
                    ret_lst.append([chrom, utr_st, utr_end])
        if utr == 35 or utr == 3:
            # Exon portions downstream of cdsEnd are 3' UTR.
            for st, end in zip(exon_start, exon_end):
                if end > cdsEnd:
                    utr_st = max(st, cdsEnd)
                    utr_end = end
                    ret_lst.append([chrom, utr_st, utr_end])
    return ret_lst
def getExonFromFile(bedfile):
    '''Extract ALL exon regions from input bed file (must be 12-column). return list of [chrom:+ st end]'''
    ret_lst = []
    for line in open(bedfile, 'r'):
        try:
            if line.startswith('#'): continue
            if line.startswith('track'): continue
            if line.startswith('browser'): continue
            fields = line.rstrip('\r\n').split()
            txStart = int(fields[1])
            chrom = fields[0]
            strand = fields[5]
            geneName = fields[3]
            score = fields[4]
            # Comprehensions instead of map(): Python 3 map() iterators are
            # single-use, which broke the original's reuse of exon_start.
            exon_start = [txStart + int(i) for i in fields[11].rstrip(',').split(',')]
            exon_size = [int(i) for i in fields[10].rstrip(',').split(',')]
            exon_end = [s + z for s, z in zip(exon_start, exon_size)]
        except Exception:
            # sys.stderr.write works on both Python 2 and 3, unlike the
            # original "print >>sys.stderr" statement; `line` keeps its
            # trailing newline, matching the old trailing-comma print.
            sys.stderr.write("[NOTE:input bed must be 12-column] skipped this line: " + line)
            continue
        chrom = chrom + ':' + strand
        for st, end in zip(exon_start, exon_end):
            ret_lst.append([chrom, st, end])
    return ret_lst
def getExonFromFile2(bedfile):
    '''Extract ALL exon regions from input bed file (must be 12-column). return dict

    Keys are "chrom:txStart-txEnd:strand:geneName"; values are sets of
    (exon_start, exon_end) tuples.
    '''
    ret_dict_full = collections.defaultdict(set)
    for line in open(bedfile, 'r'):
        tmp = []
        try:
            if line.startswith('#'): continue
            if line.startswith('track'): continue
            if line.startswith('browser'): continue
            fields = line.rstrip('\r\n').split()
            txStart = int(fields[1])
            # Fixed: txEnd was never assigned, yet the key below used it.
            txEnd = int(fields[2])
            chrom = fields[0]
            strand = fields[5]
            geneName = fields[3]
            score = fields[4]
            # Comprehensions instead of map() (single-use iterators on py3).
            exon_start = [txStart + int(i) for i in fields[11].rstrip(',').split(',')]
            exon_size = [int(i) for i in fields[10].rstrip(',').split(',')]
            exon_end = [s + z for s, z in zip(exon_start, exon_size)]
            # Fixed: the original concatenated bare ints (and misspelled
            # txStart as "txstart"), so every line raised inside this try
            # and was silently skipped.
            key = chrom + ":" + str(txStart) + "-" + str(txEnd) + ":" + strand + ':' + geneName
        except Exception:
            # Both-Python-versions replacement for "print >>sys.stderr".
            sys.stderr.write("[NOTE:input bed must be 12-column] skipped this line: " + line)
            continue
        for st, end in zip(exon_start, exon_end):
            # Fixed: list.append takes one argument; store (start, end) pairs.
            tmp.append((st, end))
        ret_dict_full[key] = set(tmp)
    return ret_dict_full
def getUTRExonFromLine(bedline, utr=35):
    '''Extract UTR regions from input bed line. When utr=35 [default], extract both
    5' and 3' UTR. When utr=3, only extract 3' UTR. When utr=5,only extract 5' UTR

    Returns a list of [chrom:strand, utr_start, utr_end] triples, or None
    for comment/track/browser/blank lines.
    '''
    ret_lst = []
    line = bedline
    if line.startswith('#'): return None
    if line.startswith('track'): return None
    if line.startswith('browser'): return None
    if not line.strip(): return None
    fields = line.rstrip('\r\n').split()
    chrom = fields[0]
    strand = fields[5]
    txStart = int(fields[1])
    txEnd = int(fields[2])
    cdsStart = int(fields[6])
    cdsEnd = int(fields[7])
    # Comprehensions instead of map() (single-use iterators on Python 3
    # broke the original's double consumption of exon_start).
    exon_start = [txStart + int(i) for i in fields[11].rstrip(',').split(',')]
    exon_size = [int(i) for i in fields[10].rstrip(',').split(',')]
    exon_end = [s + z for s, z in zip(exon_start, exon_size)]
    # Fixed: the original referenced an undefined name "chromm" here,
    # making every call raise NameError.
    chrom = chrom + ':' + strand
    if utr == 35 or utr == 5:
        # Exon portions upstream of cdsStart are 5' UTR.
        for st, end in zip(exon_start, exon_end):
            if st < cdsStart:
                utr_st = st
                utr_end = min(end, cdsStart)
                ret_lst.append([chrom, utr_st, utr_end])
    if utr == 35 or utr == 3:
        # Exon portions downstream of cdsEnd are 3' UTR.
        for st, end in zip(exon_start, exon_end):
            if end > cdsEnd:
                utr_st = max(st, cdsEnd)
                utr_end = end
                ret_lst.append([chrom, utr_st, utr_end])
    return ret_lst
def getCDSExonFromLine(bedline):
    '''Extract CDS exon regions from input bed line (must be 12-column).

    Returns a list of [chrom:strand, exon_start, exon_end] triples, or None
    for comment/track/browser/blank lines.
    '''
    ret_lst = []
    line = bedline
    if line.startswith('#'): return None
    if line.startswith('track'): return None
    if line.startswith('browser'): return None
    if not line.strip(): return None
    f = line.strip().split()
    chrom = f[0]
    chrom_start = int(f[1])
    strand = f[5]
    cdsStart = int(f[6])
    cdsEnd = int(f[7])
    blockSizes = [int(i) for i in f[10].strip(',').split(',')]
    # blockStarts in BED are relative to chromStart; make them absolute.
    blockStarts = [chrom_start + int(i) for i in f[11].strip(',').split(',')]
    # Fixed: the original referenced an undefined name "chromm" here,
    # making every call raise NameError.
    chrom = chrom + ':' + strand
    # Clip each block to [cdsStart, cdsEnd]; skip blocks wholly outside.
    for base, offset in zip(blockStarts, blockSizes):
        if (base + offset) < cdsStart:
            continue
        if base > cdsEnd:
            continue
        exon_start = max(base, cdsStart)
        exon_end = min(base + offset, cdsEnd)
        ret_lst.append([chrom, exon_start, exon_end])
    return ret_lst
def getExonFromLine(bedline):
    '''Extract ALL exon regions from input bed line (must be 12-column). return list of [chrom:+ st end]

    Despite the historical name ret_lst, the return value is a
    defaultdict(set) mapping "chrom:strand" to the set of all exon
    boundary coordinates (starts and ends) of the line.
    '''
    ret_lst = collections.defaultdict(set)
    line = bedline
    fields = line.rstrip('\r\n').split()
    txStart = int(fields[1])
    chrom = fields[0]
    strand = fields[5]
    geneName = fields[3]
    score = fields[4]
    # Comprehensions instead of map(): on Python 3 the original's single-use
    # map() iterators made the zip below see an already-consumed exon_start.
    exon_start = [txStart + int(i) for i in fields[11].rstrip(',').split(',')]
    exon_size = [int(i) for i in fields[10].rstrip(',').split(',')]
    exon_end = [s + z for s, z in zip(exon_start, exon_size)]
    chrom = chrom + ':' + strand
    for st, end in zip(exon_start, exon_end):
        ret_lst[chrom].add(st)
        ret_lst[chrom].add(end)
    return ret_lst
def annotateBed(inputbed, refbed, outfile):
    '''compare inputbed to refbed

    Builds interval trees of all CDS and UTR exons from refbed, then labels
    each record of inputbed: "novel(intergenic)" when it overlaps no known
    exon on its chrom:strand, "unknownChrom" when its chrom:strand key is
    absent from the reference. Overlapping records fall through to an
    unfinished classification branch (see commented-out code at the end).
    NOTE(review): labels are emitted via Python-2 print statements to
    stdout; the outfile handle OF is opened but never written -- confirm
    whether output was meant to go to outfile.
    '''
    ref_exon_ranges = {}
    ref_exon_starts = collections.defaultdict(set) #key='chrom:+', value=set()
    ref_exon_ends = collections.defaultdict(set)
    OF = open(outfile,'w')
    #read reference bed file
    #Extract CDS exons from reference bed
    tmp = getCDSExonFromFile(refbed)
    for i in tmp: #chr:+, st, end (0-base)
        ref_exon_starts[i[0]].add(int(i[1]))
        ref_exon_ends[i[0]].add(int(i[2]))
        # one Intersecter (bx-python interval tree) per chrom:strand
        if i[0] not in ref_exon_ranges:
            ref_exon_ranges[i[0]] = Intersecter()
        ref_exon_ranges[i[0]].add_interval( Interval( int(i[1]), int(i[2]) ))
    #Extract UTR exons from reference bed
    tmp = getUTRExonFromFile(refbed)
    for i in tmp: #chr:+, st, end (0-base)
        ref_exon_starts[i[0]].add(int(i[1]))
        ref_exon_ends[i[0]].add(int(i[2]))
        if i[0] not in ref_exon_ranges:
            ref_exon_ranges[i[0]] = Intersecter()
        ref_exon_ranges[i[0]].add_interval( Interval( int(i[1]), int(i[2]) ))
    #prepare data structure
    ref_exon_chain = getExonFromFile2(refbed)
    #read input bed
    for line in open(inputbed,'r'):
        # skip comment/track/browser headers and blank lines
        if line.startswith('#'):continue
        if line.startswith('track'):continue
        if line.startswith('browser'):continue
        if not line.strip(): continue
        line = line.strip()
        fields=line.split()
        chrom = fields[0]
        strand = fields[5]
        tx_start = int(fields[1])
        tx_end = int(fields[2])
        key = chrom + ":" +strand
        if key in ref_exon_ranges:
            if len(ref_exon_ranges[key].find(tx_start,tx_end))==0: #input gene does NOT overlap with any known exons
                print line + '\t' + 'novel(intergenic)'
            else:
                # overlapping case: exon chain is computed but classification
                # below was never finished (commented-out code)
                input_exon_chain=getExonFromLine(line)
                #print line + '\t' + 'overlap'
                #utr_3_exons = getUTRExon(line,utr=3)
                #utr_5_exons = getUTRExon(line,utr=5)
                #cds_exons = getCDSExon(line)
        else:
            print line + '\t' + 'unknownChrom'
    #for utr3 in utr_3_exons:
    # (chrom, st, end) = (utr3[0], int(utr3[1]),int(utr3[2]))
    # if chrom in ref_exon_ranges:
    # if len(ref_exon_ranges[chrom].find(st,end))>0 : #input exon overlap with known exon
    # else:
| 30.529221
| 108
| 0.667766
| 1,486
| 9,403
| 4.103634
| 0.112382
| 0.05018
| 0.046901
| 0.057724
| 0.776976
| 0.757461
| 0.742539
| 0.732699
| 0.724172
| 0.702525
| 0
| 0.020541
| 0.166436
| 9,403
| 308
| 109
| 30.529221
| 0.757464
| 0.100819
| 0
| 0.808889
| 0
| 0
| 0.037756
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.013333
| null | null | 0.017778
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a1df691b8a3ea861579826ee6d46c8498a2429c
| 146
|
py
|
Python
|
integers/tests/test_palindrome_number.py
|
ahcode0919/python-ds-algorithms
|
0d617b78c50b6c18da40d9fa101438749bfc82e1
|
[
"MIT"
] | null | null | null |
integers/tests/test_palindrome_number.py
|
ahcode0919/python-ds-algorithms
|
0d617b78c50b6c18da40d9fa101438749bfc82e1
|
[
"MIT"
] | null | null | null |
integers/tests/test_palindrome_number.py
|
ahcode0919/python-ds-algorithms
|
0d617b78c50b6c18da40d9fa101438749bfc82e1
|
[
"MIT"
] | 3
|
2020-10-07T20:24:45.000Z
|
2020-12-16T04:53:19.000Z
|
from integers.palindrome_number import is_palindrome
def test_is_palindrome():
    """121 reads the same reversed; -121 does not (the sign breaks it)."""
    for number, expected in ((121, True), (-121, False)):
        assert bool(is_palindrome(number)) == expected
| 20.857143
| 52
| 0.794521
| 20
| 146
| 5.5
| 0.55
| 0.436364
| 0.272727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048
| 0.143836
| 146
| 6
| 53
| 24.333333
| 0.832
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a450825fd033927d335445fc9fc0cbb3376124e
| 166
|
bzl
|
Python
|
contrib/all_contrib_extensions.bzl
|
algebra2k/envoy
|
46ab91335aa97866f9940eb10d0772b5c29e0db3
|
[
"Apache-2.0"
] | 3
|
2021-08-19T18:42:11.000Z
|
2022-02-20T10:31:30.000Z
|
contrib/all_contrib_extensions.bzl
|
algebra2k/envoy
|
46ab91335aa97866f9940eb10d0772b5c29e0db3
|
[
"Apache-2.0"
] | 312
|
2021-04-19T01:53:05.000Z
|
2022-03-28T08:28:56.000Z
|
contrib/all_contrib_extensions.bzl
|
algebra2k/envoy
|
46ab91335aa97866f9940eb10d0772b5c29e0db3
|
[
"Apache-2.0"
] | 2
|
2019-10-17T01:25:13.000Z
|
2021-09-13T16:25:11.000Z
|
load(":contrib_build_config.bzl", "CONTRIB_EXTENSIONS")
def envoy_all_contrib_extensions():
    """Returns one target name per entry of CONTRIB_EXTENSIONS, formed by
    appending the "_envoy_extension" suffix to each dict value."""
    return [v + "_envoy_extension" for v in CONTRIB_EXTENSIONS.values()]
| 33.2
| 72
| 0.777108
| 22
| 166
| 5.454545
| 0.681818
| 0.425
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10241
| 166
| 4
| 73
| 41.5
| 0.805369
| 0
| 0
| 0
| 0
| 0
| 0.355422
| 0.150602
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0.333333
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
5a5727eef1f8c9c91cd74d690d205c7ee21a94a1
| 4,348
|
py
|
Python
|
auth_backends/tests/test_helsinki_azure_ad.py
|
City-of-Helsinki/hkisaml
|
9cc17c3f08da0838ecac8bb3c78c72fb3e3ab4b6
|
[
"MIT"
] | null | null | null |
auth_backends/tests/test_helsinki_azure_ad.py
|
City-of-Helsinki/hkisaml
|
9cc17c3f08da0838ecac8bb3c78c72fb3e3ab4b6
|
[
"MIT"
] | null | null | null |
auth_backends/tests/test_helsinki_azure_ad.py
|
City-of-Helsinki/hkisaml
|
9cc17c3f08da0838ecac8bb3c78c72fb3e3ab4b6
|
[
"MIT"
] | null | null | null |
import json
from auth_backends.tests.azure_ad_base import AzureADV2TenantOAuth2Test, _generate_access_token_body
class TestNoErrorWhenADGroupsNotInTokenOrGraph(AzureADV2TenantOAuth2Test):
    """Login must succeed with zero ad_groups when the token's 'groups'
    claim is null AND the Graph API call fails (HTTP 400)."""
    backend_path = 'auth_backends.helsinki_azure_ad.HelsinkiAzureADTenantOAuth2'
    # Token payload carries an explicitly null 'groups' claim.
    access_token_body = _generate_access_token_body(extra_payload={
        'groups': None,
    })

    def setup_graph_response(self, **kwargs):
        # Simulate a failing Graph API.
        super().setup_graph_response(status=400)

    def test_login(self):
        user = self.do_login()
        self.assertEqual(user.ad_groups.count(), 0)
class TestADGroupsFromGraphResponse(AzureADV2TenantOAuth2Test):
    """AD groups are fetched from the Graph endpoint referenced by the
    token's distributed claims (_claim_names/_claim_sources) when the
    token itself carries no inline 'groups' list."""
    backend_path = 'auth_backends.helsinki_azure_ad.HelsinkiAzureADTenantOAuth2'
    # The 'groups' claim is only a reference ('src1') to a Graph
    # getMemberObjects endpoint.
    access_token_body = _generate_access_token_body(extra_payload={
        '_claim_names': {
            'groups': 'src1'
        },
        '_claim_sources': {
            'src1': {
                'endpoint': (
                    'https://graph.windows.net/00000000-0000-0000-0000-000000000000'
                    '/users/00000000-0000-0000-0000-000000000000/getMemberObjects'
                )
            }
        },
    })

    def setup_graph_response(self, **kwargs):
        # Graph returns three groups; the third has securityEnabled=False.
        body = json.dumps({
            '@odata.context': 'https://graph.microsoft.com/beta/$metadata#directoryObjects',
            'value': [
                {
                    '@odata.type': '#microsoft.graph.group',
                    'id': '00000000-0000-4000-a0000000000000000',
                    'displayName': 'first_group',
                    'securityEnabled': True,
                },
                {
                    '@odata.type': '#microsoft.graph.group',
                    'id': '00000000-0000-4000-a0000000000000001',
                    'displayName': 'Second group',
                    'securityEnabled': True,
                },
                {
                    '@odata.type': '#microsoft.graph.group',
                    'id': '00000000-0000-4000-a0000000000000002',
                    'displayName': 'Third Group',
                    'securityEnabled': False,
                },
            ],
        })
        super().setup_graph_response(body=body)

    def test_login(self):
        user = self.do_login()
        # Only the two securityEnabled groups are expected, with names
        # lower-cased -- presumably done by the backend; the assertion
        # below is what documents that behavior.
        self.assertCountEqual(
            ['first_group', 'second group'],
            user.ad_groups.values_list('name', flat=True)
        )
class TestADGroupsFromToken(AzureADV2TenantOAuth2Test):
    """When the token carries an inline 'groups' list and the Graph call
    fails, the groups from the token are used (names lower-cased)."""
    backend_path = 'auth_backends.helsinki_azure_ad.HelsinkiAzureADTenantOAuth2'
    access_token_body = _generate_access_token_body(extra_payload={
        'groups': [
            'In-claim group 1',
            'In-claim group 2',
        ],
    })

    def setup_graph_response(self, **kwargs):
        # Graph fails; the inline token groups must still be honored.
        super().setup_graph_response(status=400)

    def test_login(self):
        user = self.do_login()
        self.assertCountEqual(
            ['in-claim group 1', 'in-claim group 2'],
            user.ad_groups.values_list('name', flat=True)
        )
class TestADGroupsFromGraphResponseWhenAlsoTokenHasGroups(AzureADV2TenantOAuth2Test):
    """When both the token and the Graph API provide groups, the Graph
    response wins: the expected names below come from the Graph body, not
    from the token's inline 'In-claim' groups."""
    backend_path = 'auth_backends.helsinki_azure_ad.HelsinkiAzureADTenantOAuth2'
    access_token_body = _generate_access_token_body(extra_payload={
        'groups': [
            'In-claim group 1',
            'In-claim group 2',
        ],
    })

    def setup_graph_response(self, **kwargs):
        # Graph succeeds with two securityEnabled groups.
        body = json.dumps({
            '@odata.context': 'https://graph.microsoft.com/beta/$metadata#directoryObjects',
            'value': [
                {
                    '@odata.type': '#microsoft.graph.group',
                    'id': '00000000-0000-4000-a0000000000000000',
                    'displayName': 'first_group',
                    'securityEnabled': True,
                },
                {
                    '@odata.type': '#microsoft.graph.group',
                    'id': '00000000-0000-4000-a0000000000000001',
                    'displayName': 'Second group',
                    'securityEnabled': True,
                }
            ],
        })
        super().setup_graph_response(body=body)

    def test_login(self):
        user = self.do_login()
        self.assertCountEqual(
            ['first_group', 'second group'],
            user.ad_groups.values_list('name', flat=True)
        )
| 33.19084
| 100
| 0.562328
| 373
| 4,348
| 6.313673
| 0.233244
| 0.042038
| 0.057325
| 0.048832
| 0.820382
| 0.793206
| 0.793206
| 0.793206
| 0.782166
| 0.763482
| 0
| 0.085792
| 0.321757
| 4,348
| 130
| 101
| 33.446154
| 0.712784
| 0
| 0
| 0.663551
| 1
| 0
| 0.293468
| 0.134775
| 0
| 0
| 0
| 0
| 0.037383
| 1
| 0.074766
| false
| 0
| 0.018692
| 0
| 0.205607
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a807f1441ed6caf5c8d6d792db9b53df16df375
| 143
|
py
|
Python
|
testing-exercises/test_mileage.py
|
vladov3000/epe_tutorials
|
a58f042b6e0f4a55213fa10241eff735118138f1
|
[
"CC-BY-3.0"
] | null | null | null |
testing-exercises/test_mileage.py
|
vladov3000/epe_tutorials
|
a58f042b6e0f4a55213fa10241eff735118138f1
|
[
"CC-BY-3.0"
] | null | null | null |
testing-exercises/test_mileage.py
|
vladov3000/epe_tutorials
|
a58f042b6e0f4a55213fa10241eff735118138f1
|
[
"CC-BY-3.0"
] | null | null | null |
from mileage import convert_mileage
from nose.tools import *
assert_equal (convert_mileage(20),11.76)
assert_equal (convert_mileage(40),5.88)
| 23.833333
| 40
| 0.811189
| 23
| 143
| 4.826087
| 0.608696
| 0.378378
| 0.324324
| 0.45045
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084615
| 0.090909
| 143
| 5
| 41
| 28.6
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
ce6324c1a0218a75ac28d731d71ae6ab24bfa2d4
| 168
|
py
|
Python
|
allennlp_models/vision/predictors/__init__.py
|
shunk031/allennlp-models
|
d37c5fadeef9326808a84dda0bcfd210a078d6b1
|
[
"Apache-2.0"
] | 1
|
2021-11-03T11:34:41.000Z
|
2021-11-03T11:34:41.000Z
|
allennlp_models/vision/predictors/__init__.py
|
staceywhitmore-inl/allennlp-models
|
1e89d5e51cb45f3e77a48d4983bf980088334fac
|
[
"Apache-2.0"
] | 21
|
2020-04-26T12:55:49.000Z
|
2022-03-15T13:10:14.000Z
|
allennlp_models/vision/predictors/__init__.py
|
staceywhitmore-inl/allennlp-models
|
1e89d5e51cb45f3e77a48d4983bf980088334fac
|
[
"Apache-2.0"
] | null | null | null |
from allennlp_models.vision.predictors.vilbert_vqa import VilbertVqaPredictor
from allennlp_models.vision.predictors.visual_entailment import VisualEntailmentPredictor
| 56
| 89
| 0.916667
| 18
| 168
| 8.333333
| 0.666667
| 0.16
| 0.24
| 0.32
| 0.453333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 168
| 2
| 90
| 84
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ce7d10bdca7fb9d6f7236de742ab3e70587aef1c
| 2,488
|
py
|
Python
|
recovery/StepGANSrc/src/models.py
|
imperial-qore/CAROL
|
57dc42c4ddeb9e75eed43a91ceb336a1ecc9c8b9
|
[
"BSD-3-Clause"
] | 1
|
2022-03-19T16:37:40.000Z
|
2022-03-19T16:37:40.000Z
|
recovery/StepGANSrc/src/models.py
|
imperial-qore/CAROL
|
57dc42c4ddeb9e75eed43a91ceb336a1ecc9c8b9
|
[
"BSD-3-Clause"
] | null | null | null |
recovery/StepGANSrc/src/models.py
|
imperial-qore/CAROL
|
57dc42c4ddeb9e75eed43a91ceb336a1ecc9c8b9
|
[
"BSD-3-Clause"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
from .constants import *
from .dlutils import *
## Simple SlimGAN model
class StepGAN_simulator_16(nn.Module):
    # GAN over a 16-host system: both generator and discriminator consume a
    # flattened input of n_window*n_feats + n_hosts**2 values; the generator
    # emits an n_feats vector, the discriminator a single sigmoid score.
    def __init__(self):
        super(StepGAN_simulator_16, self).__init__()
        self.name = 'StepGAN_simulator_16'
        self.lr = 0.0001
        self.n_hosts = 16
        self.n_feats = 3 * self.n_hosts  # 3 values per host
        self.n_hidden = 32
        self.n_window = 1 # SlimGAN w_size = 1
        # total input size: window features plus an n_hosts x n_hosts block
        self.n = self.n_window * self.n_feats + self.n_hosts ** 2
        # NOTE(review): nn.LeakyReLU(True) passes True as negative_slope
        # (== 1.0), not inplace=True -- likely meant
        # nn.LeakyReLU(inplace=True); confirm before changing, as a fix
        # alters the network's numerics.
        self.generator = nn.Sequential(
            SlimmableLinear(self.n, self.n_hidden, 1), nn.LeakyReLU(True),
            SlimmableLinear(self.n_hidden, self.n_hidden, 1), nn.LeakyReLU(True),
            SlimmableLinear(self.n_hidden, self.n_feats, 1), nn.Sigmoid(),
        )
        self.discriminator = nn.Sequential(
            SlimmableLinear(self.n, self.n_hidden, 1), nn.LeakyReLU(True),
            SlimmableLinear(self.n_hidden, self.n_hidden, 1), nn.LeakyReLU(True),
            SlimmableLinear(self.n_hidden, 1, 1), nn.Sigmoid(),
        )

    def forward(self, t, s):
        # t and s are flattened and concatenated; their product of sizes
        # must equal self.n. Returns (generated sample, discriminator score
        # on the real input, discriminator score on the generated sample).
        ## Generate
        z = self.generator(torch.cat((t.view(-1), s.view(-1))))
        ## Discriminator
        real_score = self.discriminator(torch.cat((t.view(-1), s.view(-1))).view(1,-1))
        fake_score = self.discriminator(torch.cat((z.view(-1), s.view(-1))).view(1,-1))
        return z.view(-1), real_score.view(-1), fake_score.view(-1)
## Simple SlimGAN model
class StepGAN_framework_16(nn.Module):
    # Identical architecture to StepGAN_simulator_16 (only the registered
    # name differs): flattened input of n_window*n_feats + n_hosts**2,
    # generator -> n_feats vector, discriminator -> one sigmoid score.
    def __init__(self):
        super(StepGAN_framework_16, self).__init__()
        self.name = 'StepGAN_framework_16'
        self.lr = 0.0001
        self.n_hosts = 16
        self.n_feats = 3 * self.n_hosts  # 3 values per host
        self.n_hidden = 32
        self.n_window = 1 # SlimGAN w_size = 1
        self.n = self.n_window * self.n_feats + self.n_hosts ** 2
        # NOTE(review): nn.LeakyReLU(True) passes True as negative_slope
        # (== 1.0), not inplace=True -- likely meant
        # nn.LeakyReLU(inplace=True); confirm before changing.
        self.generator = nn.Sequential(
            SlimmableLinear(self.n, self.n_hidden, 1), nn.LeakyReLU(True),
            SlimmableLinear(self.n_hidden, self.n_hidden, 1), nn.LeakyReLU(True),
            SlimmableLinear(self.n_hidden, self.n_feats, 1), nn.Sigmoid(),
        )
        self.discriminator = nn.Sequential(
            SlimmableLinear(self.n, self.n_hidden, 1), nn.LeakyReLU(True),
            SlimmableLinear(self.n_hidden, self.n_hidden, 1), nn.LeakyReLU(True),
            SlimmableLinear(self.n_hidden, 1, 1), nn.Sigmoid(),
        )

    def forward(self, t, s):
        # Returns (generated sample, real score, fake score); see
        # StepGAN_simulator_16.forward -- the logic is identical.
        ## Generate
        z = self.generator(torch.cat((t.view(-1), s.view(-1))))
        ## Discriminator
        real_score = self.discriminator(torch.cat((t.view(-1), s.view(-1))).view(1,-1))
        fake_score = self.discriminator(torch.cat((z.view(-1), s.view(-1))).view(1,-1))
        return z.view(-1), real_score.view(-1), fake_score.view(-1)
| 38.276923
| 81
| 0.698553
| 399
| 2,488
| 4.175439
| 0.137845
| 0.120048
| 0.118848
| 0.072029
| 0.913565
| 0.877551
| 0.847539
| 0.847539
| 0.807923
| 0.807923
| 0
| 0.036195
| 0.133842
| 2,488
| 65
| 82
| 38.276923
| 0.736891
| 0.050241
| 0
| 0.727273
| 0
| 0
| 0.017029
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.072727
| false
| 0
| 0.090909
| 0
| 0.236364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce9b3da4962e81df061984048a784c3e8bdcc82d
| 16,467
|
py
|
Python
|
simba/process_videos_automation.py
|
KonradDanielewski/simba
|
d7a448222e33dcb9880b65c14b5b676933cc6fd7
|
[
"MIT"
] | 172
|
2019-12-18T22:19:42.000Z
|
2022-03-29T01:58:25.000Z
|
simba/process_videos_automation.py
|
KonradDanielewski/simba
|
d7a448222e33dcb9880b65c14b5b676933cc6fd7
|
[
"MIT"
] | 165
|
2020-01-10T19:05:16.000Z
|
2022-03-31T16:08:36.000Z
|
simba/process_videos_automation.py
|
KonradDanielewski/simba
|
d7a448222e33dcb9880b65c14b5b676933cc6fd7
|
[
"MIT"
] | 80
|
2019-12-20T00:01:43.000Z
|
2022-03-29T16:20:10.000Z
|
import os, glob
import cv2
from simba.drop_bp_cords import get_fn_ext
def downsamplevideo_auto(width,height,filesFound,outputdir):
    """For every video path in filesFound, build a multi-line Windows batch
    command string that (1) ffmpeg-scales it to width x height into
    <name>_downsampled.mp4, (2) moves the original to a tmp folder,
    (3) copies the result there too, and (4) renames the output back to
    the original file name. Returns the list of command strings; nothing
    is executed here."""
    downsamplelist = []
    ########### DEFINE COMMAND ###########
    for i in filesFound:
        currentFile = i
        outFile = currentFile.replace('.mp4', '')
        outFile = str(outFile) + '_downsampled.mp4'
        output = os.path.basename(outFile)
        # Implicit string concatenation across lines: the '\n'-separated
        # pieces form one batch snippet (ffmpeg / move / copy / rename).
        command = (str('ffmpeg -y -i ') + '"' + str(os.path.join(outputdir, os.path.basename(currentFile)))+ '"' + ' -vf scale='+str(width)+':'+ str(height) + ' ' + '"' + str(os.path.join(outputdir, output)) + '"'+ ' -hide_banner' + '\n'
                   'move \"' + str(outputdir) + '\\' + os.path.basename(currentFile) + '\" \"' + os.path.dirname(outputdir) + '\\' + 'tmp\"' + '\n'
                   'copy \"' + str(outputdir) + '\\' + output + '\" \"' +os.path.dirname(outputdir) +'\\' +'tmp\"' +'\n'
                   'rename \"' +os.path.join(str(outputdir),output) + '\" \"' + os.path.basename(currentFile)+'\"')
        downsamplelist.append(command)
        print('Downsample added into queue')
    return downsamplelist
def downsamplevideo_queue(width,height,filesFound,outputdir):
    """Single-file variant of downsamplevideo_auto: filesFound is ONE video
    path. Builds and returns the same ffmpeg/move/copy/rename Windows batch
    snippet, using outputdir itself (not its parent) as the tmp location."""
    currentFile = filesFound
    outFile = currentFile.replace('.mp4', '')
    outFile = str(outFile) + '_downsampled.mp4'
    output = os.path.basename(outFile)
    command = (str('ffmpeg -y -i ') + '"'+ str(outputdir) + '\\' + os.path.basename(currentFile) + '"'+ ' -vf scale='+str(width)+':'+ str(height) + ' ' + '"'+ str(outputdir) + '\\' + output+ '"' + ' -hide_banner' + '\n'
               'move \"' + str(outputdir) + '\\' + os.path.basename(currentFile) + '\" \"' + (outputdir) + '\\' + 'tmp\"' + '\n'
               'copy \"' + str(outputdir) + '\\' + output + '\" \"' + (outputdir) +'\\' +'tmp\"' +'\n'
               'rename \"' +os.path.join(str(outputdir),output) + '\" \"' + os.path.basename(currentFile)+'\"')
    print(filesFound,'added into the downsample queue')
    return command
def changefps_queue(fps,filesFound,outputdir):
    """Build and return a batch snippet that re-encodes ONE video
    (filesFound) at the given fps into <name>_fpsChanged.mp4, then does the
    move/copy/rename shuffle so the result replaces the original.
    NOTE(review): this function uses '/' path separators while the sibling
    functions use '\\\\' -- verify which is intended for the target shell."""
    currentFile = filesFound
    outFile = currentFile.replace('.mp4', '')
    outFile = str(outFile) + '_fpsChanged.mp4'
    output = os.path.basename(outFile)
    command = (str('ffmpeg -y -i ') + '"'+ str(outputdir) + '/' + os.path.basename(currentFile) + '"'+ ' -filter:v fps='+ str(fps) + ' ' + '"'+ str(outputdir) + '/' + output+ '"' + ' -hide_banner' + '\n'
               'move \"' + str(outputdir) + '/' + os.path.basename(currentFile) + '" "' + (outputdir) + '/' + 'tmp/"' + '\n'
               'copy \"' + str(outputdir) + '/' + output + '" "' + (outputdir) +'/' +'tmp/"' +'\n'
               'rename \"' +os.path.join(str(outputdir),output) + '" "' + os.path.basename(currentFile)+'"')
    print(filesFound,'added into the fps queue')
    return command
def greyscale_auto(outputdir,filesFound):
    """For every video in filesFound, build a batch snippet converting it to
    grayscale (<name>_grayscale.mp4 via ffmpeg -vf format=gray), followed by
    the move/copy/rename shuffle. Returns the list of command strings."""
    greyscale_list=[]
    ########### DEFINE COMMAND ###########
    for i in filesFound:
        currentFile = i
        outFile = currentFile.replace('.mp4', '')
        outFile = str(outFile) + '_grayscale.mp4'
        output = os.path.basename(outFile)
        command = (str('ffmpeg -y -i ') + '"'+ str(outputdir) + '\\' + os.path.basename(currentFile) + '"'+ ' -vf format=gray '+ '"'+ str(outputdir) + '\\' + output + '"'+ '\n'
                   'move \"' + str(outputdir) + '\\' + os.path.basename(currentFile) + '\" \"' + os.path.dirname(outputdir)+'\\'+'tmp\"' +'\n'
                   'copy \"' + str(outputdir) + '\\' + output + '\" \"' + os.path.dirname(outputdir)+'\\'+'tmp\"' +'\n'
                   'rename \"' + os.path.join(str(outputdir),output) + '\" \"' + os.path.basename(currentFile)+'\"')
        greyscale_list.append(command)
        print('Grayscale added into queue')
    return greyscale_list
def greyscale_queue(outputdir,filesFound):
    """Return a queued command string converting one video to grayscale.

    The string contains the ffmpeg pass plus Windows move/copy/rename steps
    that leave the converted file under the original's name, with backups in
    ``outputdir\\tmp``.  Nothing is executed here.
    """
    out_dir = str(outputdir)
    base = os.path.basename(filesFound)
    gray_name = os.path.basename(filesFound.replace('.mp4', '') + '_grayscale.mp4')
    tmp_dir = outputdir + '\\' + 'tmp'
    cmd = ('ffmpeg -y -i "{0}\\{1}" -vf format=gray "{0}\\{2}"\n'
           'move "{0}\\{1}" "{3}"\n'
           'copy "{0}\\{2}" "{3}"\n'
           'rename "{4}" "{1}"').format(out_dir, base, gray_name, tmp_dir,
                                        os.path.join(out_dir, gray_name))
    print(filesFound, 'added into the grayscale queue')
    return cmd
def superimposeframe_auto(outputdir,filesFound):
    """Queue ffmpeg drawtext commands that stamp the frame number on each video.

    Builds one multi-line command string per file (ffmpeg overlay plus Windows
    move/copy/rename bookkeeping) and returns them as a list; nothing runs here.
    """
    # drawtext overlay: running frame counter, black on a white box, bottom-center.
    overlay = (' -vf "drawtext=fontfile=Arial.ttf: text=\'%{frame_num}\': '
               'start_number=0: x=(w-tw)/2: y=h-(2*lh): fontcolor=black: '
               'fontsize=20: box=1: boxcolor=white: boxborderw=5" -c:a copy ')
    out_dir = str(outputdir)
    tmp_dir = os.path.dirname(outputdir) + '\\' + 'tmp'
    queued = []
    for video in filesFound:
        base = os.path.basename(video)
        stamped = os.path.basename(video.replace('.mp4', '') + '_frame_no.mp4')
        cmd = ('ffmpeg -y -i ' + '"' + out_dir + '\\' + base + '"' + overlay +
               '"' + out_dir + '\\' + stamped + '"' + '\n'
               'move "' + out_dir + '\\' + base + '" "' + tmp_dir + '"\n'
               'copy "' + out_dir + '\\' + stamped + '" "' + tmp_dir + '"\n'
               'rename "' + os.path.join(out_dir, stamped) + '" "' + base + '"')
        queued.append(cmd)
        print('Superimpose frame added into queue.')
    return queued
def superimposeframe_queue(outputdir,filesFound):
    """Return a queued command that overlays the running frame number on one video.

    Same overlay as :func:`superimposeframe_auto`, but for a single file and
    with the backup ``tmp`` folder inside *outputdir*.  Nothing is executed.
    """
    overlay = (' -vf "drawtext=fontfile=Arial.ttf: text=\'%{frame_num}\': '
               'start_number=0: x=(w-tw)/2: y=h-(2*lh): fontcolor=black: '
               'fontsize=20: box=1: boxcolor=white: boxborderw=5" -c:a copy ')
    out_dir = str(outputdir)
    base = os.path.basename(filesFound)
    stamped = os.path.basename(filesFound.replace('.mp4', '') + '_frame_no.mp4')
    tmp_dir = outputdir + '\\' + 'tmp'
    cmd = ('ffmpeg -y -i ' + '"' + out_dir + '\\' + base + '"' + overlay +
           '"' + out_dir + '\\' + stamped + '"' + '\n'
           'move "' + out_dir + '\\' + base + '" "' + tmp_dir + '"\n'
           'copy "' + out_dir + '\\' + stamped + '" "' + tmp_dir + '"\n'
           'rename "' + os.path.join(out_dir, stamped) + '" "' + base + '"')
    print(filesFound, 'added into the superimpose frame queue.')
    return cmd
def shortenvideos1_auto(outputdir,filesFound,starttime,endtime):
    """Queue ffmpeg trim commands (``-ss``/``-to``) for every listed video.

    Returns a list of multi-line command strings, one per file, each followed
    by Windows move/copy/rename bookkeeping.  Nothing is executed here.
    """
    out_dir = str(outputdir)
    tmp_dir = os.path.dirname(outputdir) + '\\' + 'tmp'
    queued = []
    for video in filesFound:
        base = os.path.basename(video)
        clip = os.path.basename(video.replace('.mp4', '') + '_shorten.mp4')
        cmd = ('ffmpeg -y -i "{0}\\{1}" -ss {2} -to {3} -async 1 "{0}\\{4}"\n'
               'move "{0}\\{1}" "{5}"\n'
               'copy "{0}\\{4}" "{5}"\n'
               'rename "{6}" "{1}"').format(out_dir, base, starttime, endtime,
                                            clip, tmp_dir,
                                            os.path.join(out_dir, clip))
        queued.append(cmd)
        print('Shorten video added into queue')
    return queued
def shortenvideos1_queue(outputdir,filesFound,starttime,endtime):
    """Return a queued ffmpeg trim command for a single video.

    Same trim as :func:`shortenvideos1_auto`, but for one file and with the
    backup ``tmp`` folder inside *outputdir*.  Nothing is executed here.
    """
    out_dir = str(outputdir)
    base = os.path.basename(filesFound)
    clip = os.path.basename(filesFound.replace('.mp4', '') + '_shorten.mp4')
    tmp_dir = outputdir + '\\' + 'tmp'
    cmd = ('ffmpeg -y -i "{0}\\{1}" -ss {2} -to {3} -async 1 "{0}\\{4}"\n'
           'move "{0}\\{1}" "{5}"\n'
           'copy "{0}\\{4}" "{5}"\n'
           'rename "{6}" "{1}"').format(out_dir, base, starttime, endtime,
                                        clip, tmp_dir, os.path.join(out_dir, clip))
    print(filesFound, 'added into the shorten video queue')
    return cmd
def clahe_auto(directory):
    """Apply CLAHE contrast enhancement to every .mp4 found in *directory*.

    Each input is read frame by frame, converted to grayscale, corrected with
    a CLAHE filter and written beside the source as ``CLAHE_<name>.avi``.
    Returns the save name of the last video processed (raises
    ``UnboundLocalError`` if no .mp4 files are found, as before).
    """
    # Idiomatic membership test instead of calling __contains__ directly.
    files_found = [f for f in os.listdir(directory) if '.mp4' in f]
    os.chdir(directory)
    print('Applying CLAHE, this might take awhile...')
    # One filter instance suffices: its parameters never change per frame.
    clahe_filter = cv2.createCLAHE(clipLimit=2, tileGridSize=(16, 16))
    for current_video in files_found:
        saveName = 'CLAHE_' + current_video[:-4] + '.avi'
        cap = cv2.VideoCapture(current_video)
        imageWidth = int(cap.get(3))
        imageHeight = int(cap.get(4))
        fps = cap.get(cv2.CAP_PROP_FPS)
        fourcc = cv2.VideoWriter_fourcc(*'XVID')
        # Trailing 0 -> single-channel (grayscale) output stream.
        out = cv2.VideoWriter(saveName, fourcc, fps, (imageWidth, imageHeight), 0)
        while True:
            ret, image = cap.read()
            if not ret:
                print('Completed video ' + saveName)
                break
            gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
            out.write(clahe_filter.apply(gray))
            # Allow the user to abort the current video with 'q'.
            if cv2.waitKey(10) & 0xFF == ord('q'):
                break
        cap.release()
        out.release()
    cv2.destroyAllWindows()
    return saveName
def cropvid_auto(filenames,outputdir):
    """Interactively select a crop ROI on the first frame and queue an ffmpeg crop.

    Extracts frame 0 of *filenames* to a temporary ``0.bmp``, lets the user drag
    a rectangle, then builds (but does not run) an ffmpeg crop command plus the
    Windows move/copy/rename bookkeeping.  Returns the command string, or an
    empty list when no ROI was selected.  Sets the module globals ``width`` and
    ``height`` as a side effect, as before.
    """
    global width, height
    currentDir = str(os.path.dirname(filenames))
    videoName = str(os.path.basename(filenames))
    os.chdir(currentDir)

    # Grab the first frame and save it so selectROI has something to display.
    cap = cv2.VideoCapture(videoName)
    cap.set(1, 0)
    ret, frame = cap.read()
    filePath = os.path.join(currentDir, '0.bmp')
    cv2.imwrite(filePath, frame)

    # Let the user pick the crop rectangle; ROI is (x, y, w, h).
    img = cv2.imread(filePath)
    cv2.namedWindow('Select ROI', cv2.WINDOW_NORMAL)
    ROI = cv2.selectROI("Select ROI", img)
    width = abs(ROI[0] - (ROI[2] + ROI[0]))    # simplifies to ROI[2] (selection width)
    height = abs(ROI[2] - (ROI[3] + ROI[2]))   # simplifies to ROI[3] (selection height)
    topLeftX = ROI[0]
    topLeftY = ROI[1]
    cv2.waitKey(0)
    cv2.destroyAllWindows()

    # splitext copes with file names containing extra dots; the previous
    # ``videoName.split(".", 2)`` two-target unpacking crashed on them.
    fileOutName = os.path.splitext(videoName)[0] + '_cropped.mp4'
    if width + height + topLeftX + topLeftY != 0:
        # Paths are now quoted, matching cropvid_queue, so names with spaces work.
        command = ('ffmpeg -y -i "' + str(outputdir) + '\\' + videoName + '"' +
                   ' -vf "crop=' + str(width) + ':' + str(height) + ':' +
                   str(topLeftX) + ':' + str(topLeftY) + '" ' +
                   '-c:v libx264 -c:a copy "' +
                   str(os.path.join(outputdir, fileOutName)) + '"\n'
                   'move "' + str(outputdir) + '\\' + videoName + '" "' +
                   (outputdir) + '\\' + 'tmp"\n'
                   'copy "' + str(outputdir) + '\\' + os.path.basename(fileOutName) +
                   '" "' + (outputdir) + '\\' + 'tmp"\n'
                   'rename "' + os.path.join(str(outputdir), os.path.basename(fileOutName)) +
                   '" "' + os.path.basename(videoName) + '"')
        print(videoName, 'added into the crop video queue.')
        os.remove(filePath)
    else:
        # selectROI returned all zeros: the user made no selection.
        command = []
        print('nothing added to the script as no coordinates was selected')
        if os.path.exists(filePath):
            os.remove(filePath)
    return command
def cropvid_queue(filenames,outputdir):
    """Interactively select a crop ROI and return a queued ffmpeg crop command.

    Same flow as :func:`cropvid_auto`.  Returns the multi-line command string,
    or an empty list when no ROI was selected (previously this path fell
    through and returned ``None``; an empty list is equally falsy and matches
    ``cropvid_auto``).  Sets the module globals ``width``/``height``.
    """
    global width, height
    currentDir = str(os.path.dirname(filenames))
    videoName = str(os.path.basename(filenames))
    os.chdir(currentDir)

    # Grab frame 0 for the ROI picker.
    cap = cv2.VideoCapture(videoName)
    cap.set(1, 0)
    ret, frame = cap.read()
    filePath = os.path.join(currentDir, '0.bmp')
    cv2.imwrite(filePath, frame)

    # ROI is (x, y, w, h) as returned by cv2.selectROI.
    img = cv2.imread(filePath)
    cv2.namedWindow('Select ROI', cv2.WINDOW_NORMAL)
    ROI = cv2.selectROI("Select ROI", img)
    width = abs(ROI[0] - (ROI[2] + ROI[0]))    # simplifies to ROI[2]
    height = abs(ROI[2] - (ROI[3] + ROI[2]))   # simplifies to ROI[3]
    topLeftX = ROI[0]
    topLeftY = ROI[1]
    cv2.waitKey(0)
    cv2.destroyAllWindows()

    # splitext handles extra dots in the name; split('.', 2) unpacking did not.
    fileOutName = os.path.splitext(videoName)[0] + '_cropped.mp4'
    command = []
    if width + height + topLeftX + topLeftY != 0:
        command = ('ffmpeg -y -i "' + str(outputdir) + '\\' + videoName + '"' +
                   ' -vf "crop=' + str(width) + ':' + str(height) + ':' +
                   str(topLeftX) + ':' + str(topLeftY) + '" ' +
                   '-c:v libx264 -c:a copy "' +
                   str(os.path.join(outputdir, fileOutName)) + '"\n'
                   'move "' + str(outputdir) + '\\' + videoName + '" "' +
                   (outputdir) + '\\' + 'tmp"\n'
                   'copy "' + str(outputdir) + '\\' + os.path.basename(fileOutName) +
                   '" "' + (outputdir) + '\\' + 'tmp"\n'
                   'rename "' + os.path.join(str(outputdir), os.path.basename(fileOutName)) +
                   '" "' + os.path.basename(videoName) + '"')
        print(videoName, 'added into the crop video queue.')
    else:
        print('nothing added to the script as no coordinates was selected')
    # Remove the temporary frame in every case.
    if os.path.exists(filePath):
        os.remove(filePath)
    return command
def clahe_batch(directory):
    """Apply CLAHE contrast enhancement to every file in *directory*.

    Unlike :func:`clahe_auto`, no extension filter is applied — every entry in
    the directory is treated as a video.  Each input is converted to grayscale
    frame by frame, CLAHE-corrected and written as ``CLAHE_<name>.avi``.
    Returns the save name of the last video processed.
    """
    files_found = list(os.listdir(directory))
    os.chdir(directory)
    print('Applying CLAHE, this might take awhile...')
    # Hoisted: one filter instance serves all frames of all videos.
    clahe_filter = cv2.createCLAHE(clipLimit=2, tileGridSize=(16, 16))
    for current_video in files_found:
        saveName = 'CLAHE_' + current_video[:-4] + '.avi'
        cap = cv2.VideoCapture(current_video)
        imageWidth = int(cap.get(3))
        imageHeight = int(cap.get(4))
        fps = cap.get(cv2.CAP_PROP_FPS)
        fourcc = cv2.VideoWriter_fourcc(*'XVID')
        # Trailing 0 -> single-channel (grayscale) output stream.
        out = cv2.VideoWriter(saveName, fourcc, fps, (imageWidth, imageHeight), 0)
        while True:
            ret, image = cap.read()
            if not ret:
                print('Completed video ' + saveName)
                break
            gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
            out.write(clahe_filter.apply(gray))
            # Allow the user to abort the current video with 'q'.
            if cv2.waitKey(10) & 0xFF == ord('q'):
                break
        cap.release()
        out.release()
    cv2.destroyAllWindows()
    return saveName
def clahe_queue(files):
    """Apply CLAHE contrast enhancement to the single video at path *files*.

    Converts the video to grayscale frame by frame, applies CLAHE and writes
    ``CLAHE_<name>.avi`` next to the source.  Returns the save name.
    (The original wrapped the single file in a one-element loop; that
    indirection is removed — behavior is unchanged.)
    """
    os.chdir(os.path.dirname(files))
    print('Applying CLAHE, this might take awhile...')
    current_video = os.path.basename(files)
    saveName = 'CLAHE_' + current_video[:-4] + '.avi'
    cap = cv2.VideoCapture(current_video)
    imageWidth = int(cap.get(3))
    imageHeight = int(cap.get(4))
    fps = cap.get(cv2.CAP_PROP_FPS)
    fourcc = cv2.VideoWriter_fourcc(*'XVID')
    # Trailing 0 -> single-channel (grayscale) output stream.
    out = cv2.VideoWriter(saveName, fourcc, fps, (imageWidth, imageHeight), 0)
    # Hoisted: the CLAHE filter's parameters never change per frame.
    clahe_filter = cv2.createCLAHE(clipLimit=2, tileGridSize=(16, 16))
    while True:
        ret, image = cap.read()
        if not ret:
            print('Completed video ' + saveName)
            break
        gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        out.write(clahe_filter.apply(gray))
        # Allow the user to abort with 'q'.
        if cv2.waitKey(10) & 0xFF == ord('q'):
            break
    cap.release()
    out.release()
    cv2.destroyAllWindows()
    return saveName
| 47.048571
| 334
| 0.544361
| 1,702
| 16,467
| 5.226792
| 0.119271
| 0.050585
| 0.070818
| 0.075877
| 0.87826
| 0.873089
| 0.873089
| 0.873089
| 0.864546
| 0.862185
| 0
| 0.01286
| 0.258638
| 16,467
| 350
| 335
| 47.048571
| 0.715842
| 0.010931
| 0
| 0.805755
| 0
| 0.007194
| 0.150263
| 0.003671
| 0.014388
| 0
| 0.00076
| 0
| 0
| 1
| 0.05036
| false
| 0.003597
| 0.010791
| 0
| 0.111511
| 0.068345
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0cabb64bfd71300cbe3983c959d1b0ec82f98515
| 3,531
|
py
|
Python
|
Utils/github_workflow_scripts/utils_test.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 799
|
2016-08-02T06:43:14.000Z
|
2022-03-31T11:10:11.000Z
|
Utils/github_workflow_scripts/utils_test.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 9,317
|
2016-08-07T19:00:51.000Z
|
2022-03-31T21:56:04.000Z
|
Utils/github_workflow_scripts/utils_test.py
|
diCagri/content
|
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
|
[
"MIT"
] | 1,297
|
2016-08-04T13:59:00.000Z
|
2022-03-31T23:43:06.000Z
|
#!/usr/bin/env python3
import pytest
from utils import get_env_var, EnvVariableError
class TestGetEnvVar(object):
    """Behavioural tests for ``utils.get_env_var``."""

    def test_no_env_var(self):
        """A missing variable with no default raises EnvVariableError."""
        with pytest.raises(EnvVariableError):
            get_env_var('MADE_UP_ENV_VARIABLE')

    def test_empty_env_var(self, monkeypatch):
        """An empty-string variable with no default raises EnvVariableError."""
        monkeypatch.setenv('MADE_UP_ENV_VARIABLE', '')
        with pytest.raises(EnvVariableError):
            get_env_var('MADE_UP_ENV_VARIABLE')

    def test_no_env_var_with_default(self):
        """A missing variable falls back to the supplied default."""
        fallback = 'TIMOTHY'
        assert get_env_var('MADE_UP_ENV_VARIABLE', fallback) == fallback

    def test_empty_env_var_with_default(self, monkeypatch):
        """An empty-string variable falls back to the supplied default."""
        monkeypatch.setenv('MADE_UP_ENV_VARIABLE', '')
        fallback = 'TIMOTHY'
        assert get_env_var('MADE_UP_ENV_VARIABLE', fallback) == fallback

    def test_existing_env_var(self, monkeypatch):
        """A set variable is returned as-is when no default is given."""
        monkeypatch.setenv('MADE_UP_ENV_VARIABLE', 'LEROY JENKINS')
        assert get_env_var('MADE_UP_ENV_VARIABLE') == 'LEROY JENKINS'

    def test_existing_env_var_with_default(self, monkeypatch):
        """A set variable wins over the supplied default."""
        monkeypatch.setenv('MADE_UP_ENV_VARIABLE', 'LEROY JENKINS')
        assert get_env_var('MADE_UP_ENV_VARIABLE', 'TIMOTHY') == 'LEROY JENKINS'
| 30.704348
| 75
| 0.640045
| 435
| 3,531
| 4.942529
| 0.14023
| 0.075349
| 0.054419
| 0.07907
| 0.957209
| 0.926047
| 0.892093
| 0.892093
| 0.892093
| 0.892093
| 0
| 0.0004
| 0.292268
| 3,531
| 114
| 76
| 30.973684
| 0.859944
| 0.461909
| 0
| 0.642857
| 0
| 0
| 0.191579
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 1
| 0.214286
| false
| 0
| 0.071429
| 0
| 0.321429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0b3fd2ac3c01c2419c4f3247faa93f3745d3da3d
| 10,180
|
py
|
Python
|
tests/test_auth.py
|
btrappe/pseud
|
421361b0a38eef8307eb083bea672962181fe68a
|
[
"Apache-2.0"
] | 19
|
2015-02-11T13:10:00.000Z
|
2019-12-20T03:10:08.000Z
|
tests/test_auth.py
|
ticosax/pseud
|
0f8690041a3bff02bb265c4212131b9e46e76794
|
[
"Apache-2.0"
] | 27
|
2017-03-07T17:45:09.000Z
|
2022-03-21T20:22:40.000Z
|
tests/test_auth.py
|
btrappe/pseud
|
421361b0a38eef8307eb083bea672962181fe68a
|
[
"Apache-2.0"
] | 4
|
2017-08-31T11:14:26.000Z
|
2021-09-30T19:28:18.000Z
|
import asyncio
import pytest
import zmq
from zmq.utils import z85
from zope.interface.verify import verifyClass
def test_noop_auth_backend_client():
    """The client-side no-op auth backend satisfies IAuthenticationBackend."""
    from pseud.auth import NoOpAuthenticationBackendForClient
    from pseud.interfaces import IAuthenticationBackend
    conforms = verifyClass(IAuthenticationBackend,
                           NoOpAuthenticationBackendForClient)
    assert conforms
def test_noop_auth_backend_server():
    """The server-side no-op auth backend satisfies IAuthenticationBackend."""
    from pseud.auth import NoOpAuthenticationBackendForServer
    from pseud.interfaces import IAuthenticationBackend
    conforms = verifyClass(IAuthenticationBackend,
                           NoOpAuthenticationBackendForServer)
    assert conforms
@pytest.mark.asyncio
async def test_trusted_curve(loop, unused_tcp_port, trusted_curve_auth_backend):
    """A client using a keypair the trusted-curve server already knows can
    complete the CURVE handshake and perform an RPC round trip."""
    from pseud import Client, Server
    from pseud.utils import register_rpc
    server_id = b'server'
    endpoint = f'tcp://127.0.0.1:{unused_tcp_port}'
    server_public, server_secret = zmq.curve_keypair()
    security_plugin = 'trusted_curve'
    server = Server(
        server_id,
        security_plugin=security_plugin,
        public_key=server_public,
        secret_key=server_secret,
        loop=loop,
    )
    server.bind(endpoint)
    # Fetch bob's keypair from the server backend's known identities.
    bob_public, bob_secret = server.auth_backend.known_identities[b'bob']
    client = Client(
        server_id,
        user_id=b'bob',
        security_plugin=security_plugin,
        public_key=bob_public,
        secret_key=bob_secret,
        peer_public_key=server_public,
        loop=loop,
    )
    client.connect(endpoint)
    # Both ends must have negotiated the CURVE security mechanism.
    assert server.socket.mechanism == zmq.CURVE
    assert client.socket.mechanism == zmq.CURVE
    register_rpc(name='string.lower')(str.lower)
    async with server, client:
        result = await client.string.lower('FOO')
        assert result == 'foo'
@pytest.mark.asyncio
async def test_trusted_curve_with_wrong_peer_public_key(loop, unused_tcp_port_factory):
    """A client presenting a bogus server public key never completes the
    CURVE handshake, so its RPC call times out."""
    from pseud import Client, Server
    from pseud.utils import register_rpc
    server_id = b'server'
    port = unused_tcp_port_factory()
    endpoint = f'tcp://127.0.0.1:{port}'
    server_public, server_secret = zmq.curve_keypair()
    server = Server(
        server_id,
        security_plugin='trusted_curve',
        public_key=server_public,
        secret_key=server_secret,
        loop=loop,
    )
    server.bind(endpoint)
    alice_public, alice_secret = server.auth_backend.known_identities[b'alice']
    client = Client(
        server_id,
        user_id=b'alice',
        security_plugin='trusted_curve',
        public_key=alice_public,
        secret_key=alice_secret,
        # Deliberately wrong: not the server's actual public key.
        peer_public_key=z85.encode(b'R' * 32),
        timeout=0.5,
        loop=loop,
    )
    client.connect(endpoint)
    assert server.socket.mechanism == zmq.CURVE
    assert client.socket.mechanism == zmq.CURVE
    register_rpc(name='string.lower')(str.lower)
    async with server, client:
        # The call can never be answered; the 0.5 s client timeout fires.
        with pytest.raises(asyncio.TimeoutError):
            await client.string.lower('BAR')
@pytest.mark.asyncio
async def test_untrusted_curve_with_allowed_password(
    loop, unused_tcp_port, untrusted_curve_auth_backend
):
    """With the untrusted-curve plugin, a client whose user/password pair is
    registered in the server's user map can run RPCs in both directions."""
    from pseud import Client, Server
    from pseud.utils import register_rpc
    client_id = b'john'
    server_id = b'server'
    endpoint = f'tcp://127.0.0.1:{unused_tcp_port}'
    server_public, server_secret = zmq.curve_keypair()
    client_public, client_secret = zmq.curve_keypair()
    security_plugin = 'untrusted_curve'
    password = b's3cret!'
    client = Client(
        server_id,
        security_plugin=security_plugin,
        public_key=client_public,
        secret_key=client_secret,
        peer_public_key=server_public,
        user_id=client_id,
        password=password,
        loop=loop,
    )
    server = Server(
        server_id,
        security_plugin=security_plugin,
        public_key=server_public,
        secret_key=server_secret,
        loop=loop,
    )
    server.bind(endpoint)
    client.connect(endpoint)
    assert server.socket.mechanism == zmq.CURVE
    assert client.socket.mechanism == zmq.CURVE
    # configure manually authentication backend
    server.auth_backend.user_map[client_id] = password
    register_rpc(name='string.lower')(str.lower)
    async with server, client:
        result = await client.string.lower('FOO')
        result2 = await client.string.lower('FOO_JJ')
        # The server can also call back into the client by its identity.
        result3 = await server.send_to(client_id).string.lower('ABC')
        assert result == 'foo'
        assert result2 == 'foo_jj'
        assert result3 == 'abc'
@pytest.mark.asyncio
async def test_untrusted_curve_with_allowed_password_and_client_disconnect(
    loop, unused_tcp_port
):
    """An authenticated untrusted-curve client can disconnect, reconnect and
    keep making RPC calls against the same server."""
    from pseud import Client, Server
    client_id = b'john'
    server_id = b'server'
    endpoint = f'tcp://127.0.0.1:{unused_tcp_port}'
    server_public, server_secret = zmq.curve_keypair()
    client_public, client_secret = zmq.curve_keypair()
    security_plugin = 'untrusted_curve'
    password = b's3cret!'
    client = Client(
        server_id,
        security_plugin=security_plugin,
        public_key=client_public,
        secret_key=client_secret,
        peer_public_key=server_public,
        user_id=client_id,
        password=password,
        timeout=1,
        loop=loop,
    )
    server = Server(
        server_id,
        security_plugin=security_plugin,
        public_key=server_public,
        secret_key=server_secret,
        loop=loop,
    )
    server.bind(endpoint)
    client.connect(endpoint)
    assert server.socket.mechanism == zmq.CURVE
    assert client.socket.mechanism == zmq.CURVE
    # configure manually authentication backend
    server.auth_backend.user_map[client_id] = password
    server.register_rpc(name='string.lower')(str.lower)
    async with server, client:
        result = await client.string.lower('FOO')
        assert result == 'foo'
        # Simulate disconnection and reconnection with new identity
        client.disconnect(endpoint)
        client.connect(endpoint)
        # Give the transport a moment to re-establish before the next call.
        await asyncio.sleep(0.1)
        result = await client.string.lower('ABC')
        assert result == 'abc'
@pytest.mark.asyncio
async def test_untrusted_curve_with_wrong_password(loop, unused_tcp_port):
    """When the server holds a different password for the client's user id,
    the RPC call is rejected with UnauthorizedError."""
    from pseud import Client, Server
    from pseud.interfaces import UnauthorizedError
    from pseud.utils import register_rpc
    client_id = b'john'
    server_id = b'server'
    endpoint = f'tcp://127.0.0.1:{unused_tcp_port}'
    server_public, server_secret = zmq.curve_keypair()
    client_public, client_secret = zmq.curve_keypair()
    security_plugin = 'untrusted_curve'
    password = b's3cret!'
    client = Client(
        server_id,
        user_id=client_id,
        security_plugin=security_plugin,
        public_key=client_public,
        secret_key=client_secret,
        peer_public_key=server_public,
        password=password,
        loop=loop,
    )
    server = Server(
        server_id,
        security_plugin=security_plugin,
        public_key=server_public,
        secret_key=server_secret,
        loop=loop,
    )
    server.bind(endpoint)
    client.connect(endpoint)
    assert server.socket.mechanism == zmq.CURVE
    assert client.socket.mechanism == zmq.CURVE
    # configure manually authentication backend
    # Store a password that differs from the one the client will present.
    server.auth_backend.user_map[client_id] = password + b'Looser'
    register_rpc(name='string.lower')(str.lower)
    async with server, client:
        with pytest.raises(UnauthorizedError):
            await client.string.lower(b'IMSCREAMING')
@pytest.mark.asyncio
async def test_client_can_reconnect(loop, unused_tcp_port_factory):
    """A trusted-curve client can disconnect and reconnect to the same
    endpoint and continue issuing RPC calls."""
    from pseud import Client, Server
    port = unused_tcp_port_factory()
    server_id = b'server'
    endpoint = f'tcp://127.0.0.1:{port}'
    server_public, server_secret = zmq.curve_keypair()
    security_plugin = 'trusted_curve'
    server = Server(
        server_id,
        security_plugin=security_plugin,
        public_key=server_public,
        secret_key=server_secret,
        loop=loop,
    )
    server.bind(endpoint)
    # Fetch bob's keypair from the server backend's known identities.
    bob_public, bob_secret = server.auth_backend.known_identities[b'bob']
    client = Client(
        server_id,
        user_id=b'bob',
        security_plugin=security_plugin,
        public_key=bob_public,
        secret_key=bob_secret,
        peer_public_key=server_public,
        loop=loop,
    )
    client.connect(endpoint)
    assert server.socket.mechanism == zmq.CURVE
    assert client.socket.mechanism == zmq.CURVE
    server.register_rpc(name='string.upper')(str.upper)
    async with server, client:
        result = await client.string.upper('hello')
        assert result == 'HELLO'
        client.disconnect(endpoint)
        client.connect(endpoint)
        # Give the transport a moment to re-establish before the next call.
        await asyncio.sleep(0.01)
        result = await client.string.upper('hello2')
        assert result == 'HELLO2'
@pytest.mark.asyncio
async def test_server_can_send_to_trustable_peer_identity(loop, unused_tcp_port):
    """
    Uses internal metadata of zmq.Frame.get() to fetch identity of sender
    """
    from pseud import Client, Server
    server_id = b'server'
    endpoint = f'tcp://127.0.0.1:{unused_tcp_port}'
    server_public, server_secret = zmq.curve_keypair()
    security_plugin = 'trusted_curve'
    server = Server(
        server_id,
        security_plugin=security_plugin,
        public_key=server_public,
        secret_key=server_secret,
        loop=loop,
    )
    server.bind(endpoint)
    # Fetch bob's keypair from the server backend's known identities.
    bob_public, bob_secret = server.auth_backend.known_identities[b'bob']
    client = Client(
        server_id,
        user_id=b'bob',
        security_plugin=security_plugin,
        public_key=bob_public,
        secret_key=bob_secret,
        peer_public_key=server_public,
        loop=loop,
    )
    client.connect(endpoint)
    assert server.socket.mechanism == zmq.CURVE
    assert client.socket.mechanism == zmq.CURVE

    # RPC registered with the caller's identity passed as first argument.
    @server.register_rpc(with_identity=True)
    def echo(peer_identity, message):
        return peer_identity, message

    async with server, client:
        result = await client.echo(b'one')
        # Sender identity metadata is only available from libzmq >= 4.1.
        if zmq.zmq_version_info() >= (4, 1, 0):
            assert result == (b'bob', b'one')
        else:
            assert result == (b'', b'one')
| 28.920455
| 87
| 0.68222
| 1,246
| 10,180
| 5.326645
| 0.101124
| 0.0675
| 0.027422
| 0.048516
| 0.841193
| 0.805183
| 0.776104
| 0.759078
| 0.714178
| 0.700919
| 0
| 0.008635
| 0.226424
| 10,180
| 351
| 88
| 29.002849
| 0.834159
| 0.017976
| 0
| 0.753571
| 0
| 0
| 0.059655
| 0.021096
| 0
| 0
| 0
| 0
| 0.092857
| 1
| 0.010714
| false
| 0.042857
| 0.075
| 0.003571
| 0.089286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f03b46bebadd84838c835bef4978da9644051ec1
| 7,735
|
py
|
Python
|
pyemittance/emit_eval_example.py
|
slaclab/PyEmittance
|
5daf371dcd8f4511b2808d4829a0911312f953e3
|
[
"MIT"
] | 4
|
2022-02-15T19:29:27.000Z
|
2022-02-19T03:13:08.000Z
|
pyemittance/emit_eval_example.py
|
slaclab/PyEmittance
|
5daf371dcd8f4511b2808d4829a0911312f953e3
|
[
"MIT"
] | null | null | null |
pyemittance/emit_eval_example.py
|
slaclab/PyEmittance
|
5daf371dcd8f4511b2808d4829a0911312f953e3
|
[
"MIT"
] | null | null | null |
from pyemittance.observer import Observer
from pyemittance.data_handler import adapt_range, check_symmetry, find_inflection_pnt, add_measurements_btwn_pnts
from pyemittance.emittance_calc import EmitCalc
# Sample emittance scan function for machine and injector surrogate
def eval_emit_machine(config,
                      quad_init = [-6, -4, -2, 0],
                      online = False,
                      name = 'LCLS',
                      meas_type = 'OTRS',
                      adapt_ranges = True,
                      num_points = 7,
                      check_sym = True,
                      infl_check = True,
                      add_pnts = True,
                      show_plots = False,
                      use_prev_meas = True,
                      quad_tol = 0.05,
                      save_runs = False,
                      calc_bmag = False):
    """Run a quad-scan emittance measurement against the machine.

    Pipeline: measure beam sizes at the ``quad_init`` settings, optionally
    adapt the scan range and re-measure, optionally symmetrize, truncate past
    inflection points, densify with extra points, then fit with ``EmitCalc``.

    Returns ``(ef.out_dict, total_points_measured)``.

    NOTE(review): ``quad_init`` is a mutable default argument; this is safe
    only if no callee mutates the list — confirm.
    """
    # get initial points from the observer
    o = Observer([], {'x': [], 'y': []}, {'x': [], 'y': []})
    o.use_model = False  # measure the machine rather than the surrogate model
    o.config = config
    o.online = online
    o.name = name
    o.meas_type = meas_type
    o.use_prev_meas = use_prev_meas
    o.tolerance = quad_tol
    # get initial beamsizes (rough scan)
    bs_x_list, bs_y_list, bs_x_list_err, bs_y_list_err = o.measure_beam(quad_init)
    quad_range_x = quad_init
    quad_range_y = quad_init
    if adapt_ranges:
        # Adapt the quad ranges from the rough measurements, then re-measure
        # beam sizes on the adapted ranges (x/y handled independently).
        quad_range_x = adapt_range(quad_range_x, bs_x_list, 'x', w=bs_x_list_err, num_points=num_points)
        quad_range_y = adapt_range(quad_range_y, bs_y_list, 'y', w=bs_y_list_err, num_points=num_points)
        new_beamsize_x = o.measure_beam(quad_range_x)
        bs_x_list, bs_x_list_err = new_beamsize_x[0], new_beamsize_x[2]
        new_beamsize_y = o.measure_beam(quad_range_y)
        bs_y_list, bs_y_list_err = new_beamsize_y[1], new_beamsize_y[3]
    else:
        quad_range_x = quad_init
        quad_range_y = quad_init
    if check_sym:
        # check_symmetry may take extra measurements; it returns either None
        # (already symmetric) or updated (range, sizes, errors) triples.
        add_points_x = check_symmetry(quad_range_x, bs_x_list, bs_x_list_err, 'x',
                                      bs_fn=o.measure_beam, add_meas=True)
        add_points_y = check_symmetry(quad_range_y, bs_y_list, bs_y_list_err, 'y',
                                      bs_fn=o.measure_beam, add_meas=True)
        if add_points_x is not None:
            quad_range_x = add_points_x[0]
            bs_x_list = add_points_x[1]
            bs_x_list_err = add_points_x[2]
        if add_points_y is not None:
            quad_range_y = add_points_y[0]
            bs_y_list = add_points_y[1]
            bs_y_list_err = add_points_y[2]
    if infl_check:
        # Keep only the data between the detected inflection points.
        left_x, right_x = find_inflection_pnt(quad_range_x, bs_x_list, show_plots=show_plots)
        left_y, right_y = find_inflection_pnt(quad_range_y, bs_y_list, show_plots=show_plots)
        # truncate data
        quad_range_x = quad_range_x[left_x:right_x]
        bs_x_list = bs_x_list[left_x:right_x]
        bs_x_list_err = bs_x_list_err[left_x:right_x]
        quad_range_y = quad_range_y[left_y:right_y]
        bs_y_list = bs_y_list[left_y:right_y]
        bs_y_list_err = bs_y_list_err[left_y:right_y]
    if add_pnts:
        # Densify the scan by measuring additional points between existing ones.
        quad_range_x, bs_x_list, bs_x_list_err = add_measurements_btwn_pnts(quad_range_x, bs_x_list, bs_x_list_err,
                                                                            num_points, 'x', bs_fn=o.measure_beam)
        quad_range_y, bs_y_list, bs_y_list_err = add_measurements_btwn_pnts(quad_range_y, bs_y_list, bs_y_list_err,
                                                                            num_points, 'y', bs_fn=o.measure_beam)
    # finally get emittance
    ef = EmitCalc({'x': quad_range_x, 'y': quad_range_y},
                  {'x': bs_x_list, 'y': bs_y_list},
                  {'x': bs_x_list_err, 'y': bs_y_list_err}
                  )
    ef.plot = show_plots
    ef.save_runs = save_runs
    ef.calc_bmag = calc_bmag
    # get normalized transverse emittance
    ef.get_emit()
    # get geom mean of normalized emittances
    ef.get_gmean_emit()
    total_points_measured = len(o.quad_meas)
    return ef.out_dict, total_points_measured
def eval_emit_surrogate(get_bs_model,
                        config,
                        quad_init = [-6, -4, -2, 0],
                        adapt_ranges = True,
                        num_points = 7,
                        check_sym = True,
                        infl_check = True,
                        add_pnts = True,
                        show_plots = False,
                        add_noise = False,
                        save_runs= False,
                        calc_bmag = False):
    """Run the same quad-scan emittance pipeline as ``eval_emit_machine``,
    but against a surrogate model (*get_bs_model* supplies beam sizes).

    Returns ``(ef.out_dict, total_points_measured)``.

    NOTE(review): ``quad_init`` is a mutable default argument; safe only if
    no callee mutates the list — confirm.
    """
    # get initial points from the observer
    o = Observer([], {'x': [], 'y': []}, {'x': [], 'y': []})
    o.use_model = True
    # set beamsize fn for MODEL
    o.get_beamsizes_model = get_bs_model
    o.config = config
    o.add_noise = add_noise
    # get initial beamsizes (rough scan)
    bs_x_list, bs_y_list, bs_x_list_err, bs_y_list_err = o.measure_beam(quad_init)
    quad_range_x = quad_init
    quad_range_y = quad_init
    if adapt_ranges:
        # Adapt the quad ranges from the rough measurements, then re-measure.
        quad_range_x = adapt_range(quad_range_x, bs_x_list, 'x', w=bs_x_list_err, num_points=num_points)
        quad_range_y = adapt_range(quad_range_y, bs_y_list, 'y', w=bs_y_list_err, num_points=num_points)
        new_beamsize_x = o.measure_beam(quad_range_x)
        bs_x_list, bs_x_list_err = new_beamsize_x[0], new_beamsize_x[2]
        new_beamsize_y = o.measure_beam(quad_range_y)
        bs_y_list, bs_y_list_err = new_beamsize_y[1], new_beamsize_y[3]
    if check_sym:
        # check_symmetry may take extra measurements; None means "already symmetric".
        add_points_x = check_symmetry(quad_range_x, bs_x_list, bs_x_list_err, 'x',
                                      bs_fn=o.measure_beam, add_meas=True)
        add_points_y = check_symmetry(quad_range_y, bs_y_list, bs_y_list_err, 'y',
                                      bs_fn=o.measure_beam, add_meas=True)
        if add_points_x is not None:
            quad_range_x = add_points_x[0]
            bs_x_list = add_points_x[1]
            bs_x_list_err = add_points_x[2]
        if add_points_y is not None:
            quad_range_y = add_points_y[0]
            bs_y_list = add_points_y[1]
            bs_y_list_err = add_points_y[2]
    if infl_check:
        # Keep only the data between the detected inflection points.
        left_x, right_x = find_inflection_pnt(quad_range_x, bs_x_list, show_plots=show_plots)
        left_y, right_y = find_inflection_pnt(quad_range_y, bs_y_list, show_plots=show_plots)
        # truncate data
        quad_range_x = quad_range_x[left_x:right_x]
        bs_x_list = bs_x_list[left_x:right_x]
        bs_x_list_err = bs_x_list_err[left_x:right_x]
        quad_range_y = quad_range_y[left_y:right_y]
        bs_y_list = bs_y_list[left_y:right_y]
        bs_y_list_err = bs_y_list_err[left_y:right_y]
    if add_pnts:
        # Densify the scan by measuring additional points between existing ones.
        quad_range_x, bs_x_list, bs_x_list_err = add_measurements_btwn_pnts(quad_range_x, bs_x_list, bs_x_list_err,
                                                                            num_points, 'x', bs_fn=o.measure_beam)
        quad_range_y, bs_y_list, bs_y_list_err = add_measurements_btwn_pnts(quad_range_y, bs_y_list, bs_y_list_err,
                                                                            num_points, 'y', bs_fn=o.measure_beam)
    # finally get emittance
    ef = EmitCalc({'x': quad_range_x, 'y': quad_range_y},
                  {'x': bs_x_list, 'y': bs_y_list},
                  {'x': bs_x_list_err, 'y': bs_y_list_err}
                  )
    ef.plot = show_plots
    ef.save_runs = save_runs
    ef.calc_bmag = calc_bmag
    # get normalized transverse emittance
    ef.get_emit()
    # get geom mean of normalized emittances
    ef.get_gmean_emit()
    total_points_measured = len(o.quad_meas)
    return ef.out_dict, total_points_measured
| 40.07772
| 115
| 0.600259
| 1,179
| 7,735
| 3.450382
| 0.091603
| 0.110619
| 0.072271
| 0.049164
| 0.869223
| 0.869223
| 0.869223
| 0.860374
| 0.860374
| 0.860374
| 0
| 0.006259
| 0.318423
| 7,735
| 193
| 116
| 40.07772
| 0.765364
| 0.059082
| 0
| 0.830986
| 0
| 0
| 0.005507
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014085
| false
| 0
| 0.021127
| 0
| 0.049296
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f052ab7c875081fb34848a7990466f3c90971d98
| 9,002
|
py
|
Python
|
tests/draw/test_leader.py
|
rianmcguire/WeasyPrint
|
7e400663236d16121e14cf3183ce53828d056092
|
[
"BSD-3-Clause"
] | 4,512
|
2015-01-02T16:40:59.000Z
|
2022-03-31T17:26:28.000Z
|
tests/draw/test_leader.py
|
rianmcguire/WeasyPrint
|
7e400663236d16121e14cf3183ce53828d056092
|
[
"BSD-3-Clause"
] | 1,420
|
2015-01-07T21:17:01.000Z
|
2022-03-31T10:23:45.000Z
|
tests/draw/test_leader.py
|
rianmcguire/WeasyPrint
|
7e400663236d16121e14cf3183ce53828d056092
|
[
"BSD-3-Clause"
] | 640
|
2015-01-30T18:07:09.000Z
|
2022-03-24T20:17:42.000Z
|
"""
weasyprint.tests.test_draw.test_leader
--------------------------------------
Test how leaders are drawn.
"""
import pytest
from ..testing_utils import assert_no_logs
from . import assert_pixels
@assert_no_logs
def test_leader_simple():
    """A dotted leader expands between each div's text and its roman counter."""
    pixels = '''
        RR__BBBBBBBB__BB
        RR__BBBBBBBB__BB
        RRRR__BBBB__BBBB
        RRRR__BBBB__BBBB
        RR__BBBB__BBBBBB
        RR__BBBB__BBBBBB
    '''
    document = '''
        <style>
          @font-face {src: url(weasyprint.otf); font-family: weasyprint}
          @page {
            background: white;
            size: 16px 6px;
          }
          body {
            color: red;
            counter-reset: count;
            font-family: weasyprint;
            font-size: 2px;
            line-height: 1;
          }
          div::after {
            color: blue;
            content: ' ' leader(dotted) ' ' counter(count, lower-roman);
            counter-increment: count;
          }
        </style>
        <div>a</div>
        <div>bb</div>
        <div>c</div>
    '''
    assert_pixels('leader-simple', 16, 6, pixels, document)
@assert_no_logs
def test_leader_too_long():
    """When a line leaves no room, the leader content wraps to the next line."""
    pixels = '''
        RRRRRRRRRR______
        RRRRRRRRRR______
        BBBBBBBBBBBB__BB
        BBBBBBBBBBBB__BB
        RR__RR__RR__RR__
        RR__RR__RR__RR__
        RR__RR__RR______
        RR__RR__RR______
        BBBBBBBBBB__BBBB
        BBBBBBBBBB__BBBB
        RR__RR__RR__RR__
        RR__RR__RR__RR__
        RR__BBBB__BBBBBB
        RR__BBBB__BBBBBB
    '''
    document = '''
        <style>
          @font-face {src: url(weasyprint.otf); font-family: weasyprint}
          @page {
            background: white;
            size: 16px 14px;
          }
          body {
            color: red;
            counter-reset: count;
            font-family: weasyprint;
            font-size: 2px;
            line-height: 1;
          }
          div::after {
            color: blue;
            content: ' ' leader(dotted) ' ' counter(count, lower-roman);
            counter-increment: count;
          }
        </style>
        <div>aaaaa</div>
        <div>a a a a a a a</div>
        <div>a a a a a</div>
    '''
    assert_pixels('leader-too-long', 16, 14, pixels, document)
@assert_no_logs
def test_leader_alone():
    """A leader with no following content fills the rest of the line."""
    pixels = '''
        RRBBBBBBBBBBBBBB
        RRBBBBBBBBBBBBBB
    '''
    document = '''
        <style>
          @font-face {src: url(weasyprint.otf); font-family: weasyprint}
          @page {
            background: white;
            size: 16px 2px;
          }
          body {
            color: red;
            font-family: weasyprint;
            font-size: 2px;
            line-height: 1;
          }
          div::after {
            color: blue;
            content: leader(dotted);
          }
        </style>
        <div>a</div>
    '''
    assert_pixels('leader-alone', 16, 2, pixels, document)
@assert_no_logs
def test_leader_content():
    """A leader built from a custom string pattern is repeated to fill space."""
    pixels = '''
        RR____BB______BB
        RR____BB______BB
    '''
    document = '''
        <style>
          @font-face {src: url(weasyprint.otf); font-family: weasyprint}
          @page {
            background: white;
            size: 16px 2px;
          }
          body {
            color: red;
            font-family: weasyprint;
            font-size: 2px;
            line-height: 1;
          }
          div::after {
            color: blue;
            content: leader(' . ') 'a';
          }
        </style>
        <div>a</div>
    '''
    assert_pixels('leader-content', 16, 2, pixels, document)
@pytest.mark.xfail
@assert_no_logs
def test_leader_float():
    """Leaders next to a floated element (currently expected to fail)."""
    pixels = '''
        bbGRR___BB____BB
        bbGRR___BB____BB
        GGGRR___BB____BB
        ___RR___BB____BB
    '''
    document = '''
        <style>
          @font-face {src: url(weasyprint.otf); font-family: weasyprint}
          @page {
            background: white;
            size: 16px 4px;
          }
          body {
            color: red;
            font-family: weasyprint;
            font-size: 2px;
            line-height: 1;
          }
          article {
            background: lime;
            color: navy;
            float: left;
            height: 3px;
            width: 3px;
          }
          div::after {
            color: blue;
            content: leader('. ') 'a';
          }
        </style>
        <div>a<article>a</article></div>
        <div>a</div>
    '''
    assert_pixels('leader-float', 16, 4, pixels, document)
@assert_no_logs
def test_leader_in_inline():
    """A leader generated by an inline element's ::after is drawn in-line."""
    pixels = '''
        RR__GGBBBBBB__RR
        RR__GGBBBBBB__RR
    '''
    document = '''
        <style>
          @font-face {src: url(weasyprint.otf); font-family: weasyprint}
          @page {
            background: white;
            size: 16px 2px;
          }
          body {
            color: red;
            font-family: weasyprint;
            font-size: 2px;
            line-height: 1;
          }
          span {
            color: lime;
          }
          span::after {
            color: blue;
            content: leader('-');
          }
        </style>
        <div>a <span>a</span> a</div>
    '''
    assert_pixels('leader-in-inline', 16, 2, pixels, document)
@pytest.mark.xfail
@assert_no_logs
def test_leader_bad_alignment():
    """Leader alignment when wrapped to a new line (currently expected to fail).

    Bug fix: the reference name passed to assert_pixels was
    'leader-in-inline', copy-pasted from the previous test, which made this
    test collide with test_leader_in_inline's reference artifacts.  It now
    uses its own name, 'leader-bad-alignment'.
    """
    expected_pixels = '''
        RRRRRR__________
        RRRRRR__________
        ______BB______RR
        ______BB______RR
    '''
    html = '''
        <style>
          @font-face {src: url(weasyprint.otf); font-family: weasyprint}
          @page {
            background: white;
            size: 16px 4px;
          }
          body {
            color: red;
            font-family: weasyprint;
            font-size: 2px;
            line-height: 1;
          }
          div::after {
            color: blue;
            content: leader(' - ') 'a';
          }
        </style>
        <div>aaa</div>
    '''
    assert_pixels('leader-bad-alignment', 16, 4, expected_pixels, html)
@assert_no_logs
def test_leader_simple_rtl():
    """Dotted leaders mirror horizontally in right-to-left direction."""
    pixels = '''
        BB__BBBBBBBB__RR
        BB__BBBBBBBB__RR
        BBBB__BBBB__RRRR
        BBBB__BBBB__RRRR
        BBBBBB__BBBB__RR
        BBBBBB__BBBB__RR
    '''
    document = '''
        <style>
          @font-face {src: url(weasyprint.otf); font-family: weasyprint}
          @page {
            background: white;
            size: 16px 6px;
          }
          body {
            color: red;
            counter-reset: count;
            direction: rtl;
            font-family: weasyprint;
            font-size: 2px;
            line-height: 1;
          }
          div::after {
            color: blue;
            /* RTL Mark used in second space */
            content: ' ' leader(dotted) ' ' counter(count, lower-roman);
            counter-increment: count;
          }
        </style>
        <div>a</div>
        <div>bb</div>
        <div>c</div>
    '''
    assert_pixels('leader-simple-rtl', 16, 6, pixels, document)
@assert_no_logs
def test_leader_too_long_rtl():
    """Leader wrapping across lines in right-to-left direction."""
    pixels = '''
        ______RRRRRRRRRR
        ______RRRRRRRRRR
        BB__BBBBBBBBBBBB
        BB__BBBBBBBBBBBB
        __RR__RR__RR__RR
        __RR__RR__RR__RR
        ______RR__RR__RR
        ______RR__RR__RR
        BBBB__BBBBBBBBBB
        BBBB__BBBBBBBBBB
        __RR__RR__RR__RR
        __RR__RR__RR__RR
        BBBBBB__BBBB__RR
        BBBBBB__BBBB__RR
    '''
    document = '''
        <style>
          @font-face {src: url(weasyprint.otf); font-family: weasyprint}
          @page {
            background: white;
            size: 16px 14px;
          }
          body {
            color: red;
            counter-reset: count;
            direction: rtl;
            font-family: weasyprint;
            font-size: 2px;
            line-height: 1;
          }
          div::after {
            color: blue;
            /* RTL Mark used in second space */
            content: ' ' leader(dotted) ' ' counter(count, lower-roman);
            counter-increment: count;
          }
        </style>
        <div>aaaaa</div>
        <div>a a a a a a a</div>
        <div>a a a a a</div>
    '''
    assert_pixels('leader-too-long-rtl', 16, 14, pixels, document)
@assert_no_logs
def test_leader_float_leader():
    """Regression test: https://github.com/Kozea/WeasyPrint/issues/1409

    Leaders in floats are not displayed at all in many cases with the current
    implementation, and this case is not really specified.  So…
    """
    pixels = '''
        RR____________BB
        RR____________BB
        RRRR__________BB
        RRRR__________BB
        RR____________BB
        RR____________BB
    '''
    document = '''
        <style>
          @font-face {src: url(weasyprint.otf); font-family: weasyprint}
          @page {
            background: white;
            size: 16px 6px;
          }
          body {
            color: red;
            font-family: weasyprint;
            font-size: 2px;
            line-height: 1;
          }
          div::after {
            color: blue;
            content: leader(' . ') 'a';
            float: right;
          }
        </style>
        <div>a</div>
        <div>bb</div>
        <div>c</div>
    '''
    assert_pixels('leader-float-leader', 16, 6, pixels, document)
| 23.565445
| 79
| 0.519551
| 946
| 9,002
| 4.460888
| 0.135307
| 0.03981
| 0.052607
| 0.062559
| 0.77654
| 0.75237
| 0.746446
| 0.709479
| 0.705687
| 0.695498
| 0
| 0.015756
| 0.365474
| 9,002
| 381
| 80
| 23.627297
| 0.722164
| 0.033881
| 0
| 0.790831
| 0
| 0
| 0.815541
| 0.003689
| 0
| 0
| 0
| 0
| 0.063037
| 1
| 0.028653
| false
| 0
| 0.008596
| 0
| 0.037249
| 0.057307
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f0602aba208434c38bfdf363d5bb1b098e703f86
| 79,064
|
py
|
Python
|
lib/schema.py
|
hep-gc/cloudscheduler
|
2f6afab61e2404920b33279b6ebfbc09c7bddfa1
|
[
"Apache-2.0"
] | 3
|
2020-03-03T03:25:36.000Z
|
2021-12-03T15:31:39.000Z
|
lib/schema.py
|
hep-gc/cloudscheduler
|
2f6afab61e2404920b33279b6ebfbc09c7bddfa1
|
[
"Apache-2.0"
] | 341
|
2017-06-08T17:27:59.000Z
|
2022-01-28T19:37:57.000Z
|
lib/schema.py
|
hep-gc/cloudscheduler
|
2f6afab61e2404920b33279b6ebfbc09c7bddfa1
|
[
"Apache-2.0"
] | 3
|
2018-04-25T16:13:20.000Z
|
2020-04-15T20:03:46.000Z
|
schema = {
"apel_accounting": {
"keys": [
"group_name",
"cloud_name",
"vmid"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"vmid": {"type": "str", "len": "128", "nulls": "NO"},
"hostname": {"type": "str", "len": "128", "nulls": "NO"},
"cloud_type": {"type": "str", "len": "32", "nulls": "YES"},
"region": {"type": "str", "len": "32", "nulls": "YES"},
"flavor_id": {"type": "str", "len": "128", "nulls": "YES"},
"image_id": {"type": "str", "len": "128", "nulls": "YES"},
"benchmark_type": {"type": "str", "len": "32", "nulls": "YES"},
"benchmark": {"type": "int"},
"start_time": {"type": "int"},
"end_time": {"type": "int"},
"last_update": {"type": "int"},
"cpu_time": {"type": "int"},
"network_type": {"type": "str", "len": "32", "nulls": "YES"},
"rx": {"type": "int"},
"tx": {"type": "int"}
}
},
"archived_condor_jobs": {
"keys": [
"global_job_id"
],
"columns": {
"global_job_id": {"type": "str", "len": "128", "nulls": "NO"},
"group_name": {"type": "str", "len": "128", "nulls": "YES"},
"target_clouds": {"type": "str", "nulls": "YES"},
"cloud_name": {"type": "str", "nulls": "YES"},
"job_status": {"type": "int"},
"request_cpus": {"type": "int"},
"request_ram": {"type": "int"},
"request_disk": {"type": "int"},
"request_swap": {"type": "int"},
"request_scratch": {"type": "int"},
"requirements": {"type": "str", "len": "512", "nulls": "YES"},
"job_priority": {"type": "int"},
"cluster_id": {"type": "int"},
"proc_id": {"type": "int"},
"user": {"type": "str", "len": "512", "nulls": "YES"},
"image": {"type": "str", "nulls": "YES"},
"instance_type": {"type": "str", "len": "512", "nulls": "YES"},
"network": {"type": "str", "len": "512", "nulls": "YES"},
"keep_alive": {"type": "str", "len": "512", "nulls": "YES"},
"max_price": {"type": "str", "len": "512", "nulls": "YES"},
"user_data": {"type": "str", "len": "512", "nulls": "YES"},
"job_per_core": {"type": "int"},
"entered_current_status": {"type": "int"},
"q_date": {"type": "int"},
"hold_job_reason": {"type": "str", "len": "64", "nulls": "YES"},
"held_reason": {"type": "str", "len": "64", "nulls": "YES"}
}
},
"archived_condor_machines": {
"keys": [
"name"
],
"columns": {
"name": {"type": "str", "len": "128", "nulls": "NO"},
"machine": {"type": "str", "len": "256", "nulls": "YES"},
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"condor_host": {"type": "str", "len": "64", "nulls": "YES"},
"flavor": {"type": "str", "len": "32", "nulls": "YES"},
"job_id": {"type": "str", "len": "128", "nulls": "YES"},
"global_job_id": {"type": "str", "len": "128", "nulls": "YES"},
"address": {"type": "str", "len": "512", "nulls": "YES"},
"state": {"type": "str", "len": "128", "nulls": "YES"},
"activity": {"type": "str", "len": "128", "nulls": "YES"},
"vm_type": {"type": "str", "len": "128", "nulls": "YES"},
"my_current_time": {"type": "int"},
"entered_current_state": {"type": "int"},
"start": {"type": "str", "len": "128", "nulls": "YES"},
"remote_owner": {"type": "str", "len": "128", "nulls": "YES"},
"slot_type": {"type": "str", "len": "128", "nulls": "YES"},
"total_slots": {"type": "int"},
"idle_time": {"type": "int"},
"retire_request_time": {"type": "int"},
"retired_time": {"type": "int"}
}
},
"auth_group": {
"keys": [
"id"
],
"columns": {
"id": {"type": "int"},
"name": {"type": "str", "len": "80", "nulls": "NO"}
}
},
"auth_group_permissions": {
"keys": [
"id"
],
"columns": {
"id": {"type": "int"},
"group_id": {"type": "int"},
"permission_id": {"type": "int"}
}
},
"auth_permission": {
"keys": [
"id"
],
"columns": {
"id": {"type": "int"},
"name": {"type": "str", "len": "255", "nulls": "NO"},
"content_type_id": {"type": "int"},
"codename": {"type": "str", "len": "100", "nulls": "NO"}
}
},
"auth_user": {
"keys": [
"id"
],
"columns": {
"id": {"type": "int"},
"password": {"type": "str", "len": "128", "nulls": "NO"},
"last_login": {"type": "str", "nulls": "YES"},
"is_superuser": {"type": "int"},
"username": {"type": "str", "len": "150", "nulls": "NO"},
"first_name": {"type": "str", "len": "30", "nulls": "NO"},
"last_name": {"type": "str", "len": "150", "nulls": "NO"},
"email": {"type": "str", "len": "254", "nulls": "NO"},
"is_staff": {"type": "int"},
"is_active": {"type": "int"},
"date_joined": {"type": "str", "nulls": "NO"}
}
},
"auth_user_groups": {
"keys": [
"id"
],
"columns": {
"id": {"type": "int"},
"user_id": {"type": "int"},
"group_id": {"type": "int"}
}
},
"auth_user_user_permissions": {
"keys": [
"id"
],
"columns": {
"id": {"type": "int"},
"user_id": {"type": "int"},
"permission_id": {"type": "int"}
}
},
"cloud_flavors": {
"keys": [
"group_name",
"cloud_name",
"id"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"id": {"type": "str", "len": "128", "nulls": "NO"},
"name": {"type": "str", "len": "128", "nulls": "YES"},
"cloud_type": {"type": "str", "len": "64", "nulls": "YES"},
"ram": {"type": "int"},
"cores": {"type": "int"},
"swap": {"type": "int"},
"disk": {"type": "int"},
"ephemeral_disk": {"type": "int"},
"is_public": {"type": "int"},
"last_updated": {"type": "int"}
}
},
"cloud_images": {
"keys": [
"group_name",
"cloud_name",
"id"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"id": {"type": "str", "len": "128", "nulls": "NO"},
"cloud_type": {"type": "str", "len": "64", "nulls": "YES"},
"container_format": {"type": "str", "len": "128", "nulls": "YES"},
"disk_format": {"type": "str", "len": "128", "nulls": "YES"},
"name": {"type": "str", "len": "256", "nulls": "YES"},
"size": {"type": "int"},
"visibility": {"type": "str", "len": "128", "nulls": "YES"},
"min_disk": {"type": "int"},
"min_ram": {"type": "int"},
"checksum": {"type": "str", "len": "64", "nulls": "YES"},
"created_at": {"type": "str", "len": "32", "nulls": "YES"},
"last_updated": {"type": "int"}
}
},
"cloud_keypairs": {
"keys": [
"group_name",
"cloud_name",
"fingerprint",
"key_name"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"fingerprint": {"type": "str", "len": "64", "nulls": "NO"},
"key_name": {"type": "str", "len": "64", "nulls": "NO"},
"cloud_type": {"type": "str", "len": "64", "nulls": "YES"}
}
},
"cloud_limits": {
"keys": [
"group_name",
"cloud_name"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_type": {"type": "str", "len": "64", "nulls": "YES"},
"server_meta_max": {"type": "int"},
"instances_max": {"type": "int"},
"personality_max": {"type": "int"},
"image_meta_max": {"type": "int"},
"personality_size_max": {"type": "int"},
"ram_max": {"type": "int"},
"server_groups_max": {"type": "int"},
"security_group_rules_max": {"type": "int"},
"keypairs_max": {"type": "int"},
"security_groups_max": {"type": "int"},
"server_group_members_max": {"type": "int"},
"floating_ips_max": {"type": "int"},
"cores_max": {"type": "int"},
"server_groups_used": {"type": "int"},
"instances_used": {"type": "int"},
"ram_used": {"type": "int"},
"security_groups_used": {"type": "int"},
"floating_ips_used": {"type": "int"},
"cores_used": {"type": "int"},
"volumes_max": {"type": "int"},
"volumes_used": {"type": "int"},
"volume_gigs_max": {"type": "int"},
"volume_gigs_used": {"type": "int"},
"last_updated": {"type": "int"}
}
},
"cloud_networks": {
"keys": [
"group_name",
"cloud_name",
"id"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"id": {"type": "str", "len": "128", "nulls": "NO"},
"name": {"type": "str", "len": "256", "nulls": "NO"},
"cloud_type": {"type": "str", "len": "64", "nulls": "YES"},
"subnets": {"type": "str", "len": "256", "nulls": "YES"},
"tenant_id": {"type": "str", "len": "128", "nulls": "YES"},
"external_route": {"type": "int"},
"shared": {"type": "int"},
"last_updated": {"type": "int"}
}
},
"cloud_security_groups": {
"keys": [
"group_name",
"cloud_name",
"id"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"id": {"type": "str", "len": "64", "nulls": "NO"},
"name": {"type": "str", "len": "128", "nulls": "YES"},
"cloud_type": {"type": "str", "len": "64", "nulls": "YES"},
"last_updated": {"type": "int"}
}
},
"cloud_volumes": {
"keys": [
"group_name",
"cloud_name",
"id"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"name": {"type": "str", "len": "256", "nulls": "NO"},
"id": {"type": "str", "len": "128", "nulls": "NO"},
"size": {"type": "int"},
"volume_type": {"type": "str", "len": "64", "nulls": "NO"},
"status": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_type": {"type": "str", "len": "32", "nulls": "NO"},
"created_at": {"type": "int"},
"last_updated": {"type": "int"}
}
},
"condor_jobs": {
"keys": [
"global_job_id"
],
"columns": {
"global_job_id": {"type": "str", "len": "128", "nulls": "NO"},
"htcondor_host_id": {"type": "int"},
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"target_clouds": {"type": "str", "nulls": "YES"},
"target_alias": {"type": "str", "len": "32", "nulls": "YES"},
"job_status": {"type": "int"},
"request_cpus": {"type": "int"},
"request_ram": {"type": "int"},
"request_disk": {"type": "int"},
"request_swap": {"type": "int"},
"request_scratch": {"type": "int"},
"requirements": {"type": "str", "len": "512", "nulls": "YES"},
"job_priority": {"type": "int"},
"cluster_id": {"type": "int"},
"proc_id": {"type": "int"},
"user": {"type": "str", "len": "512", "nulls": "YES"},
"image": {"type": "str", "nulls": "YES"},
"instance_type": {"type": "str", "len": "512", "nulls": "YES"},
"network": {"type": "str", "len": "512", "nulls": "YES"},
"keep_alive": {"type": "str", "len": "512", "nulls": "YES"},
"max_price": {"type": "str", "len": "512", "nulls": "YES"},
"user_data": {"type": "str", "len": "512", "nulls": "YES"},
"job_per_core": {"type": "int"},
"entered_current_status": {"type": "int"},
"q_date": {"type": "int"},
"hold_reason_code": {"type": "int"},
"hold_reason_subcode": {"type": "int"},
"last_remote_host": {"type": "str", "len": "64", "nulls": "YES"},
"held_reason": {"type": "str", "len": "512", "nulls": "YES"},
"hold_job_reason": {"type": "str", "len": "64", "nulls": "YES"}
}
},
"condor_machines": {
"keys": [
"name"
],
"columns": {
"name": {"type": "str", "len": "128", "nulls": "NO"},
"htcondor_host_id": {"type": "int"},
"machine": {"type": "str", "len": "256", "nulls": "YES"},
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"cloud_name": {"type": "str", "len": "32", "nulls": "YES"},
"condor_host": {"type": "str", "len": "64", "nulls": "YES"},
"flavor": {"type": "str", "len": "32", "nulls": "YES"},
"job_id": {"type": "str", "len": "128", "nulls": "YES"},
"global_job_id": {"type": "str", "len": "128", "nulls": "YES"},
"address": {"type": "str", "len": "512", "nulls": "YES"},
"state": {"type": "str", "len": "128", "nulls": "YES"},
"activity": {"type": "str", "len": "128", "nulls": "YES"},
"vm_type": {"type": "str", "len": "128", "nulls": "YES"},
"my_current_time": {"type": "int"},
"entered_current_state": {"type": "int"},
"start": {"type": "str", "len": "128", "nulls": "YES"},
"remote_owner": {"type": "str", "len": "128", "nulls": "YES"},
"total_disk": {"type": "int"},
"slot_type": {"type": "str", "len": "128", "nulls": "YES"},
"slot_cpus": {"type": "int"},
"total_slots": {"type": "int"},
"idle_time": {"type": "int"},
"deprecated-retire_request_time": {"type": "int"},
"deprecated-retired_time": {"type": "int"}
}
},
"condor_worker_gsi": {
"keys": [
"htcondor_fqdn"
],
"columns": {
"htcondor_fqdn": {"type": "str", "len": "128", "nulls": "NO"},
"htcondor_host_id": {"type": "int"},
"worker_dn": {"type": "str", "len": "128", "nulls": "YES"},
"worker_eol": {"type": "int"},
"worker_cert": {"type": "str", "nulls": "YES"},
"worker_key": {"type": "str", "nulls": "YES"}
}
},
"csv2_attribute_mapping": {
"keys": [
"csv2"
],
"columns": {
"csv2": {"type": "str", "len": "64", "nulls": "NO"},
"os_limits": {"type": "str", "len": "64", "nulls": "YES"},
"os_flavors": {"type": "str", "len": "64", "nulls": "YES"},
"os_images": {"type": "str", "len": "64", "nulls": "YES"},
"os_networks": {"type": "str", "len": "64", "nulls": "YES"},
"os_vms": {"type": "str", "len": "64", "nulls": "YES"},
"os_sec_grps": {"type": "str", "len": "64", "nulls": "YES"},
"condor": {"type": "str", "len": "64", "nulls": "YES"},
"ec2_flavors": {"type": "str", "len": "64", "nulls": "YES"},
"ec2_limits": {"type": "str", "len": "64", "nulls": "YES"},
"ec2_regions": {"type": "str", "len": "64", "nulls": "YES"},
"ec2_images": {"type": "str", "len": "64", "nulls": "YES"},
"ec2_vms": {"type": "str", "len": "20", "nulls": "NO"}
}
},
"csv2_cloud_aliases": {
"keys": [
"group_name",
"cloud_name",
"alias_name"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"alias_name": {"type": "str", "len": "32", "nulls": "NO"}
}
},
"csv2_cloud_flavor_exclusions": {
"keys": [
"group_name",
"cloud_name",
"flavor_name"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"flavor_name": {"type": "str", "len": "128", "nulls": "NO"}
}
},
"csv2_cloud_metadata": {
"keys": [
"group_name",
"cloud_name",
"metadata_name"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"metadata_name": {"type": "str", "len": "64", "nulls": "NO"},
"enabled": {"type": "int"},
"priority": {"type": "int"},
"metadata": {"type": "str", "nulls": "NO"},
"mime_type": {"type": "str", "len": "128", "nulls": "NO"}
}
},
"csv2_cloud_types": {
"keys": [
"cloud_type"
],
"columns": {
"cloud_type": {"type": "str", "len": "32", "nulls": "NO"}
}
},
"csv2_clouds": {
"keys": [
"group_name",
"cloud_name"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"enabled": {"type": "int"},
"priority": {"type": "int"},
"authurl": {"type": "str", "len": "128", "nulls": "NO"},
"project": {"type": "str", "len": "128", "nulls": "NO"},
"username": {"type": "str", "len": "20", "nulls": "NO"},
"userid": {"type": "str", "len": "64", "nulls": "YES"},
"password": {"type": "str", "nulls": "NO"},
"obsolete_keyname": {"type": "str", "len": "20", "nulls": "YES"},
"cacertificate": {"type": "str", "nulls": "YES"},
"region": {"type": "str", "len": "32", "nulls": "NO"},
"user_domain_name": {"type": "str", "len": "20", "nulls": "NO"},
"user_domain_id": {"type": "str", "len": "64", "nulls": "YES"},
"project_domain_name": {"type": "str", "len": "20", "nulls": "NO"},
"project_domain_id": {"type": "str", "len": "64", "nulls": "YES"},
"cloud_type": {"type": "str", "len": "64", "nulls": "NO"},
"ec2_owner_id": {"type": "str", "len": "32", "nulls": "YES"},
"auth_type": {"type": "str", "len": "32", "nulls": "YES"},
"app_credentials": {"type": "str", "len": "128", "nulls": "YES"},
"app_credentials_secret": {"type": "str", "len": "128", "nulls": "YES"},
"app_credentials_expiry": {"type": "int"},
"communication_up": {"type": "int"},
"communication_rt": {"type": "int"},
"server_meta_ctl": {"type": "int"},
"instances_ctl": {"type": "int"},
"personality_ctl": {"type": "int"},
"image_meta_ctl": {"type": "int"},
"personality_size_ctl": {"type": "int"},
"ram_ctl": {"type": "int"},
"server_groups_ctl": {"type": "int"},
"security_group_rules_ctl": {"type": "int"},
"keypairs_ctl": {"type": "int"},
"security_groups_ctl": {"type": "int"},
"server_group_members_ctl": {"type": "int"},
"floating_ips_ctl": {"type": "int"},
"cores_ctl": {"type": "int"},
"cores_softmax": {"type": "int"},
"spot_price": {"type": "float"},
"vm_boot_volume": {"type": "str", "len": "64", "nulls": "YES"},
"vm_flavor": {"type": "str", "len": "64", "nulls": "NO"},
"vm_image": {"type": "str", "len": "64", "nulls": "NO"},
"vm_keep_alive": {"type": "int"},
"vm_keyname": {"type": "str", "len": "64", "nulls": "YES"},
"vm_network": {"type": "str", "len": "64", "nulls": "NO"},
"vm_security_groups": {"type": "str", "len": "128", "nulls": "YES"},
"error_count": {"type": "int"},
"error_time": {"type": "int"},
"machine_subprocess_pid": {"type": "int"}
}
},
"csv2_configuration": {
"keys": [
"category",
"config_key"
],
"columns": {
"category": {"type": "str", "len": "32", "nulls": "NO"},
"config_key": {"type": "str", "len": "32", "nulls": "NO"},
"config_type": {"type": "str", "len": "16", "nulls": "NO"},
"config_value": {"type": "str", "len": "128", "nulls": "YES"}
}
},
"csv2_group_metadata": {
"keys": [
"group_name",
"metadata_name"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"metadata_name": {"type": "str", "len": "64", "nulls": "NO"},
"enabled": {"type": "int"},
"priority": {"type": "int"},
"metadata": {"type": "str", "nulls": "NO"},
"mime_type": {"type": "str", "len": "128", "nulls": "NO"}
}
},
"csv2_group_metadata_exclusions": {
"keys": [
"group_name",
"metadata_name",
"cloud_name"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"metadata_name": {"type": "str", "len": "64", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"}
}
},
"csv2_groups": {
"keys": [
"group_name"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"htcondor_fqdn": {"type": "str", "len": "128", "nulls": "YES"},
"htcondor_host_id": {"type": "int"},
"htcondor_gsi_dn": {"type": "str", "len": "128", "nulls": "YES"},
"htcondor_gsi_eol": {"type": "int"},
"htcondor_container_hostname": {"type": "str", "len": "128", "nulls": "YES"},
"htcondor_other_submitters": {"type": "str", "len": "128", "nulls": "YES"},
"job_cpus": {"type": "int"},
"job_ram": {"type": "int"},
"job_disk": {"type": "int"},
"job_scratch": {"type": "int"},
"job_swap": {"type": "int"},
"vm_flavor": {"type": "str", "len": "64", "nulls": "NO"},
"vm_image": {"type": "str", "len": "64", "nulls": "NO"},
"vm_keep_alive": {"type": "int"},
"vm_keyname": {"type": "str", "len": "64", "nulls": "YES"},
"vm_network": {"type": "str", "len": "64", "nulls": "NO"},
"vm_security_groups": {"type": "str", "len": "128", "nulls": "YES"}
}
},
"csv2_image_cache": {
"keys": [
"image_name",
"checksum"
],
"columns": {
"image_name": {"type": "str", "len": "256", "nulls": "NO"},
"checksum": {"type": "str", "len": "64", "nulls": "NO"},
"container_format": {"type": "str", "len": "128", "nulls": "NO"},
"disk_format": {"type": "str", "len": "128", "nulls": "YES"},
"downloaded": {"type": "str", "nulls": "NO"}
}
},
"csv2_image_pull_requests": {
"keys": [
"tx_id"
],
"columns": {
"tx_id": {"type": "str", "len": "16", "nulls": "NO"},
"target_group_name": {"type": "str", "len": "128", "nulls": "NO"},
"target_cloud_name": {"type": "str", "len": "128", "nulls": "NO"},
"image_name": {"type": "str", "len": "128", "nulls": "NO"},
"image_id": {"type": "str", "len": "128", "nulls": "NO"},
"checksum": {"type": "str", "len": "64", "nulls": "NO"},
"status": {"type": "str", "len": "32", "nulls": "NO"},
"message": {"type": "str", "len": "512", "nulls": "YES"},
"request_time": {"type": "str", "nulls": "NO"},
"requester": {"type": "str", "len": "64", "nulls": "NO"}
}
},
"csv2_image_transactions": {
"keys": [
"tx_id"
],
"columns": {
"tx_id": {"type": "str", "len": "16", "nulls": "NO"},
"status": {"type": "str", "len": "128", "nulls": "NO"},
"message": {"type": "str", "len": "128", "nulls": "YES"},
"target_group_name": {"type": "str", "len": "128", "nulls": "NO"},
"target_cloud_name": {"type": "str", "len": "128", "nulls": "NO"},
"image_name": {"type": "str", "len": "128", "nulls": "NO"},
"image_id": {"type": "str", "len": "128", "nulls": "NO"},
"checksum": {"type": "str", "len": "64", "nulls": "NO"},
"request_time": {"type": "str", "nulls": "NO"},
"requester": {"type": "str", "len": "64", "nulls": "NO"}
}
},
"csv2_mime_types": {
"keys": [
"mime_type"
],
"columns": {
"mime_type": {"type": "str", "len": "32", "nulls": "NO"}
}
},
"csv2_service_catalog": {
"keys": [
"provider",
"host_id"
],
"columns": {
"provider": {"type": "str", "len": "64", "nulls": "NO"},
"host_id": {"type": "int"},
"last_updated": {"type": "float"},
"last_error": {"type": "float"},
"error_message": {"type": "str", "len": "512", "nulls": "YES"},
"counter": {"type": "int"}
}
},
"csv2_service_providers": {
"keys": [
"provider"
],
"columns": {
"provider": {"type": "str", "len": "64", "nulls": "NO"},
"service": {"type": "str", "len": "64", "nulls": "NO"},
"alias": {"type": "str", "len": "16", "nulls": "YES"},
"alias_priority": {"type": "int"}
}
},
"csv2_signal_log": {
"keys": [
"timestamp",
"fqdn",
"pid",
"event",
"action"
],
"columns": {
"timestamp": {"type": "float"},
"fqdn": {"type": "str", "len": "128", "nulls": "NO"},
"pid": {"type": "int"},
"event": {"type": "str", "len": "64", "nulls": "NO"},
"action": {"type": "str", "len": "64", "nulls": "NO"},
"signame": {"type": "str", "len": "16", "nulls": "NO"},
"caller": {"type": "str", "len": "256", "nulls": "NO"}
}
},
"csv2_user": {
"keys": [
"username"
],
"columns": {
"username": {"type": "str", "len": "32", "nulls": "NO"},
"cert_cn": {"type": "str", "len": "128", "nulls": "YES"},
"password": {"type": "str", "len": "128", "nulls": "NO"},
"is_superuser": {"type": "int"},
"join_date": {"type": "str", "nulls": "NO"},
"default_group": {"type": "str", "len": "32", "nulls": "YES"},
"flag_global_status": {"type": "int"},
"flag_jobs_by_target_alias": {"type": "int"},
"flag_show_foreign_global_vms": {"type": "int"},
"flag_show_slot_detail": {"type": "int"},
"flag_show_slot_flavors": {"type": "int"},
"status_refresh_interval": {"type": "int"}
}
},
"csv2_user_groups": {
"keys": [
"username",
"group_name"
],
"columns": {
"username": {"type": "str", "len": "32", "nulls": "NO"},
"group_name": {"type": "str", "len": "32", "nulls": "NO"}
}
},
"csv2_vms": {
"keys": [
"group_name",
"cloud_name",
"vmid"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"target_alias": {"type": "str", "len": "32", "nulls": "YES"},
"region": {"type": "str", "len": "32", "nulls": "YES"},
"vmid": {"type": "str", "len": "128", "nulls": "NO"},
"spot_instance": {"type": "int"},
"instance_id": {"type": "str", "len": "64", "nulls": "YES"},
"cloud_type": {"type": "str", "len": "64", "nulls": "YES"},
"vm_ips": {"type": "str", "len": "128", "nulls": "YES"},
"vm_floating_ips": {"type": "str", "len": "128", "nulls": "YES"},
"auth_url": {"type": "str", "len": "128", "nulls": "NO"},
"project": {"type": "str", "len": "128", "nulls": "NO"},
"hostname": {"type": "str", "len": "128", "nulls": "NO"},
"keep_alive": {"type": "int"},
"start_time": {"type": "int"},
"status": {"type": "str", "len": "32", "nulls": "YES"},
"flavor_id": {"type": "str", "len": "128", "nulls": "YES"},
"image_id": {"type": "str", "len": "128", "nulls": "YES"},
"task": {"type": "str", "len": "32", "nulls": "YES"},
"power_status": {"type": "int"},
"manual_control": {"type": "int"},
"htcondor_startd_errors": {"type": "str", "len": "256", "nulls": "YES"},
"htcondor_startd_time": {"type": "int"},
"htcondor_partitionable_slots": {"type": "int"},
"htcondor_dynamic_slots": {"type": "int"},
"htcondor_slots_timestamp": {"type": "int"},
"retire": {"type": "int"},
"retire_time": {"type": "int"},
"terminate": {"type": "int"},
"terminate_time": {"type": "int"},
"status_changed_time": {"type": "int"},
"last_updated": {"type": "int"},
"updater": {"type": "str", "len": "128", "nulls": "YES"}
}
},
"csv2_vms_foreign": {
"keys": [
"authurl",
"region",
"project",
"flavor_id"
],
"columns": {
"authurl": {"type": "str", "len": "128", "nulls": "NO"},
"region": {"type": "str", "len": "32", "nulls": "NO"},
"project": {"type": "str", "len": "32", "nulls": "NO"},
"flavor_id": {"type": "str", "len": "128", "nulls": "NO"},
"count": {"type": "int"},
"cloud_type": {"type": "str", "len": "32", "nulls": "YES"}
}
},
"django_admin_log": {
"keys": [
"id"
],
"columns": {
"id": {"type": "int"},
"action_time": {"type": "str", "nulls": "NO"},
"object_id": {"type": "str", "nulls": "YES"},
"object_repr": {"type": "str", "len": "200", "nulls": "NO"},
"action_flag": {"type": "int"},
"change_message": {"type": "str", "nulls": "NO"},
"content_type_id": {"type": "int"},
"user_id": {"type": "int"}
}
},
"django_content_type": {
"keys": [
"id"
],
"columns": {
"id": {"type": "int"},
"app_label": {"type": "str", "len": "100", "nulls": "NO"},
"model": {"type": "str", "len": "100", "nulls": "NO"}
}
},
"django_migrations": {
"keys": [
"id"
],
"columns": {
"id": {"type": "int"},
"app": {"type": "str", "len": "255", "nulls": "NO"},
"name": {"type": "str", "len": "255", "nulls": "NO"},
"applied": {"type": "str", "nulls": "NO"}
}
},
"django_session": {
"keys": [
"session_key"
],
"columns": {
"session_key": {"type": "str", "len": "40", "nulls": "NO"},
"session_data": {"type": "str", "nulls": "NO"},
"expire_date": {"type": "str", "nulls": "NO"}
}
},
"ec2_image_filters": {
"keys": [
"group_name",
"cloud_name"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"owner_aliases": {"type": "str", "len": "128", "nulls": "YES"},
"owner_ids": {"type": "str", "len": "128", "nulls": "YES"},
"like": {"type": "str", "len": "128", "nulls": "YES"},
"not_like": {"type": "str", "len": "128", "nulls": "YES"},
"operating_systems": {"type": "str", "len": "128", "nulls": "YES"},
"architectures": {"type": "str", "len": "128", "nulls": "YES"}
}
},
"ec2_image_well_known_owner_aliases": {
"keys": [
"alias"
],
"columns": {
"alias": {"type": "str", "len": "32", "nulls": "NO"}
}
},
"ec2_images": {
"keys": [
"region",
"id",
"borrower_id"
],
"columns": {
"region": {"type": "str", "len": "32", "nulls": "NO"},
"id": {"type": "str", "len": "128", "nulls": "NO"},
"borrower_id": {"type": "str", "len": "32", "nulls": "NO"},
"owner_id": {"type": "str", "len": "32", "nulls": "YES"},
"owner_alias": {"type": "str", "len": "64", "nulls": "YES"},
"disk_format": {"type": "str", "len": "128", "nulls": "YES"},
"size": {"type": "int"},
"image_location": {"type": "str", "len": "512", "nulls": "YES"},
"visibility": {"type": "str", "len": "128", "nulls": "YES"},
"name": {"type": "str", "len": "256", "nulls": "YES"},
"description": {"type": "str", "len": "256", "nulls": "YES"},
"last_updated": {"type": "int"}
}
},
"ec2_instance_status_codes": {
"keys": [
"ec2_state"
],
"columns": {
"ec2_state": {"type": "str", "len": "32", "nulls": "NO"},
"csv2_state": {"type": "str", "len": "32", "nulls": "NO"}
}
},
"ec2_instance_type_filters": {
"keys": [
"group_name",
"cloud_name"
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"families": {"type": "str", "len": "128", "nulls": "YES"},
"operating_systems": {"type": "str", "len": "128", "nulls": "YES"},
"processors": {"type": "str", "len": "128", "nulls": "YES"},
"processor_manufacturers": {"type": "str", "len": "128", "nulls": "YES"},
"cores": {"type": "str", "len": "32", "nulls": "YES"},
"memory_min_gigabytes_per_core": {"type": "int"},
"memory_max_gigabytes_per_core": {"type": "int"}
}
},
"ec2_instance_types": {
"keys": [
"region",
"instance_type",
"operating_system"
],
"columns": {
"region": {"type": "str", "len": "32", "nulls": "NO"},
"instance_type": {"type": "str", "len": "32", "nulls": "NO"},
"operating_system": {"type": "str", "len": "32", "nulls": "NO"},
"instance_family": {"type": "str", "len": "32", "nulls": "YES"},
"processor": {"type": "str", "len": "64", "nulls": "YES"},
"storage": {"type": "str", "len": "32", "nulls": "YES"},
"cores": {"type": "int"},
"memory": {"type": "float"},
"cost_per_hour": {"type": "float"}
}
},
"ec2_regions": {
"keys": [
"region"
],
"columns": {
"region": {"type": "str", "len": "64", "nulls": "NO"},
"location": {"type": "str", "len": "64", "nulls": "NO"},
"endpoint": {"type": "str", "len": "128", "nulls": "NO"}
}
},
"view_active_resource_shortfall": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"target_alias": {"type": "str", "len": "32", "nulls": "YES"},
"request_cores": {"type": "int"},
"active_cores": {"type": "int"},
"shortfall_cores": {"type": "int"},
"request_disk": {"type": "int"},
"active_disk": {"type": "int"},
"shortfall_disk": {"type": "int"},
"request_ram": {"type": "int"},
"active_ram": {"type": "int"},
"shortfall_ram": {"type": "int"},
"starting": {"type": "int"},
"unregistered": {"type": "int"},
"idle": {"type": "int"},
"running": {"type": "int"}
}
},
"view_available_resources": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_priority": {"type": "int"},
"region": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_type": {"type": "str", "len": "64", "nulls": "NO"},
"htcondor_fqdn": {"type": "str", "len": "128", "nulls": "YES"},
"htcondor_container_hostname": {"type": "str", "len": "128", "nulls": "YES"},
"htcondor_other_submitters": {"type": "str", "len": "128", "nulls": "YES"},
"vm_boot_volume": {"type": "str", "len": "64", "nulls": "YES"},
"spot_price": {"type": "float"},
"authurl": {"type": "str", "len": "128", "nulls": "NO"},
"cacertificate": {"type": "str", "nulls": "YES"},
"project_domain_name": {"type": "str", "len": "20", "nulls": "NO"},
"project_domain_id": {"type": "str", "len": "64", "nulls": "NO"},
"project": {"type": "str", "len": "128", "nulls": "NO"},
"user_domain_name": {"type": "str", "len": "20", "nulls": "NO"},
"user_domain_id": {"type": "str", "len": "64", "nulls": "NO"},
"username": {"type": "str", "len": "20", "nulls": "NO"},
"password": {"type": "str", "nulls": "NO"},
"default_flavor": {"type": "str", "len": "97", "nulls": "YES"},
"default_image": {"type": "str", "len": "64", "nulls": "YES"},
"default_keep_alive": {"type": "int"},
"default_keyname": {"type": "str", "len": "64", "nulls": "YES"},
"default_network": {"type": "str", "len": "64", "nulls": "YES"},
"default_security_groups": {"type": "str", "len": "128", "nulls": "YES"},
"VMs": {"type": "int"},
"VMs_max": {"type": "int"},
"cores_ctl": {"type": "int"},
"cores_softmax": {"type": "int"},
"cores_limit": {"type": "int"},
"cores_max": {"type": "int"},
"cores_used": {"type": "int"},
"cores_foreign": {"type": "int"},
"disk_used": {"type": "int"},
"ram_ctl": {"type": "int"},
"ram_max": {"type": "int"},
"ram_limit": {"type": "float"},
"ram_used": {"type": "int"},
"ram_foreign": {"type": "float"},
"swap_used": {"type": "int"},
"flavor": {"type": "str", "len": "161", "nulls": "YES"},
"flavor_id": {"type": "str", "len": "128", "nulls": "NO"},
"volumes_max": {"type": "int"},
"volumes_used": {"type": "int"},
"volume_gigs_max": {"type": "int"},
"volume_gigs_used": {"type": "int"},
"flavor_slots": {"type": "int"},
"flavor_cores": {"type": "int"},
"flavor_disk": {"type": "int"},
"flavor_ram": {"type": "int"},
"flavor_swap": {"type": "int"},
"auth_type": {"type": "str", "len": "32", "nulls": "YES"},
"app_credentials": {"type": "str", "len": "128", "nulls": "YES"},
"app_credentials_secret": {"type": "str", "len": "128", "nulls": "YES"},
"flavor_VMs": {"type": "int"},
"flavor_starting": {"type": "int"},
"flavor_unregistered": {"type": "int"},
"flavor_idle": {"type": "int"},
"flavor_running": {"type": "int"},
"flavor_retiring": {"type": "int"},
"flavor_error": {"type": "int"},
"flavor_manual": {"type": "int"},
"updater": {"type": "str", "nulls": "YES"},
"worker_cert": {"type": "str", "nulls": "NO"},
"worker_key": {"type": "str", "nulls": "NO"}
}
},
"view_cloud_aliases": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"alias_name": {"type": "str", "len": "32", "nulls": "NO"},
"clouds": {"type": "str", "nulls": "YES"}
}
},
"view_cloud_status": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"app_credentials_expiry": {"type": "int"},
"VMs": {"type": "int"},
"VMs_manual": {"type": "int"},
"VMs_in_error": {"type": "int"},
"VMs_starting": {"type": "int"},
"VMs_retiring": {"type": "int"},
"VMs_unregistered": {"type": "int"},
"VMs_idle": {"type": "int"},
"VMs_running": {"type": "int"},
"cores_native": {"type": "int"},
"ram_native": {"type": "float"},
"slot_count": {"type": "int"},
"slot_core_count": {"type": "int"},
"slot_idle_core_count": {"type": "int"},
"Foreign_VMs": {"type": "int"},
"enabled": {"type": "int"},
"communication_up": {"type": "int"},
"communication_rt": {"type": "int"},
"cores_ctl": {"type": "int"},
"cores_limit": {"type": "int"},
"VMs_quota": {"type": "int"},
"VMs_native_foreign": {"type": "int"},
"cores_quota": {"type": "int"},
"cores_soft_quota": {"type": "int"},
"cores_foreign": {"type": "int"},
"cores_native_foreign": {"type": "int"},
"ram_ctl": {"type": "int"},
"ram_limit": {"type": "int"},
"ram_quota": {"type": "int"},
"ram_foreign": {"type": "float"},
"ram_native_foreign": {"type": "float"},
"volume_gigs_max": {"type": "int"},
"volume_gigs_used": {"type": "int"}
}
},
"view_cloud_status_flavor_slot_detail": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"cloud_name": {"type": "str", "len": "32", "nulls": "YES"},
"flavor": {"type": "str", "len": "46", "nulls": "YES"},
"slot_type": {"type": "int"},
"slot_id": {"type": "str", "len": "380", "nulls": "YES"},
"slot_count": {"type": "int"},
"core_count": {"type": "int"}
}
},
"view_cloud_status_flavor_slot_detail_summary": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"cloud_name": {"type": "str", "len": "32", "nulls": "YES"},
"flavor": {"type": "str", "len": "46", "nulls": "YES"},
"slot_type": {"type": "int"},
"slot_count": {"type": "int"},
"core_count": {"type": "int"}
}
},
"view_cloud_status_flavor_slot_summary": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"cloud_name": {"type": "str", "len": "32", "nulls": "YES"},
"flavor": {"type": "str", "len": "46", "nulls": "YES"},
"busy": {"type": "int"},
"idle": {"type": "int"},
"idle_percent": {"type": "int"}
}
},
"view_cloud_status_slot_detail": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"cloud_name": {"type": "str", "len": "32", "nulls": "YES"},
"slot_type": {"type": "int"},
"slot_id": {"type": "str", "len": "380", "nulls": "YES"},
"slot_count": {"type": "int"},
"core_count": {"type": "int"}
}
},
"view_cloud_status_slot_detail_summary": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"cloud_name": {"type": "str", "len": "32", "nulls": "YES"},
"slot_type": {"type": "int"},
"slot_count": {"type": "int"},
"core_count": {"type": "int"}
}
},
"view_cloud_status_slot_summary": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"cloud_name": {"type": "str", "len": "32", "nulls": "YES"},
"busy": {"type": "int"},
"idle": {"type": "int"},
"idle_percent": {"type": "int"}
}
},
"view_clouds": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"enabled": {"type": "int"},
"cloud_priority": {"type": "int"},
"spot_price": {"type": "float"},
"vm_boot_volume": {"type": "str", "len": "64", "nulls": "YES"},
"vm_flavor": {"type": "str", "len": "64", "nulls": "NO"},
"vm_image": {"type": "str", "len": "64", "nulls": "NO"},
"vm_keep_alive": {"type": "int"},
"vm_keyname": {"type": "str", "len": "64", "nulls": "YES"},
"vm_network": {"type": "str", "len": "64", "nulls": "NO"},
"vm_security_groups": {"type": "str", "len": "128", "nulls": "YES"},
"userid": {"type": "str", "len": "64", "nulls": "YES"},
"auth_type": {"type": "str", "len": "32", "nulls": "YES"},
"app_credentials": {"type": "str", "len": "128", "nulls": "YES"},
"app_credentials_secret": {"type": "str", "len": "128", "nulls": "YES"},
"app_credentials_expiry": {"type": "int"},
"cascading_vm_flavor": {"type": "str", "len": "64", "nulls": "YES"},
"cascading_vm_image": {"type": "str", "len": "64", "nulls": "YES"},
"cascading_vm_keep_alive": {"type": "int"},
"cascading_vm_keyname": {"type": "str", "len": "64", "nulls": "YES"},
"cascading_vm_network": {"type": "str", "len": "64", "nulls": "YES"},
"cascading_vm_security_groups": {"type": "str", "len": "128", "nulls": "YES"},
"authurl": {"type": "str", "len": "128", "nulls": "NO"},
"project_domain_name": {"type": "str", "len": "20", "nulls": "NO"},
"project_domain_id": {"type": "str", "len": "64", "nulls": "NO"},
"project": {"type": "str", "len": "128", "nulls": "NO"},
"user_domain_name": {"type": "str", "len": "20", "nulls": "NO"},
"user_domain_id": {"type": "str", "len": "64", "nulls": "NO"},
"username": {"type": "str", "len": "20", "nulls": "NO"},
"password": {"type": "str", "nulls": "NO"},
"cacertificate": {"type": "str", "nulls": "YES"},
"region": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_type": {"type": "str", "len": "64", "nulls": "NO"},
"ec2_owner_id": {"type": "str", "len": "32", "nulls": "YES"},
"cores_ctl": {"type": "int"},
"cores_softmax": {"type": "int"},
"cores_max": {"type": "int"},
"cores_used": {"type": "int"},
"cores_foreign": {"type": "int"},
"cores_native": {"type": "int"},
"ram_ctl": {"type": "int"},
"ram_max": {"type": "int"},
"ram_used": {"type": "int"},
"ram_foreign": {"type": "int"},
"ram_native": {"type": "int"},
"instances_max": {"type": "int"},
"instances_used": {"type": "int"},
"floating_ips_max": {"type": "int"},
"floating_ips_used": {"type": "int"},
"security_groups_max": {"type": "int"},
"security_groups_used": {"type": "int"},
"server_groups_max": {"type": "int"},
"server_groups_used": {"type": "int"},
"image_meta_max": {"type": "int"},
"keypairs_max": {"type": "int"},
"personality_max": {"type": "int"},
"personality_size_max": {"type": "int"},
"security_group_rules_max": {"type": "int"},
"server_group_members_max": {"type": "int"},
"server_meta_max": {"type": "int"},
"cores_idle": {"type": "int"},
"ram_idle": {"type": "int"}
}
},
"view_clouds_with_metadata_info": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"enabled": {"type": "int"},
"cloud_priority": {"type": "int"},
"spot_price": {"type": "float"},
"vm_boot_volume": {"type": "str", "len": "64", "nulls": "YES"},
"vm_flavor": {"type": "str", "len": "64", "nulls": "NO"},
"vm_image": {"type": "str", "len": "64", "nulls": "NO"},
"vm_keep_alive": {"type": "int"},
"vm_keyname": {"type": "str", "len": "64", "nulls": "YES"},
"vm_network": {"type": "str", "len": "64", "nulls": "NO"},
"vm_security_groups": {"type": "str", "len": "128", "nulls": "YES"},
"userid": {"type": "str", "len": "64", "nulls": "YES"},
"auth_type": {"type": "str", "len": "32", "nulls": "YES"},
"app_credentials": {"type": "str", "len": "128", "nulls": "YES"},
"app_credentials_secret": {"type": "str", "len": "128", "nulls": "YES"},
"app_credentials_expiry": {"type": "int"},
"cascading_vm_flavor": {"type": "str", "len": "64", "nulls": "YES"},
"cascading_vm_image": {"type": "str", "len": "64", "nulls": "YES"},
"cascading_vm_keep_alive": {"type": "int"},
"cascading_vm_keyname": {"type": "str", "len": "64", "nulls": "YES"},
"cascading_vm_network": {"type": "str", "len": "64", "nulls": "YES"},
"cascading_vm_security_groups": {"type": "str", "len": "128", "nulls": "YES"},
"authurl": {"type": "str", "len": "128", "nulls": "NO"},
"project_domain_name": {"type": "str", "len": "20", "nulls": "NO"},
"project_domain_id": {"type": "str", "len": "64", "nulls": "NO"},
"project": {"type": "str", "len": "128", "nulls": "NO"},
"user_domain_name": {"type": "str", "len": "20", "nulls": "NO"},
"user_domain_id": {"type": "str", "len": "64", "nulls": "NO"},
"username": {"type": "str", "len": "20", "nulls": "NO"},
"password": {"type": "str", "nulls": "NO"},
"cacertificate": {"type": "str", "nulls": "YES"},
"region": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_type": {"type": "str", "len": "64", "nulls": "NO"},
"ec2_owner_id": {"type": "str", "len": "32", "nulls": "YES"},
"cores_ctl": {"type": "int"},
"cores_softmax": {"type": "int"},
"cores_max": {"type": "int"},
"cores_used": {"type": "int"},
"cores_foreign": {"type": "int"},
"cores_native": {"type": "int"},
"ram_ctl": {"type": "int"},
"ram_max": {"type": "int"},
"ram_used": {"type": "int"},
"ram_foreign": {"type": "int"},
"ram_native": {"type": "int"},
"instances_max": {"type": "int"},
"instances_used": {"type": "int"},
"floating_ips_max": {"type": "int"},
"floating_ips_used": {"type": "int"},
"security_groups_max": {"type": "int"},
"security_groups_used": {"type": "int"},
"server_groups_max": {"type": "int"},
"server_groups_used": {"type": "int"},
"image_meta_max": {"type": "int"},
"keypairs_max": {"type": "int"},
"personality_max": {"type": "int"},
"personality_size_max": {"type": "int"},
"security_group_rules_max": {"type": "int"},
"server_group_members_max": {"type": "int"},
"server_meta_max": {"type": "int"},
"cores_idle": {"type": "int"},
"ram_idle": {"type": "int"},
"metadata_name": {"type": "str", "len": "64", "nulls": "YES"},
"metadata_enabled": {"type": "int"},
"metadata_priority": {"type": "int"},
"metadata_mime_type": {"type": "str", "len": "128", "nulls": "YES"}
}
},
"view_clouds_with_metadata_names": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"enabled": {"type": "int"},
"cloud_priority": {"type": "int"},
"spot_price": {"type": "float"},
"vm_boot_volume": {"type": "str", "len": "64", "nulls": "YES"},
"vm_flavor": {"type": "str", "len": "64", "nulls": "NO"},
"vm_image": {"type": "str", "len": "64", "nulls": "NO"},
"vm_keep_alive": {"type": "int"},
"vm_keyname": {"type": "str", "len": "64", "nulls": "YES"},
"vm_network": {"type": "str", "len": "64", "nulls": "NO"},
"vm_security_groups": {"type": "str", "len": "128", "nulls": "YES"},
"userid": {"type": "str", "len": "64", "nulls": "YES"},
"auth_type": {"type": "str", "len": "32", "nulls": "YES"},
"app_credentials": {"type": "str", "len": "128", "nulls": "YES"},
"app_credentials_secret": {"type": "str", "len": "128", "nulls": "YES"},
"app_credentials_expiry": {"type": "int"},
"cascading_vm_flavor": {"type": "str", "len": "64", "nulls": "YES"},
"cascading_vm_image": {"type": "str", "len": "64", "nulls": "YES"},
"cascading_vm_keep_alive": {"type": "int"},
"cascading_vm_keyname": {"type": "str", "len": "64", "nulls": "YES"},
"cascading_vm_network": {"type": "str", "len": "64", "nulls": "YES"},
"cascading_vm_security_groups": {"type": "str", "len": "128", "nulls": "YES"},
"authurl": {"type": "str", "len": "128", "nulls": "NO"},
"project_domain_name": {"type": "str", "len": "20", "nulls": "NO"},
"project_domain_id": {"type": "str", "len": "64", "nulls": "NO"},
"project": {"type": "str", "len": "128", "nulls": "NO"},
"user_domain_name": {"type": "str", "len": "20", "nulls": "NO"},
"user_domain_id": {"type": "str", "len": "64", "nulls": "NO"},
"username": {"type": "str", "len": "20", "nulls": "NO"},
"password": {"type": "str", "nulls": "NO"},
"cacertificate": {"type": "str", "nulls": "YES"},
"region": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_type": {"type": "str", "len": "64", "nulls": "NO"},
"ec2_owner_id": {"type": "str", "len": "32", "nulls": "YES"},
"cores_ctl": {"type": "int"},
"cores_softmax": {"type": "int"},
"cores_max": {"type": "int"},
"cores_used": {"type": "int"},
"cores_foreign": {"type": "int"},
"cores_native": {"type": "int"},
"ram_ctl": {"type": "int"},
"ram_max": {"type": "int"},
"ram_used": {"type": "int"},
"ram_foreign": {"type": "int"},
"ram_native": {"type": "int"},
"instances_max": {"type": "int"},
"instances_used": {"type": "int"},
"floating_ips_max": {"type": "int"},
"floating_ips_used": {"type": "int"},
"security_groups_max": {"type": "int"},
"security_groups_used": {"type": "int"},
"server_groups_max": {"type": "int"},
"server_groups_used": {"type": "int"},
"image_meta_max": {"type": "int"},
"keypairs_max": {"type": "int"},
"personality_max": {"type": "int"},
"personality_size_max": {"type": "int"},
"security_group_rules_max": {"type": "int"},
"server_group_members_max": {"type": "int"},
"server_meta_max": {"type": "int"},
"cores_idle": {"type": "int"},
"ram_idle": {"type": "int"},
"flavor_exclusions": {"type": "str", "nulls": "YES"},
"flavor_names": {"type": "str", "nulls": "YES"},
"group_exclusions": {"type": "str", "nulls": "YES"},
"metadata_names": {"type": "str", "nulls": "YES"}
}
},
"view_condor_host": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"htcondor_fqdn": {"type": "str", "len": "128", "nulls": "YES"},
"vmid": {"type": "str", "len": "128", "nulls": "NO"},
"hostname": {"type": "str", "len": "128", "nulls": "NO"},
"primary_slots": {"type": "int"},
"dynamic_slots": {"type": "int"},
"retire": {"type": "int"},
"terminate": {"type": "int"},
"machine": {"type": "str", "len": "256", "nulls": "YES"},
"updater": {"type": "str", "len": "128", "nulls": "YES"},
"retire_time": {"type": "int"}
}
},
"view_condor_jobs_group_defaults_applied": {
"keys": [
],
"columns": {
"global_job_id": {"type": "str", "len": "128", "nulls": "NO"},
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"target_alias": {"type": "str", "len": "32", "nulls": "YES"},
"job_status": {"type": "int"},
"request_cpus": {"type": "int"},
"request_disk": {"type": "int"},
"request_ram": {"type": "int"},
"request_swap": {"type": "int"},
"requirements": {"type": "str", "len": "512", "nulls": "YES"},
"job_priority": {"type": "int"},
"cluster_id": {"type": "int"},
"proc_id": {"type": "int"},
"user": {"type": "str", "len": "512", "nulls": "YES"},
"image": {"type": "str", "nulls": "YES"},
"instance_type": {"type": "str", "len": "512", "nulls": "YES"},
"network": {"type": "str", "len": "512", "nulls": "YES"},
"keep_alive": {"type": "str", "len": "512", "nulls": "YES"},
"max_price": {"type": "str", "len": "512", "nulls": "YES"},
"user_data": {"type": "str", "len": "512", "nulls": "YES"},
"job_per_core": {"type": "int"},
"entered_current_status": {"type": "int"},
"q_date": {"type": "int"},
"hold_job_reason": {"type": "str", "len": "64", "nulls": "YES"},
"held_reason": {"type": "str", "len": "512", "nulls": "YES"},
"js_idle": {"type": "int"},
"js_running": {"type": "int"},
"js_completed": {"type": "int"},
"js_held": {"type": "int"},
"js_other": {"type": "int"}
}
},
"view_ec2_images": {
"keys": [
],
"columns": {
"region": {"type": "str", "len": "32", "nulls": "NO"},
"id": {"type": "str", "len": "128", "nulls": "NO"},
"borrower_id": {"type": "str", "len": "32", "nulls": "NO"},
"owner_id": {"type": "str", "len": "32", "nulls": "YES"},
"owner_alias": {"type": "str", "len": "64", "nulls": "YES"},
"disk_format": {"type": "str", "len": "128", "nulls": "YES"},
"size": {"type": "int"},
"image_location": {"type": "str", "len": "512", "nulls": "YES"},
"visibility": {"type": "str", "len": "128", "nulls": "YES"},
"name": {"type": "str", "len": "256", "nulls": "YES"},
"description": {"type": "str", "len": "256", "nulls": "YES"},
"last_updated": {"type": "int"},
"lower_location": {"type": "str", "len": "512", "nulls": "YES"},
"opsys": {"type": "str", "len": "8", "nulls": "YES"},
"arch": {"type": "str", "len": "5", "nulls": "YES"}
}
},
"view_ec2_instance_types": {
"keys": [
],
"columns": {
"region": {"type": "str", "len": "32", "nulls": "NO"},
"instance_type": {"type": "str", "len": "32", "nulls": "NO"},
"operating_system": {"type": "str", "len": "32", "nulls": "NO"},
"instance_family": {"type": "str", "len": "32", "nulls": "YES"},
"processor": {"type": "str", "len": "64", "nulls": "YES"},
"storage": {"type": "str", "len": "32", "nulls": "YES"},
"cores": {"type": "int"},
"memory": {"type": "float"},
"cost_per_hour": {"type": "float"},
"memory_per_core": {"type": "float"},
"processor_manufacturer": {"type": "str", "len": "64", "nulls": "YES"}
}
},
"view_foreign_flavors": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"cloud_name": {"type": "str", "len": "32", "nulls": "YES"},
"authurl": {"type": "str", "len": "128", "nulls": "NO"},
"region": {"type": "str", "len": "32", "nulls": "NO"},
"project": {"type": "str", "len": "128", "nulls": "NO"},
"flavor_id": {"type": "str", "len": "128", "nulls": "YES"},
"count": {"type": "int"},
"name": {"type": "str", "len": "128", "nulls": "YES"},
"cores": {"type": "int"},
"ram": {"type": "float"}
}
},
"view_foreign_resources": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"cloud_name": {"type": "str", "len": "32", "nulls": "YES"},
"count": {"type": "int"},
"cores": {"type": "int"},
"ram": {"type": "float"}
}
},
"view_groups_of_idle_jobs": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"target_alias": {"type": "str", "len": "32", "nulls": "YES"},
"instance_type": {"type": "str", "len": "512", "nulls": "YES"},
"requirements": {"type": "str", "len": "512", "nulls": "YES"},
"job_priority": {"type": "int"},
"user": {"type": "str", "len": "512", "nulls": "YES"},
"image": {"type": "str", "nulls": "YES"},
"network": {"type": "str", "len": "512", "nulls": "YES"},
"keep_alive": {"type": "str", "len": "512", "nulls": "YES"},
"max_price": {"type": "str", "len": "512", "nulls": "YES"},
"user_data": {"type": "str", "len": "512", "nulls": "YES"},
"job_per_core": {"type": "int"},
"request_cpus_min": {"type": "int"},
"request_cpus_max": {"type": "int"},
"request_cpus_total": {"type": "int"},
"request_disk_min": {"type": "int"},
"request_disk_max": {"type": "int"},
"request_disk_total": {"type": "int"},
"request_ram_min": {"type": "int"},
"request_ram_max": {"type": "int"},
"request_ram_total": {"type": "int"},
"request_swap_min": {"type": "int"},
"request_swap_max": {"type": "int"},
"request_swap_total": {"type": "int"},
"queue_date": {"type": "int"},
"idle": {"type": "int"},
"running": {"type": "int"},
"completed": {"type": "int"},
"held": {"type": "int"},
"other": {"type": "int"},
"flavors": {"type": "str", "nulls": "YES"}
}
},
"view_groups_with_metadata_info": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"htcondor_fqdn": {"type": "str", "len": "128", "nulls": "YES"},
"htcondor_container_hostname": {"type": "str", "len": "128", "nulls": "YES"},
"htcondor_other_submitters": {"type": "str", "len": "128", "nulls": "YES"},
"metadata_name": {"type": "str", "len": "64", "nulls": "YES"},
"metadata_enabled": {"type": "int"},
"metadata_priority": {"type": "int"},
"metadata_mime_type": {"type": "str", "len": "128", "nulls": "YES"}
}
},
"view_groups_with_metadata_names": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"htcondor_fqdn": {"type": "str", "len": "128", "nulls": "YES"},
"htcondor_container_hostname": {"type": "str", "len": "128", "nulls": "YES"},
"htcondor_other_submitters": {"type": "str", "len": "128", "nulls": "YES"},
"metadata_names": {"type": "str", "nulls": "YES"}
}
},
"view_idle_vms": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"come_alive": {"type": "str", "len": "128", "nulls": "YES"},
"job_alive": {"type": "str", "len": "128", "nulls": "YES"},
"error_delay": {"type": "str", "len": "128", "nulls": "YES"},
"keep_alive": {"type": "int"},
"vmid": {"type": "str", "len": "128", "nulls": "NO"},
"hostname": {"type": "str", "len": "128", "nulls": "NO"},
"primary_slots": {"type": "int"},
"dynamic_slots": {"type": "int"},
"retire": {"type": "int"},
"terminate": {"type": "int"},
"poller_status": {"type": "str", "len": "12", "nulls": "YES"},
"age": {"type": "int"}
}
},
"view_job_status": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"Jobs": {"type": "int"},
"Idle": {"type": "int"},
"Running": {"type": "int"},
"Completed": {"type": "int"},
"Held": {"type": "int"},
"Other": {"type": "int"},
"foreign": {"type": "int"},
"htcondor_fqdn": {"type": "str", "len": "128", "nulls": "YES"},
"state": {"type": "str", "len": "4", "nulls": "YES"},
"plotable_state": {"type": "str", "len": "1", "nulls": "YES"},
"error_message": {"type": "str", "len": "512", "nulls": "NO"},
"condor_days_left": {"type": "int"},
"worker_days_left": {"type": "int"}
}
},
"view_job_status_by_target_alias": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"target_alias": {"type": "str", "len": "32", "nulls": "YES"},
"Jobs": {"type": "int"},
"Idle": {"type": "int"},
"Running": {"type": "int"},
"Completed": {"type": "int"},
"Held": {"type": "int"},
"Other": {"type": "int"},
"foreign": {"type": "int"},
"htcondor_fqdn": {"type": "str", "len": "128", "nulls": "YES"},
"state": {"type": "str", "len": "4", "nulls": "YES"},
"plotable_state": {"type": "str", "len": "1", "nulls": "YES"},
"error_message": {"type": "str", "len": "512", "nulls": "NO"},
"condor_days_left": {"type": "int"},
"worker_days_left": {"type": "int"}
}
},
"view_metadata_collation": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"type": {"type": "str", "len": "5", "nulls": "NO"},
"priority": {"type": "int"},
"metadata_name": {"type": "str", "len": "64", "nulls": "YES"},
"mime_type": {"type": "str", "len": "128", "nulls": "YES"}
}
},
"view_metadata_collation_json": {
"keys": [
],
"columns": {
"group_metadata": {"type": "str", "nulls": "YES"}
}
},
"view_resource_contention": {
"keys": [
],
"columns": {
"authurl": {"type": "str", "len": "128", "nulls": "YES"},
"VMs": {"type": "int"},
"starting": {"type": "int"},
"unregistered": {"type": "int"},
"idle": {"type": "int"},
"running": {"type": "int"},
"retiring": {"type": "int"},
"manual": {"type": "int"},
"error": {"type": "int"}
}
},
"view_service_status": {
"keys": [
],
"columns": {
"alias": {"type": "str", "len": "16", "nulls": "YES"},
"state": {"type": "str", "len": "4", "nulls": "YES"},
"plotable_state": {"type": "str", "len": "1", "nulls": "YES"},
"error_message": {"type": "str", "len": "512", "nulls": "YES"}
}
},
"view_total_used_resources": {
"keys": [
],
"columns": {
"authurl": {"type": "str", "len": "128", "nulls": "YES"},
"region": {"type": "str", "len": "32", "nulls": "YES"},
"project": {"type": "str", "len": "128", "nulls": "YES"},
"VMs": {"type": "int"},
"cores": {"type": "int"},
"disk": {"type": "int"},
"ram": {"type": "int"},
"swap": {"type": "int"}
}
},
"view_user_groups": {
"keys": [
],
"columns": {
"username": {"type": "str", "len": "32", "nulls": "NO"},
"cert_cn": {"type": "str", "len": "128", "nulls": "YES"},
"password": {"type": "str", "len": "128", "nulls": "NO"},
"is_superuser": {"type": "int"},
"join_date": {"type": "str", "nulls": "NO"},
"flag_global_status": {"type": "int"},
"flag_jobs_by_target_alias": {"type": "int"},
"flag_show_foreign_global_vms": {"type": "int"},
"flag_show_slot_detail": {"type": "int"},
"flag_show_slot_flavors": {"type": "int"},
"status_refresh_interval": {"type": "int"},
"default_group": {"type": "str", "len": "32", "nulls": "YES"},
"user_groups": {"type": "str", "nulls": "YES"},
"available_groups": {"type": "str", "nulls": "YES"}
}
},
"view_user_groups_available": {
"keys": [
],
"columns": {
"username": {"type": "str", "len": "32", "nulls": "NO"},
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"available": {"type": "str", "len": "32", "nulls": "YES"}
}
},
"view_vm_kill_retire_over_quota": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "YES"},
"cloud_name": {"type": "str", "len": "32", "nulls": "YES"},
"cloud_type": {"type": "str", "len": "64", "nulls": "YES"},
"cores": {"type": "int"},
"cores_ctl": {"type": "int"},
"cores_softmax": {"type": "int"},
"cores_max": {"type": "int"},
"cores_native": {"type": "int"},
"cores_foreign": {"type": "int"},
"ram": {"type": "float"},
"ram_ctl": {"type": "int"},
"ram_max": {"type": "int"},
"ram_native": {"type": "float"},
"ram_foreign": {"type": "float"}
}
},
"view_vm_kill_retire_priority_age": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"vmid": {"type": "str", "len": "128", "nulls": "NO"},
"flavor_id": {"type": "str", "len": "128", "nulls": "YES"},
"machine": {"type": "str", "len": "256", "nulls": "YES"},
"killed": {"type": "int"},
"retired": {"type": "int"},
"priority": {"type": "int"},
"flavor_cores": {"type": "int"},
"flavor_ram": {"type": "int"}
}
},
"view_vm_kill_retire_priority_idle": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"vmid": {"type": "str", "len": "128", "nulls": "NO"},
"flavor_id": {"type": "str", "len": "128", "nulls": "YES"},
"machine": {"type": "str", "len": "256", "nulls": "YES"},
"killed": {"type": "int"},
"retired": {"type": "int"},
"priority": {"type": "int"},
"flavor_cores": {"type": "int"},
"flavor_ram": {"type": "int"}
}
},
"view_vms": {
"keys": [
],
"columns": {
"group_name": {"type": "str", "len": "32", "nulls": "NO"},
"cloud_name": {"type": "str", "len": "32", "nulls": "NO"},
"target_alias": {"type": "str", "len": "32", "nulls": "YES"},
"region": {"type": "str", "len": "32", "nulls": "YES"},
"vmid": {"type": "str", "len": "128", "nulls": "NO"},
"spot_instance": {"type": "int"},
"instance_id": {"type": "str", "len": "64", "nulls": "YES"},
"cloud_type": {"type": "str", "len": "64", "nulls": "YES"},
"vm_ips": {"type": "str", "len": "128", "nulls": "YES"},
"vm_floating_ips": {"type": "str", "len": "128", "nulls": "YES"},
"auth_url": {"type": "str", "len": "128", "nulls": "NO"},
"project": {"type": "str", "len": "128", "nulls": "NO"},
"hostname": {"type": "str", "len": "128", "nulls": "NO"},
"keep_alive": {"type": "int"},
"start_time": {"type": "int"},
"status": {"type": "str", "len": "32", "nulls": "YES"},
"flavor_id": {"type": "str", "len": "128", "nulls": "YES"},
"image_id": {"type": "str", "len": "128", "nulls": "YES"},
"task": {"type": "str", "len": "32", "nulls": "YES"},
"power_status": {"type": "int"},
"manual_control": {"type": "int"},
"htcondor_startd_errors": {"type": "str", "len": "256", "nulls": "YES"},
"htcondor_startd_time": {"type": "int"},
"htcondor_partitionable_slots": {"type": "int"},
"htcondor_dynamic_slots": {"type": "int"},
"htcondor_slots_timestamp": {"type": "int"},
"retire": {"type": "int"},
"retire_time": {"type": "int"},
"terminate": {"type": "int"},
"terminate_time": {"type": "int"},
"status_changed_time": {"type": "int"},
"last_updated": {"type": "int"},
"updater": {"type": "str", "len": "128", "nulls": "YES"},
"flavor_name": {"type": "str", "len": "128", "nulls": "YES"},
"condor_slots": {"type": "int"},
"condor_slots_used": {"type": "int"},
"machine": {"type": "str", "len": "256", "nulls": "YES"},
"my_current_time": {"type": "int"},
"entered_current_state": {"type": "int"},
"idle_time": {"type": "int"},
"foreign_vm": {"type": "int"},
"cores": {"type": "int"},
"disk": {"type": "int"},
"ram": {"type": "int"},
"swap": {"type": "int"},
"poller_status": {"type": "str", "len": "12", "nulls": "YES"},
"age": {"type": "int"}
}
}
}
| 44.820862
| 90
| 0.402788
| 7,481
| 79,064
| 4.074322
| 0.044646
| 0.138944
| 0.183071
| 0.070801
| 0.861253
| 0.830512
| 0.796883
| 0.728806
| 0.704298
| 0.680709
| 0
| 0.026174
| 0.329278
| 79,064
| 1,763
| 91
| 44.846285
| 0.548595
| 0
| 0
| 0.678389
| 0
| 0
| 0.402737
| 0.033593
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.004538
| 0
| 0
| 0
| 0.001134
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f07bb56d39143762bbdb8d5d91d089c6001f8f26
| 9,703
|
py
|
Python
|
fdk_client/platform/models/ConfigurationValidator.py
|
kavish-d/fdk-client-python
|
a1023eb530473322cb52e095fc4ceb226c1e6037
|
[
"MIT"
] | null | null | null |
fdk_client/platform/models/ConfigurationValidator.py
|
kavish-d/fdk-client-python
|
a1023eb530473322cb52e095fc4ceb226c1e6037
|
[
"MIT"
] | null | null | null |
fdk_client/platform/models/ConfigurationValidator.py
|
kavish-d/fdk-client-python
|
a1023eb530473322cb52e095fc4ceb226c1e6037
|
[
"MIT"
] | null | null | null |
"""Class Validators."""
from marshmallow import fields, Schema
from marshmallow.validate import OneOf
from ..enums import *
from ..models.BaseSchema import BaseSchema
class ConfigurationValidator:
    """Request validators for the platform Configuration API.

    Each nested class is the marshmallow validation schema for one
    Configuration endpoint and is named after the SDK method it guards.
    Fields mirror that endpoint's path/query parameters; every field is
    declared ``required=False``.

    NOTE(review): this class follows a generated, declarative pattern
    (one schema per endpoint, repeated ``company_id``/``application_id``
    fields) — keep edits mechanical and consistent across all schemas.
    """
    class getBuildConfig(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        platform_type = fields.Str(required=False)
    class updateBuildConfig(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        platform_type = fields.Str(required=False)
    class getPreviousVersions(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        platform_type = fields.Str(required=False)
    class getAppFeatures(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class updateAppFeatures(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getAppBasicDetails(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class updateAppBasicDetails(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getAppContactInfo(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class updateAppContactInfo(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getAppApiTokens(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class updateAppApiTokens(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getAppCompanies(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
    class getAppStores(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
    class getInventoryConfig(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class updateInventoryConfig(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class partiallyUpdateInventoryConfig(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getAppCurrencyConfig(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class updateAppCurrencyConfig(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getAppSupportedCurrency(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getOrderingStoresByFilter(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
    class updateOrderingStoreConfig(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getStaffOrderingStores(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
        q = fields.Str(required=False)
    class getDomains(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class addDomain(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class removeDomainById(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
        id = fields.Str(required=False)
    class changeDomainType(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getDomainStatus(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class createApplication(BaseSchema):
        company_id = fields.Str(required=False)
    class getApplications(BaseSchema):
        company_id = fields.Str(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
        q = fields.Str(required=False)
    class getApplicationById(BaseSchema):
        company_id = fields.Str(required=False)
        application_id = fields.Str(required=False)
    class getCurrencies(BaseSchema):
        company_id = fields.Str(required=False)
    class getDomainAvailibility(BaseSchema):
        company_id = fields.Str(required=False)
    class getIntegrationById(BaseSchema):
        company_id = fields.Str(required=False)
        id = fields.Int(required=False)
    class getAvailableOptIns(BaseSchema):
        company_id = fields.Str(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
    class getSelectedOptIns(BaseSchema):
        company_id = fields.Str(required=False)
        level = fields.Str(required=False)
        uid = fields.Int(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
    class getIntegrationLevelConfig(BaseSchema):
        company_id = fields.Str(required=False)
        id = fields.Str(required=False)
        level = fields.Str(required=False)
        opted = fields.Boolean(required=False)
        check_permission = fields.Boolean(required=False)
    class updateLevelIntegration(BaseSchema):
        company_id = fields.Str(required=False)
        id = fields.Str(required=False)
        level = fields.Str(required=False)
    class getIntegrationByLevelId(BaseSchema):
        company_id = fields.Str(required=False)
        id = fields.Str(required=False)
        level = fields.Str(required=False)
        uid = fields.Int(required=False)
    class updateLevelUidIntegration(BaseSchema):
        company_id = fields.Str(required=False)
        id = fields.Str(required=False)
        level = fields.Str(required=False)
        uid = fields.Int(required=False)
    class getLevelActiveIntegrations(BaseSchema):
        company_id = fields.Str(required=False)
        id = fields.Str(required=False)
        level = fields.Str(required=False)
        uid = fields.Int(required=False)
    class getBrandsByCompany(BaseSchema):
        company_id = fields.Str(required=False)
        q = fields.Str(required=False)
    class getCompanyByBrands(BaseSchema):
        company_id = fields.Str(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
    class getStoreByBrands(BaseSchema):
        company_id = fields.Str(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
    class getOtherSellerApplications(BaseSchema):
        company_id = fields.Str(required=False)
        page_no = fields.Int(required=False)
        page_size = fields.Int(required=False)
    class getOtherSellerApplicationById(BaseSchema):
        company_id = fields.Str(required=False)
        id = fields.Str(required=False)
    class optOutFromApplication(BaseSchema):
        company_id = fields.Str(required=False)
        id = fields.Str(required=False)
| 24.943445
| 57
| 0.57003
| 863
| 9,703
| 6.295481
| 0.090382
| 0.289527
| 0.294128
| 0.380637
| 0.794221
| 0.789435
| 0.789435
| 0.781152
| 0.748205
| 0.739186
| 0
| 0
| 0.351747
| 9,703
| 389
| 58
| 24.943445
| 0.863752
| 0.001752
| 0
| 0.686047
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.023256
| 0
| 0.296512
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
333aa939d24162fb8584e0f926cb9b4f295edc2f
| 2,236
|
py
|
Python
|
insights/parsers/neutron_server_log.py
|
mglantz/insights-core
|
6f20bbbe03f53ee786f483b2a28d256ff1ad0fd4
|
[
"Apache-2.0"
] | 121
|
2017-05-30T20:23:25.000Z
|
2022-03-23T12:52:15.000Z
|
insights/parsers/neutron_server_log.py
|
mglantz/insights-core
|
6f20bbbe03f53ee786f483b2a28d256ff1ad0fd4
|
[
"Apache-2.0"
] | 1,977
|
2017-05-26T14:36:03.000Z
|
2022-03-31T10:38:53.000Z
|
insights/parsers/neutron_server_log.py
|
mglantz/insights-core
|
6f20bbbe03f53ee786f483b2a28d256ff1ad0fd4
|
[
"Apache-2.0"
] | 244
|
2017-05-30T20:22:57.000Z
|
2022-03-26T10:09:39.000Z
|
"""
NeutronServerLog - file ``/var/log/neutron/server.log``
=======================================================
"""
from .. import LogFileOutput, parser
from insights.specs import Specs
@parser(Specs.neutron_server_log)
class NeutronServerLog(LogFileOutput):
    '''
    Read the ``/var/log/neutron/server.log`` file.

    .. note::
        Please refer to its super-class :class:`insights.core.LogFileOutput` for
        more usage information

    Sample log file::

        2016-09-13 05:56:45.155 30586 WARNING keystonemiddleware.auth_token [-] Identity response: {"error": {"message": "Could not find token: b45405915eb44e608885f894028d37b9", "code": 404, "title": "Not Found"}}
        2016-09-13 05:56:45.156 30586 WARNING keystonemiddleware.auth_token [-] Authorization failed for token
        2016-09-13 06:06:45.884 30588 WARNING keystonemiddleware.auth_token [-] Authorization failed for token
        2016-09-13 06:06:45.886 30588 WARNING keystonemiddleware.auth_token [-] Identity response: {"error": {"message": "Could not find token: fd482ef0ba1144bf944a0a6c2badcdf8", "code": 404, "title": "Not Found"}}
        2016-09-13 06:06:45.887 30588 WARNING keystonemiddleware.auth_token [-] Authorization failed for token
        2016-09-13 06:06:46.131 30586 WARNING keystonemiddleware.auth_token [-] Authorization failed for token
        2016-09-13 06:06:46.131 30586 WARNING keystonemiddleware.auth_token [-] Identity response: {"error": {"message": "Could not find token: bc029dbe33f84fbcb67ef7d592458e60", "code": 404, "title": "Not Found"}}
        2016-09-13 06:06:46.132 30586 WARNING keystonemiddleware.auth_token [-] Authorization failed for token

    Examples:
        >>> neutron_log = shared[NeutronServerLog]
        >>> neutron_log.get('Authorization')[0]['raw_message']
        '2016-09-13 05:56:45.156 30586 WARNING keystonemiddleware.auth_token [-] Authorization failed for token'
        >>> len(list(neutron_log.get_after(datetime.datetime(2016, 9, 13, 6, 0, 0))))
        6
        >>> neutron_log.get_after(datetime.datetime(2016, 9, 13, 6, 0, 0))[0]['raw_message']
        '2016-09-13 06:06:45.884 30588 WARNING keystonemiddleware.auth_token [-] Authorization failed for token'
    '''
    # No overrides needed: line storage and the ``get``/``get_after``
    # filtering shown in the examples are all inherited from LogFileOutput.
    pass
| 57.333333
| 214
| 0.687388
| 283
| 2,236
| 5.360424
| 0.268551
| 0.039552
| 0.052736
| 0.224127
| 0.715887
| 0.683586
| 0.657218
| 0.657218
| 0.644034
| 0.600527
| 0
| 0.16551
| 0.162343
| 2,236
| 38
| 215
| 58.842105
| 0.644421
| 0.882379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.6
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
d7baa8c81c56eeaf983845d5ff938a4845892106
| 9,954
|
py
|
Python
|
src/pmfuncs.py
|
modichirag/VI_reconstruction
|
64def5226a5723877a60943c29f592319bbe8e95
|
[
"MIT"
] | null | null | null |
src/pmfuncs.py
|
modichirag/VI_reconstruction
|
64def5226a5723877a60943c29f592319bbe8e95
|
[
"MIT"
] | null | null | null |
src/pmfuncs.py
|
modichirag/VI_reconstruction
|
64def5226a5723877a60943c29f592319bbe8e95
|
[
"MIT"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
import matplotlib.pyplot as plt
import tensorflow_probability as tfp
tfd = tfp.distributions
import sys, os
#sys.path.append('../../utils/')
sys.path.append('/mnt/home/cmodi/Research/Projects/flowpm-rk4')
import flowpm
from astropy.cosmology import Planck15
# from flowpm.tfpm import PerturbationGrowth
from flowpm import linear_field, lpt_init, nbody, cic_paint
from flowpm.utils import r2c3d, c2r3d
from flowpm.tfpower import linear_matter_power
from scipy.interpolate import InterpolatedUnivariateSpline as iuspline
#import tools
class Evolve():
    """Particle-mesh forward model built on flowpm.

    Evolves a linear (Gaussian) density field on an ``nc``^3 mesh in a
    periodic box of side ``bs`` up to scale factor ``af``, using either a
    full N-body integration (``donbody=True``) or a single LPT step
    (``order`` = 1 for Zel'dovich, 2 for 2LPT — TODO confirm against
    flowpm's ``lpt_init`` convention).

    Also provides conversions between three representations of the
    initial conditions:
      * ``z``   -- real-space whitened field,
      * ``zk``  -- its rFFT packed as real/imag stacked in a trailing axis,
      * ``lin`` -- linear density field (``z`` colored by sqrt(P(k))).
    """
    def __init__(self, nc, bs, a0=0.1, af=1., nsteps=5, donbody=False, order=2, cosmodict=None, dtype=np.float32):
        """
        Args:
            nc: mesh cells per side.
            bs: box side length (units must match the power spectrum's).
            a0: initial scale factor for the N-body integration.
            af: final scale factor of the output field.
            nsteps: number of N-body time steps (used only if donbody).
            donbody: if True run the full N-body solver; else LPT only.
            order: LPT order passed to flowpm's lpt_init.
            cosmodict: optional parameter overrides for flowpm's Planck15.
            dtype: dtype of the precomputed k / P(k) meshes.
        """
        self.nc, self.bs = nc, bs
        #self.ipklin = ipklin
        self.a0, self.af, self.nsteps = a0, af, nsteps
        self.stages = np.linspace(a0, af, nsteps, endpoint=True)
        self.donbody = donbody
        if cosmodict is None: self.cosmodict = flowpm.cosmology.Planck15().to_dict()
        else: self.cosmodict = cosmodict
        self.order = order
        self.dtype = dtype
        self._build()
    def _build(self):
        # Precompute the cosmology, the linear power spectrum spline, and
        # the |k| and P(k) meshes used by the z <-> lin conversions.
        nc, bs = self.nc, self.bs
        self.cosmo = flowpm.cosmology.Planck15(**self.cosmodict)
        self.klin = np.logspace(-5, 3, 2000)
        self.plin = linear_matter_power(self.cosmo, self.klin)
        self.ipklin = iuspline(self.klin, self.plin)
        #kvec = tools.fftk((nc, nc, nc), boxsize=bs, symmetric=False)
        kvec = flowpm.kernels.fftk([nc, nc, nc], symmetric=False)
        self.kmesh = (sum((k*nc/bs)**2 for k in kvec)**0.5).astype(self.dtype)
        self.pkmesh = self.ipklin(self.kmesh).astype(self.dtype)
        # pkmesh restricted to the rFFT half-spectrum (last axis nc//2+1),
        # used by the packed-zk conversions below.
        self.pkmesh_r = self.ipklin(self.kmesh)[:, :, :nc//2+1].astype(self.dtype)
    @tf.function
    def pm(self, linear, cosmodict=None):
        """Run the forward model on ``linear`` and return the final
        CIC-painted density field (same shape as ``linear``)."""
        print("PM graph")
        if cosmodict is None:
            cosmo = flowpm.cosmology.Planck15(**self.cosmodict)
        else:
            cosmo = flowpm.cosmology.Planck15(**cosmodict)
        if self.donbody:
            print('Nobdy sim')
            state = lpt_init(cosmo, linear, a=self.a0, order=self.order)
            final_state = nbody(cosmo, state, self.stages, self.nc)
        else:
            print('ZA/2LPT sim')
            final_state = lpt_init(cosmo, linear, a=self.af, order=self.order)
        # final_state[0] holds the particle positions to paint.
        tfinal_field = cic_paint(tf.zeros_like(linear), final_state[0])
        return tfinal_field
    @tf.function
    def z_to_lin(self, z):
        """Color white noise ``z`` by sqrt(P(k)/bs^3) to get the linear
        field.  The nc**1.5 factor is an FFT normalization convention --
        TODO confirm it matches flowpm's linear_field."""
        whitec = r2c3d(z* self.nc**1.5)
        lineark = tf.multiply(
            whitec, tf.cast((self.pkmesh / (self.bs**3))**0.5, whitec.dtype))
        linear = c2r3d(lineark)
        return linear
    @tf.function
    def pmz(self, z):
        """Forward model directly from the whitened field ``z``."""
        print("PM graph")
        linear = self.z_to_lin(z)
        return self.pm(linear)
    @tf.function
    def pmzk(self, zk):
        """Forward model directly from the packed Fourier field ``zk``."""
        print("PM graph")
        linear = self.zk_to_lin(zk)
        return self.pm(linear)
    @tf.function
    def zk_to_z(self, zk):
        """Inverse of z_to_zk: unpack real/imag pairs and inverse-rFFT."""
        #print("zk to linl : ", zk.shape, self.pkmesh_r.shape)
        zkn = zk * (self.nc**3/2)**0.5
        zknc = tf.complex(zkn[..., 0], zkn[..., 1])
        z = tf.signal.irfft3d(zknc)
        return z
    @tf.function
    def z_to_zk(self, z):
        """rFFT ``z`` and pack real/imag parts into a trailing axis,
        rescaled by (nc^3/2)^0.5."""
        zknc = tf.signal.rfft3d(z)
        zkn0, zkn1 = tf.math.real(zknc), tf.math.imag(zknc)
        zkn = tf.stack([zkn0, zkn1], -1)
        zk = zkn / (self.nc**3/2.)**0.5
        return zk
    @tf.function
    def zk_to_link(self, zk):
        """Packed white-noise spectrum -> packed linear spectrum
        (multiplied by sqrt(P(k)/bs^3) on the rFFT half-grid)."""
        #print("zk to linl : ", zk.shape, self.pkmesh_r.shape)
        zkn = zk * (self.nc**3/2)**0.5
        link = zkn * (tf.expand_dims(self.pkmesh_r, -1)/self.bs**3)**0.5 * self.nc**1.5
        return link
    @tf.function
    def zk_to_lin(self, zk):
        """Packed white-noise spectrum -> real-space linear field."""
        #print("zk to lin : ", zk.shape)
        link = self.zk_to_link(zk)
        linkc = tf.complex(link[..., 0], link[..., 1])
        lin = tf.signal.irfft3d(linkc)
        return lin
    @tf.function
    def link_to_zk(self, link):
        """Inverse of zk_to_link: divide out sqrt(P(k)) and rescale."""
        link = link / self.nc**1.5
        zkn = link / (tf.expand_dims(tf.cast(self.pkmesh_r, link.dtype), -1) /self.bs**3)**0.5
        zk = zkn / (self.nc**3/2)**0.5
        return zk
    @tf.function
    def lin_to_zk(self, lin):
        """Real-space linear field -> packed white-noise spectrum."""
        linkc = tf.signal.rfft3d(lin)
        link0, link1 = tf.math.real(linkc), tf.math.imag(linkc)
        link = tf.stack([link0, link1], -1)
        return self.link_to_zk(link)
    @tf.function
    def zdist(self):
        """Prior distribution of the whitened field: standard normal."""
        return tfd.Normal(0, 1)
    #@tf.function
    #def zkdist(self):
    #self.scalezk = nc**1.5/2**0.5
    #return tf.Normal(0, self.scalezk)
class Evolve_bias():
    """Particle-mesh forward model with a quadratic bias field.

    NOTE(review): this class duplicates ``Evolve`` verbatim except for
    ``biasfield``, ``lin_to_z`` and the ``returnpos`` flag on ``pm`` --
    consider making it a subclass to remove the duplication.

    Representations of the initial conditions (as in ``Evolve``):
      * ``z``   -- real-space whitened field,
      * ``zk``  -- its rFFT packed as real/imag stacked in a trailing axis,
      * ``lin`` -- linear density field (``z`` colored by sqrt(P(k))).
    """
    def __init__(self, nc, bs, a0=0.1, af=1., nsteps=5, donbody=False, order=2, cosmodict=None, dtype=np.float32):
        """
        Args:
            nc: mesh cells per side.
            bs: box side length (units must match the power spectrum's).
            a0: initial scale factor for the N-body integration.
            af: final scale factor of the output field.
            nsteps: number of N-body time steps (used only if donbody).
            donbody: if True run the full N-body solver; else LPT only.
            order: LPT order passed to flowpm's lpt_init.
            cosmodict: optional parameter overrides for flowpm's Planck15.
            dtype: dtype of the precomputed k / P(k) meshes.
        """
        self.nc, self.bs = nc, bs
        #self.ipklin = ipklin
        self.a0, self.af, self.nsteps = a0, af, nsteps
        self.stages = np.linspace(a0, af, nsteps, endpoint=True)
        self.donbody = donbody
        if cosmodict is None: self.cosmodict = flowpm.cosmology.Planck15().to_dict()
        else: self.cosmodict = cosmodict
        self.order = order
        self.dtype = dtype
        self._build()
    def _build(self):
        # Precompute the cosmology, the linear power spectrum spline, and
        # the |k| and P(k) meshes used by the z <-> lin conversions.
        nc, bs = self.nc, self.bs
        self.cosmo = flowpm.cosmology.Planck15(**self.cosmodict)
        self.klin = np.logspace(-5, 3, 2000)
        self.plin = linear_matter_power(self.cosmo, self.klin)
        self.ipklin = iuspline(self.klin, self.plin)
        #kvec = tools.fftk((nc, nc, nc), boxsize=bs, symmetric=False)
        kvec = flowpm.kernels.fftk([nc, nc, nc], symmetric=False)
        self.kmesh = (sum((k*nc/bs)**2 for k in kvec)**0.5).astype(self.dtype)
        self.pkmesh = self.ipklin(self.kmesh).astype(self.dtype)
        # pkmesh restricted to the rFFT half-spectrum (last axis nc//2+1).
        self.pkmesh_r = self.ipklin(self.kmesh)[:, :, :nc//2+1].astype(self.dtype)
    @tf.function
    def pm(self, linear, cosmodict=None, returnpos=False):
        """Run the forward model on ``linear``.

        Returns the final CIC-painted density field; if ``returnpos`` is
        True, also returns the final particle positions.
        """
        print("PM graph")
        if cosmodict is None:
            cosmo = flowpm.cosmology.Planck15(**self.cosmodict)
        else:
            cosmo = flowpm.cosmology.Planck15(**cosmodict)
        if self.donbody:
            print('Nobdy sim')
            state = lpt_init(cosmo, linear, a=self.a0, order=self.order)
            final_state = nbody(cosmo, state, self.stages, self.nc)
        else:
            print('ZA/2LPT sim')
            final_state = lpt_init(cosmo, linear, a=self.af, order=self.order)
        tfinal_field = cic_paint(tf.zeros_like(linear), final_state[0])
        if returnpos: return tfinal_field, final_state[0]
        return tfinal_field
    @tf.function
    def biasfield(self, linear, bias, cosmodict=None):
        """Paint a quadratic-bias field b1*delta + b2*(delta^2) at the
        evolved particle positions.

        Args:
            linear: linear density field (leading axis is batch --
                inferred from the reshape below; TODO confirm).
            bias: sequence whose first two entries are (b1, b2).
        """
        print("PM graph")
        if cosmodict is None:
            cosmo = flowpm.cosmology.Planck15(**self.cosmodict)
        else:
            cosmo = flowpm.cosmology.Planck15(**cosmodict)
        if self.donbody:
            print('Nobdy sim')
            state = lpt_init(cosmo, linear, a=self.a0, order=self.order)
            final_state = nbody(cosmo, state, self.stages, self.nc)
        else:
            print('ZA/2LPT sim')
            final_state = lpt_init(cosmo, linear, a=self.af, order=self.order)
        b1, b2 = bias[0], bias[1]
        fpos = final_state[0]
        # Per-particle weights: mean-subtracted delta and delta^2 terms.
        w0 = tf.reshape(linear, (linear.shape[0], -1))
        w0 = w0 - tf.expand_dims(tf.reduce_mean(w0, 1), -1)
        w2 = w0*w0
        w2 = w2 - tf.expand_dims(tf.reduce_mean(w2, 1), -1)
        weight = b1*w0 + b2*w2
        bmodel = cic_paint(tf.zeros_like(linear), fpos, weight = weight)
        return bmodel
    @tf.function
    def z_to_lin(self, z):
        """Color white noise ``z`` by sqrt(P(k)/bs^3) to get the linear
        field (nc**1.5 is an FFT normalization factor)."""
        whitec = r2c3d(z* self.nc**1.5)
        lineark = tf.multiply(
            whitec, tf.cast((self.pkmesh / (self.bs**3))**0.5, whitec.dtype))
        linear = c2r3d(lineark)
        return linear
    @tf.function
    def lin_to_z(self, linear):
        """Inverse of z_to_lin: divide out sqrt(P(k)/bs^3) and undo the
        nc**1.5 normalization."""
        lineark= r2c3d(linear)
        whitec = tf.multiply(
            lineark, 1/tf.cast((self.pkmesh / (self.bs**3))**0.5, lineark.dtype))
        z = c2r3d(whitec)
        z = z/ self.nc**1.5
        return z
    @tf.function
    def pmz(self, z):
        """Forward model directly from the whitened field ``z``."""
        print("PM graph")
        linear = self.z_to_lin(z)
        return self.pm(linear)
    @tf.function
    def pmzk(self, zk):
        """Forward model directly from the packed Fourier field ``zk``."""
        print("PM graph")
        linear = self.zk_to_lin(zk)
        return self.pm(linear)
    @tf.function
    def zk_to_z(self, zk):
        """Inverse of z_to_zk: unpack real/imag pairs and inverse-rFFT."""
        #print("zk to linl : ", zk.shape, self.pkmesh_r.shape)
        zkn = zk * (self.nc**3/2)**0.5
        zknc = tf.complex(zkn[..., 0], zkn[..., 1])
        z = tf.signal.irfft3d(zknc)
        return z
    @tf.function
    def z_to_zk(self, z):
        """rFFT ``z`` and pack real/imag parts into a trailing axis,
        rescaled by (nc^3/2)^0.5."""
        zknc = tf.signal.rfft3d(z)
        zkn0, zkn1 = tf.math.real(zknc), tf.math.imag(zknc)
        zkn = tf.stack([zkn0, zkn1], -1)
        zk = zkn / (self.nc**3/2.)**0.5
        return zk
    @tf.function
    def zk_to_link(self, zk):
        """Packed white-noise spectrum -> packed linear spectrum
        (multiplied by sqrt(P(k)/bs^3) on the rFFT half-grid)."""
        #print("zk to linl : ", zk.shape, self.pkmesh_r.shape)
        zkn = zk * (self.nc**3/2)**0.5
        link = zkn * (tf.expand_dims(self.pkmesh_r, -1)/self.bs**3)**0.5 * self.nc**1.5
        return link
    @tf.function
    def zk_to_lin(self, zk):
        """Packed white-noise spectrum -> real-space linear field."""
        #print("zk to lin : ", zk.shape)
        link = self.zk_to_link(zk)
        linkc = tf.complex(link[..., 0], link[..., 1])
        lin = tf.signal.irfft3d(linkc)
        return lin
    @tf.function
    def link_to_zk(self, link):
        """Inverse of zk_to_link: divide out sqrt(P(k)) and rescale."""
        link = link / self.nc**1.5
        zkn = link / (tf.expand_dims(tf.cast(self.pkmesh_r, link.dtype), -1) /self.bs**3)**0.5
        zk = zkn / (self.nc**3/2)**0.5
        return zk
    @tf.function
    def lin_to_zk(self, lin):
        """Real-space linear field -> packed white-noise spectrum."""
        linkc = tf.signal.rfft3d(lin)
        link0, link1 = tf.math.real(linkc), tf.math.imag(linkc)
        link = tf.stack([link0, link1], -1)
        return self.link_to_zk(link)
    @tf.function
    def zdist(self):
        """Prior distribution of the whitened field: standard normal."""
        return tfd.Normal(0, 1)
| 31.903846
| 114
| 0.58037
| 1,436
| 9,954
| 3.942201
| 0.110724
| 0.027557
| 0.05741
| 0.039569
| 0.863098
| 0.859389
| 0.846494
| 0.831302
| 0.831302
| 0.81664
| 0
| 0.032499
| 0.273558
| 9,954
| 311
| 115
| 32.006431
| 0.75038
| 0.061382
| 0
| 0.857759
| 0
| 0
| 0.017156
| 0.004718
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12069
| false
| 0
| 0.051724
| 0.008621
| 0.284483
| 0.056034
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d7c60d3360b1e9044ac3aeb59c4021dddff6a5dc
| 7,680
|
py
|
Python
|
tests/test_030_alertmanagement.py
|
arproio/kpitest
|
43e015a57ad1bdcf16c19f11ac3cac6c5e20e212
|
[
"MIT"
] | null | null | null |
tests/test_030_alertmanagement.py
|
arproio/kpitest
|
43e015a57ad1bdcf16c19f11ac3cac6c5e20e212
|
[
"MIT"
] | null | null | null |
tests/test_030_alertmanagement.py
|
arproio/kpitest
|
43e015a57ad1bdcf16c19f11ac3cac6c5e20e212
|
[
"MIT"
] | null | null | null |
import sys
import requests
import pytest
import os
import json
import logging
import allure
from kpitest.thingworx import ThingworxServer
from .conftest import log_testcase,log_ret,log_input
@pytest.mark.order3
#@pytest.mark.incremental
@allure.feature("Alert Management")
class TestClass:
    """API tests for the ``Alert_Management_Util`` Thingworx services.

    Each test calls one service twice: with a valid payload (expected
    HTTP 200, and a non-empty ``rows`` result where applicable) and with
    a bogus identifier (expected empty result set or HTTP 500, matching
    the services' observed error behaviour).
    """

    def _post(self, testServer, service, jsonbody):
        """Log *jsonbody*, POST it to *service* on Alert_Management_Util,
        log the response, and return it for the caller to assert on."""
        url = testServer.get_thing_service('Alert_Management_Util', service)
        log_input(jsonbody)
        ret = requests.request('POST', url, headers=testServer.get_headers(),
                               json=jsonbody, verify=testServer.validateSSL)
        log_ret(ret)
        return ret

    @log_testcase
    def test_RetrieveAllAlertsByUserId(self, testServer):
        ret = self._post(testServer, 'RetrieveAllAlertsByUserId', {
            "UserId": "Administrator",
            "HistoricalDays": 30
        })
        assert ret.status_code == 200
        json.loads(ret.text)  # response must at least parse as JSON
        # NOTE(review): the original left the non-empty 'rows' check
        # disabled -- a fresh server may legitimately have no alerts.
        ret = self._post(testServer, 'RetrieveAllAlertsByUserId', {"UserId": "FakeUser"})
        assert ret.status_code == 200
        data = json.loads(ret.text)
        # An unknown user is not an error; it just yields no rows.
        assert len(data['rows']) == 0

    @log_testcase
    def test_RetrieveConfiguredAlertsByAssetId(self, testServer):
        service = 'RetrieveConfiguredAlertsByAssetId'
        ret = self._post(testServer, service, {"AssetInfo": [
            {"assetId": "Asset_RT_RTTFT_VR8600E_0724366", "lineId": "1", "LineName": "p54284"}
        ]})
        assert ret.status_code == 200
        data = json.loads(ret.text)
        assert len(data['rows']) > 0
        ret = self._post(testServer, service, {"AssetInfo": [
            {"assetId": "Asset_RT_RTTFT_VR8600E_0724366", "lineId": "fake", "LineName": "fake"}
        ]})
        # Unknown line identifiers surface as a server-side error.
        assert ret.status_code == 500

    @log_testcase
    def test_RetrieveConfiguredAlertsByLineId(self, testServer):
        service = 'RetrieveConfiguredAlertsByLineId'
        ret = self._post(testServer, service, {"LineInfo": [
            {"lineId": "1", "LineName": "p54284"}
        ]})
        assert ret.status_code == 200
        data = json.loads(ret.text)
        assert len(data['rows']) > 0
        ret = self._post(testServer, service, {"LineInfo": [
            {"lineId": "fake", "LineName": "fake"}
        ]})
        assert ret.status_code == 500

    @log_testcase
    def test_SubscribeToAlertByUserIdAssetId(self, testServer):
        service = 'SubscribeToAlertByUserIdAssetId'
        ret = self._post(testServer, service, {"SubscriptionInfo": [
            {"userId": "p54284",
             "alertId": "Asset_RT_RTTFT_VR8600E_0724366--param_sealvoltage--Voltagetoohigh",
             "assetId": "Asset_RT_RTTFT_VR8600E_0724366"}
        ]})
        assert ret.status_code == 200
        ret = self._post(testServer, service, {"SubscriptionInfo": [
            {"userId": "fake",
             "alertId": "Asset_RT_RTTFT_VR8600E_0724366--param_sealvoltage--Voltagetoohigh",
             "assetId": "Asset_RT_RTTFT_VR8600E_0724366"}
        ]})
        # Unknown user is a server-side error for subscription services.
        assert ret.status_code == 500

    @log_testcase
    def test_SubscribeToAlertByUserIdLineId(self, testServer):
        service = 'SubscribeToAlertByUserIdLineId'
        ret = self._post(testServer, service, {"SubscriptionInfo": [
            {"userId": "p54284",
             "alertId": "Asset_RT_RTTFT_VR8600E_0724366--param_sealvoltage--Voltagetoohigh--1--p54284",
             "lineId": "1"}
        ]})
        assert ret.status_code == 200
        ret = self._post(testServer, service, {"SubscriptionInfo": [
            {"userId": "fake",
             "alertId": "Asset_RT_RTTFT_VR8600E_0724366--param_sealvoltage--Voltagetoohigh--1--p54284",
             "lineId": "1"}
        ]})
        assert ret.status_code == 500

    @log_testcase
    def test_UnsubscribeFromAlertByUserIdAssetId(self, testServer):
        service = 'UnsubscribeFromAlertByUserIdAssetId'
        ret = self._post(testServer, service, {"SubscriptionInfo": [
            {"userId": "p54284",
             "alertId": "Asset_RT_RTTFT_VR8600E_0724366--param_sealvoltage--Voltagetoohigh",
             "assetId": "Asset_RT_RTTFT_VR8600E_0724366"}
        ]})
        assert ret.status_code == 200
        ret = self._post(testServer, service, {"SubscriptionInfo": [
            {"userId": "fake",
             "alertId": "Asset_RT_RTTFT_VR8600E_0724366--param_sealvoltage--Voltagetoohigh",
             "assetId": "Asset_RT_RTTFT_VR8600E_0724366"}
        ]})
        assert ret.status_code == 500

    @log_testcase
    def test_UnsubscribeFromAlertByUserIdLineId(self, testServer):
        service = 'UnsubscribeFromAlertByUserIdLineId'
        ret = self._post(testServer, service, {"SubscriptionInfo": [
            {"userId": "p54284",
             "alertId": "Asset_RT_RTTFT_VR8600E_0724366--param_sealvoltage--Voltagetoohigh--1--p54284",
             "lineId": "1"}
        ]})
        assert ret.status_code == 200
        ret = self._post(testServer, service, {"SubscriptionInfo": [
            {"userId": "fake",
             "alertId": "Asset_RT_RTTFT_VR8600E_0724366--param_sealvoltage--Voltagetoohigh--1--p54284",
             "lineId": "1"}
        ]})
        assert ret.status_code == 500
| 37.463415
| 123
| 0.616927
| 730
| 7,680
| 6.264384
| 0.110959
| 0.059698
| 0.048983
| 0.058168
| 0.825497
| 0.825497
| 0.825497
| 0.825497
| 0.825497
| 0.805379
| 0
| 0.046499
| 0.263542
| 7,680
| 205
| 124
| 37.463415
| 0.762023
| 0.006641
| 0
| 0.736527
| 0
| 0
| 0.223388
| 0.145648
| 0
| 0
| 0
| 0
| 0.101796
| 1
| 0.041916
| false
| 0
| 0.053892
| 0
| 0.101796
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d7ffaa619df851f5d882a621ae1bf7849e2c2e27
| 4,495
|
py
|
Python
|
tests/test_data/world_tests/pattern_matched/expected.py
|
Beefy-Swain/pytiled_parser
|
ba959ded16749136563fe6aafbdcdf2129c04eb0
|
[
"MIT"
] | 13
|
2019-05-01T01:22:08.000Z
|
2020-11-12T05:37:59.000Z
|
tests/test_data/world_tests/pattern_matched/expected.py
|
Beefy-Swain/pytiled_parser
|
ba959ded16749136563fe6aafbdcdf2129c04eb0
|
[
"MIT"
] | 30
|
2019-05-27T20:07:08.000Z
|
2021-06-12T20:52:31.000Z
|
tests/test_data/world_tests/pattern_matched/expected.py
|
benjamin-kirkbride/pytiled_parser
|
ba959ded16749136563fe6aafbdcdf2129c04eb0
|
[
"MIT"
] | 15
|
2019-05-02T17:57:42.000Z
|
2021-04-12T01:56:26.000Z
|
from pathlib import Path
from pytiled_parser import common_types, layer, tiled_map, tileset, world
def _world_map(map_name, coordinates):
    """Build one WorldMap entry of the expected world.

    The two maps in this world are identical 5x5 desert maps; they differ
    only in the JSON file they come from and their position in the world,
    so both are built here from those two parameters.

    Args:
        map_name: File name of the map JSON, resolved relative to this file.
        coordinates: ``common_types.OrderedPair`` world position of the map.

    Returns:
        A fully populated ``world.WorldMap``.
    """
    return world.WorldMap(
        size=common_types.Size(160, 160),
        coordinates=coordinates,
        tiled_map=tiled_map.TiledMap(
            map_file=Path(Path(__file__).parent / map_name)
            .absolute()
            .resolve(),
            infinite=False,
            map_size=common_types.Size(5, 5),
            next_layer_id=2,
            next_object_id=1,
            orientation="orthogonal",
            render_order="right-down",
            tiled_version="1.6.0",
            tile_size=common_types.Size(32, 32),
            version="1.6",
            tilesets={
                1: tileset.Tileset(
                    columns=8,
                    image=Path(
                        Path(__file__).parent
                        / "../../images/tmw_desert_spacing.png"
                    )
                    .absolute()
                    .resolve(),
                    image_width=265,
                    image_height=199,
                    firstgid=1,
                    margin=1,
                    spacing=1,
                    name="tileset",
                    tile_count=48,
                    tiled_version="1.6.0",
                    tile_height=32,
                    tile_width=32,
                    version="1.6",
                    type="tileset",
                )
            },
            layers=[
                layer.TileLayer(
                    name="Tile Layer 1",
                    opacity=1,
                    visible=True,
                    id=1,
                    size=common_types.Size(5, 5),
                    # 5x5 grid filled entirely with tile gid 30.
                    data=[[30, 30, 30, 30, 30] for _ in range(5)],
                )
            ],
        ),
    )


# Expected parse result for the pattern-matched world test: two identical
# maps stacked vertically (second map offset 160px down).
EXPECTED = world.World(
    only_show_adjacent=False,
    maps=[
        _world_map("map_p0-n0.json", common_types.OrderedPair(0, 0)),
        _world_map("map_p0-n1.json", common_types.OrderedPair(0, 160)),
    ],
)
| 36.25
| 73
| 0.336374
| 353
| 4,495
| 4.082153
| 0.21813
| 0.133241
| 0.191534
| 0.244275
| 0.909091
| 0.909091
| 0.909091
| 0.909091
| 0.909091
| 0.909091
| 0
| 0.104016
| 0.567964
| 4,495
| 123
| 74
| 36.544715
| 0.638002
| 0
| 0
| 0.85124
| 0
| 0
| 0.049388
| 0.015573
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016529
| 0
| 0.016529
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0bc98db0124bad054723148bb5576fafdcee11d3
| 18,825
|
py
|
Python
|
tests/test_pynonymize.py
|
der-gabe/pynonymizer
|
3e53bb1f27c2446672f7c2794009354dc8d95ace
|
[
"MIT"
] | null | null | null |
tests/test_pynonymize.py
|
der-gabe/pynonymizer
|
3e53bb1f27c2446672f7c2794009354dc8d95ace
|
[
"MIT"
] | null | null | null |
tests/test_pynonymize.py
|
der-gabe/pynonymizer
|
3e53bb1f27c2446672f7c2794009354dc8d95ace
|
[
"MIT"
] | null | null | null |
import pytest
import unittest
from unittest.mock import patch, Mock, mock_open
from pynonymizer.cli import cli
from pynonymizer.pynonymize import ArgumentValidationError, DatabaseConnectionError, pynonymize
from types import SimpleNamespace
def test_pynonymize_missing_db_credentials():
    """pynonymize must refuse to run when no database credentials are given."""
    run_kwargs = dict(
        input_path="input.sql",
        strategyfile_path="strategyfile.yml",
        output_path="output.sql",
        db_user=None,
        db_password=None,
    )
    with pytest.raises(ArgumentValidationError):
        pynonymize(**run_kwargs)
@patch("dotenv.find_dotenv", Mock())
@patch("dotenv.load_dotenv", Mock())
@patch("pynonymizer.pynonymize.read_config")
@patch("pynonymizer.pynonymize.get_provider")
@patch("pynonymizer.pynonymize.FakeColumnGenerator")
@patch("pynonymizer.pynonymize.StrategyParser")
@patch("builtins.open", mock_open(read_data="TESTFILEDATA"))
class MainProcessTests(unittest.TestCase):
    """Smoke tests for the pynonymize main process with all collaborators mocked.

    The class-level ``@patch`` decorators are applied bottom-up, so each test
    method receives the mocks in the order
    ``(StrategyParser, FakeColumnSet, get_provider, read_config)``.
    ``open`` is mocked so the strategyfile read returns "TESTFILEDATA".
    """

    def test_any_db_kwarg(self, StrategyParser, FakeColumnSet, get_provider, read_config):
        """
        test that dynamic args are passed to the provider properly e.g. mssql_blah
        """
        pynonymize(
            input_path="TEST_INPUT",
            strategyfile_path="TEST_STRATEGYFILE",
            output_path="TEST_OUTPUT",
            db_type="mssql",
            db_host="TEST_HOST",
            db_port="TEST_PORT",
            db_name="TEST_NAME",
            db_user="TEST_USER",
            db_password="TEST_PASSWORD",
            fake_locale="TEST_LOCALE",
            mysql_other_amazing_var="TEST_DYNAMIC_VAR",  # as this is mssql, this should be ignored
            mssql_special_provider_var="TEST_DYNAMIC_VAR2"
        )
        StrategyParser.return_value.parse_config.assert_called()
        # Only the mssql_-prefixed kwarg survives, with the prefix stripped;
        # the mysql_-prefixed one is dropped. seed_rows defaults to 150.
        get_provider.assert_called_with(type="mssql", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150, special_provider_var="TEST_DYNAMIC_VAR2")

    def test_pynonymize_main_process(self, StrategyParser, FakeColumnSet, get_provider, read_config):
        """
        a rough smoke test for the cli process. This needs an integration test to back it up.
        """
        pynonymize(
            input_path="TEST_INPUT",
            strategyfile_path="TEST_STRATEGYFILE",
            output_path="TEST_OUTPUT",
            db_type="TEST_TYPE",
            db_host="TEST_HOST",
            db_port="TEST_PORT",
            db_name="TEST_NAME",
            db_user="TEST_USER",
            db_password="TEST_PASSWORD",
            fake_locale="TEST_LOCALE",
            seed_rows=999
        )
        StrategyParser.return_value.parse_config.assert_called()
        # explicit seed_rows overrides the 150 default
        get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=999)
        provider = get_provider.return_value
        # A full run executes every lifecycle step on the provider.
        provider.create_database.assert_called()
        provider.restore_database.assert_called()
        provider.anonymize_database.assert_called()
        provider.dump_database.assert_called()
        provider.drop_database.assert_called()

    def test_pynonymize_only_step(self, StrategyParser, FakeColumnSet, get_provider, read_config):
        # only_step restricts the run to the single named step.
        pynonymize(
            input_path="TEST_INPUT",
            strategyfile_path="TEST_STRATEGYFILE",
            output_path="TEST_OUTPUT",
            db_type="TEST_TYPE",
            db_host="TEST_HOST",
            db_port="TEST_PORT",
            db_name="TEST_NAME",
            db_user="TEST_USER",
            db_password="TEST_PASSWORD",
            fake_locale="TEST_LOCALE",
            only_step="ANONYMIZE_DB"
        )
        StrategyParser.return_value.parse_config.assert_called()
        get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
        provider = get_provider.return_value
        provider.create_database.assert_not_called()
        provider.restore_database.assert_not_called()
        provider.anonymize_database.assert_called()
        provider.dump_database.assert_not_called()
        provider.drop_database.assert_not_called()

    def test_pynonymize_stop_at_step(self, StrategyParser, FakeColumnSet, get_provider, read_config):
        # stop_at_step runs every step up to and including the named one.
        pynonymize(
            input_path="TEST_INPUT",
            strategyfile_path="TEST_STRATEGYFILE",
            output_path="TEST_OUTPUT",
            db_type="TEST_TYPE",
            db_host="TEST_HOST",
            db_port="TEST_PORT",
            db_name="TEST_NAME",
            db_user="TEST_USER",
            db_password="TEST_PASSWORD",
            fake_locale="TEST_LOCALE",
            stop_at_step="ANONYMIZE_DB"
        )
        StrategyParser.return_value.parse_config.assert_called()
        get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
        provider = get_provider.return_value
        provider.create_database.assert_called()
        provider.restore_database.assert_called()
        provider.anonymize_database.assert_called()
        provider.dump_database.assert_not_called()
        provider.drop_database.assert_not_called()

    def test_pynonymize_skip_steps(self, StrategyParser, FakeColumnSet, get_provider,
                                   read_config):
        # skip_steps drops the named steps but runs everything else.
        pynonymize(
            input_path="TEST_INPUT",
            strategyfile_path="TEST_STRATEGYFILE",
            output_path="TEST_OUTPUT",
            db_type="TEST_TYPE",
            db_host="TEST_HOST",
            db_port="TEST_PORT",
            db_name="TEST_NAME",
            db_user="TEST_USER",
            db_password="TEST_PASSWORD",
            fake_locale="TEST_LOCALE",
            skip_steps=["ANONYMIZE_DB", "CREATE_DB", "DUMP_DB"]
        )
        get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
        provider = get_provider.return_value
        provider.create_database.assert_not_called()
        provider.restore_database.assert_called()
        provider.anonymize_database.assert_not_called()
        provider.dump_database.assert_not_called()
        provider.drop_database.assert_called()

    def test_pynonymize_start_at_step(self, StrategyParser, FakeColumnSet, get_provider,
                                      read_config):
        # start_at_step skips everything before the named step.
        pynonymize(
            input_path="TEST_INPUT",
            strategyfile_path="TEST_STRATEGYFILE",
            output_path="TEST_OUTPUT",
            db_type="TEST_TYPE",
            db_host="TEST_HOST",
            db_port="TEST_PORT",
            db_name="TEST_NAME",
            db_user="TEST_USER",
            db_password="TEST_PASSWORD",
            fake_locale="TEST_LOCALE",
            start_at_step="ANONYMIZE_DB"
        )
        StrategyParser.return_value.parse_config.assert_called()
        get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
        provider = get_provider.return_value
        provider.create_database.assert_not_called()
        provider.restore_database.assert_not_called()
        provider.anonymize_database.assert_called()
        provider.dump_database.assert_called()
        provider.drop_database.assert_called()
@patch("dotenv.find_dotenv", Mock())
@patch("dotenv.load_dotenv", Mock())
@patch("pynonymizer.pynonymize.read_config")
@patch("pynonymizer.pynonymize.get_provider")
@patch("pynonymizer.pynonymize.FakeColumnGenerator")
@patch("pynonymizer.pynonymize.StrategyParser")
@patch("builtins.open", mock_open(read_data="TESTFILEDATA"))
class OptionalArgumentsSkippedTests(unittest.TestCase):
    """
    pynonymize should not throw argument validation errors for missing "mandatory" args
    that are only mandatory for certain steps.
    START = 0
    GET_SOURCE = 100
    CREATE_DB = 200
    RESTORE_DB = 300
    ANONYMIZE_DB = 400
    DUMP_DB = 500
    DROP_DB = 600
    END = 9999
    """
    # Mocks are injected bottom-up by the class decorators:
    # (StrategyParser, FakeColumnSet, get_provider, read_config).

    def test_optional_input_when_skip_input_steps(self, StrategyParser, FakeColumnSet,
                                                  get_provider, read_config):
        # input_path=None is acceptable because RESTORE_DB (the only consumer
        # of the input) is skipped.
        pynonymize(
            input_path=None,
            strategyfile_path="TEST_STRATEGYFILE",
            output_path="TEST_OUTPUT",
            db_type="TEST_TYPE",
            db_host="TEST_HOST",
            db_port="TEST_PORT",
            db_name="TEST_NAME",
            db_user="TEST_USER",
            db_password="TEST_PASSWORD",
            fake_locale="TEST_LOCALE",
            start_at_step=None,
            stop_at_step=None,
            skip_steps=["RESTORE_DB"]
        )
        StrategyParser.return_value.parse_config.assert_called()
        get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
        provider = get_provider.return_value
        provider.create_database.assert_called()
        provider.restore_database.assert_not_called()
        provider.anonymize_database.assert_called()
        provider.dump_database.assert_called()
        provider.drop_database.assert_called()

    def test_optional_input_when_start_at_after_input_steps(self, StrategyParser, FakeColumnSet,
                                                            get_provider, read_config):
        # input_path=None is acceptable because the run starts after RESTORE_DB.
        pynonymize(
            input_path=None,
            strategyfile_path="TEST_STRATEGYFILE",
            output_path="TEST_OUTPUT",
            db_type="TEST_TYPE",
            db_host="TEST_HOST",
            db_port="TEST_PORT",
            db_name="TEST_NAME",
            db_user="TEST_USER",
            db_password="TEST_PASSWORD",
            fake_locale="TEST_LOCALE",
            start_at_step="ANONYMIZE_DB",
            stop_at_step=None,
            skip_steps=None
        )
        StrategyParser.return_value.parse_config.assert_called()
        get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
        provider = get_provider.return_value
        provider.create_database.assert_not_called()
        provider.restore_database.assert_not_called()
        provider.anonymize_database.assert_called()
        provider.dump_database.assert_called()
        provider.drop_database.assert_called()

    def test_optional_input_when_stop_at_before_input_steps(self, StrategyParser, FakeColumnSet,
                                                            get_provider, read_config):
        # input_path=None is acceptable because the run stops at CREATE_DB,
        # before RESTORE_DB would need the input.
        pynonymize(
            input_path=None,
            strategyfile_path="TEST_STRATEGYFILE",
            output_path="TEST_OUTPUT",
            db_type="TEST_TYPE",
            db_host="TEST_HOST",
            db_port="TEST_PORT",
            db_name="TEST_NAME",
            db_user="TEST_USER",
            db_password="TEST_PASSWORD",
            fake_locale="TEST_LOCALE",
            start_at_step=None,
            stop_at_step="CREATE_DB",
            skip_steps=None
        )
        get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
        provider = get_provider.return_value
        provider.create_database.assert_called()
        provider.restore_database.assert_not_called()
        provider.anonymize_database.assert_not_called()
        provider.dump_database.assert_not_called()
        provider.drop_database.assert_not_called()

    def test_optional_strategyfile_when_skip_anonymize(self, StrategyParser, FakeColumnSet,
                                                       get_provider, read_config):
        # strategyfile_path=None is acceptable because ANONYMIZE_DB is skipped.
        pynonymize(
            input_path="TEST_INPUT",
            strategyfile_path=None,
            output_path="TEST_OUTPUT",
            db_type="TEST_TYPE",
            db_host="TEST_HOST",
            db_port="TEST_PORT",
            db_name="TEST_NAME",
            db_user="TEST_USER",
            db_password="TEST_PASSWORD",
            fake_locale="TEST_LOCALE",
            start_at_step=None,
            stop_at_step=None,
            skip_steps=["ANONYMIZE_DB"]
        )
        get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
        provider = get_provider.return_value
        provider.create_database.assert_called()
        provider.restore_database.assert_called()
        provider.anonymize_database.assert_not_called()
        provider.dump_database.assert_called()
        provider.drop_database.assert_called()

    def test_optional_strategyfile_when_start_at_after_anonymize(self, StrategyParser, FakeColumnSet,
                                                                 get_provider, read_config):
        # strategyfile_path=None is acceptable because the run starts after
        # ANONYMIZE_DB.
        pynonymize(
            input_path="TEST_INPUT",
            strategyfile_path=None,
            output_path="TEST_OUTPUT",
            db_type="TEST_TYPE",
            db_host="TEST_HOST",
            db_port="TEST_PORT",
            db_name="TEST_NAME",
            db_user="TEST_USER",
            db_password="TEST_PASSWORD",
            fake_locale="TEST_LOCALE",
            start_at_step="DUMP_DB",
            stop_at_step=None,
            skip_steps=None
        )
        get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
        provider = get_provider.return_value
        provider.create_database.assert_not_called()
        provider.restore_database.assert_not_called()
        provider.anonymize_database.assert_not_called()
        provider.dump_database.assert_called()
        provider.drop_database.assert_called()

    def test_optional_strategyfile_when_stop_at_before_anonymize(self, StrategyParser, FakeColumnSet,
                                                                 get_provider, read_config):
        # strategyfile_path=None is acceptable because the run stops at
        # RESTORE_DB, before ANONYMIZE_DB.
        pynonymize(
            input_path="TEST_INPUT",
            strategyfile_path=None,
            output_path="TEST_OUTPUT",
            db_type="TEST_TYPE",
            db_host="TEST_HOST",
            db_port="TEST_PORT",
            db_name="TEST_NAME",
            db_user="TEST_USER",
            db_password="TEST_PASSWORD",
            fake_locale="TEST_LOCALE",
            start_at_step=None,
            stop_at_step="RESTORE_DB",
            skip_steps=None
        )
        get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
        provider = get_provider.return_value
        provider.create_database.assert_called()
        provider.restore_database.assert_called()
        provider.anonymize_database.assert_not_called()
        provider.dump_database.assert_not_called()
        provider.drop_database.assert_not_called()

    def test_optional_output_when_skip_output_steps(self, StrategyParser, FakeColumnSet,
                                                    get_provider, read_config):
        # output_path=None is acceptable because DUMP_DB (the only consumer
        # of the output) is skipped.
        pynonymize(
            input_path="TEST_INPUT",
            strategyfile_path="TEST_STRATEGYFILE",
            output_path=None,
            db_type="TEST_TYPE",
            db_host="TEST_HOST",
            db_port="TEST_PORT",
            db_name="TEST_NAME",
            db_user="TEST_USER",
            db_password="TEST_PASSWORD",
            fake_locale="TEST_LOCALE",
            start_at_step=None,
            stop_at_step=None,
            skip_steps=["DUMP_DB"]
        )
        StrategyParser.return_value.parse_config.assert_called()
        get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
        provider = get_provider.return_value
        provider.create_database.assert_called()
        provider.restore_database.assert_called()
        provider.anonymize_database.assert_called()
        provider.dump_database.assert_not_called()
        provider.drop_database.assert_called()

    def test_optional_output_when_start_at_after_output_steps(self, StrategyParser, FakeColumnSet,
                                                              get_provider, read_config):
        # output_path=None is acceptable because the run starts after DUMP_DB.
        pynonymize(
            input_path="TEST_INPUT",
            strategyfile_path="TEST_STRATEGYFILE",
            output_path=None,
            db_type="TEST_TYPE",
            db_host="TEST_HOST",
            db_port="TEST_PORT",
            db_name="TEST_NAME",
            db_user="TEST_USER",
            db_password="TEST_PASSWORD",
            fake_locale="TEST_LOCALE",
            start_at_step="DROP_DB",
            stop_at_step=None,
            skip_steps=None
        )
        get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
        provider = get_provider.return_value
        provider.create_database.assert_not_called()
        provider.restore_database.assert_not_called()
        provider.anonymize_database.assert_not_called()
        provider.dump_database.assert_not_called()
        provider.drop_database.assert_called()

    def test_optional_output_when_stop_at_before_output_steps(self, StrategyParser, FakeColumnSet,
                                                              get_provider, read_config):
        # NOTE(review): despite the name, this test passes skip_steps=["DUMP_DB"]
        # with stop_at_step=None, duplicating test_optional_output_when_skip_output_steps
        # above. It likely intended stop_at_step="ANONYMIZE_DB" (with matching
        # assertion changes) — verify against the CLI's intended behaviour.
        pynonymize(
            input_path="TEST_INPUT",
            strategyfile_path="TEST_STRATEGYFILE",
            output_path=None,
            db_type="TEST_TYPE",
            db_host="TEST_HOST",
            db_port="TEST_PORT",
            db_name="TEST_NAME",
            db_user="TEST_USER",
            db_password="TEST_PASSWORD",
            fake_locale="TEST_LOCALE",
            start_at_step=None,
            stop_at_step=None,
            skip_steps=["DUMP_DB"]
        )
        StrategyParser.return_value.parse_config.assert_called()
        get_provider.assert_called_with(type="TEST_TYPE", db_host="TEST_HOST", db_port="TEST_PORT", db_user="TEST_USER", db_pass="TEST_PASSWORD", db_name="TEST_NAME", seed_rows=150)
        provider = get_provider.return_value
        provider.create_database.assert_called()
        provider.restore_database.assert_called()
        provider.anonymize_database.assert_called()
        provider.dump_database.assert_not_called()
        provider.drop_database.assert_called()
| 43.077803
| 219
| 0.650624
| 2,170
| 18,825
| 5.200461
| 0.062673
| 0.086841
| 0.069118
| 0.063181
| 0.903943
| 0.901817
| 0.893664
| 0.884448
| 0.882056
| 0.880284
| 0
| 0.005202
| 0.254502
| 18,825
| 436
| 220
| 43.176606
| 0.798917
| 0.023798
| 0
| 0.833773
| 0
| 0
| 0.164186
| 0.016194
| 0
| 0
| 0
| 0
| 0.248021
| 1
| 0.042216
| false
| 0.081794
| 0.015831
| 0
| 0.063325
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
0bcd8ad77cca24cf96e69ab0c0d07d7b22dd5e5c
| 22,183
|
py
|
Python
|
src/algorithms/donut.py
|
agnes-yang/DeepADoTS
|
4a52caf4e49bad8e057649ca05ea9522c77518fb
|
[
"MIT"
] | null | null | null |
src/algorithms/donut.py
|
agnes-yang/DeepADoTS
|
4a52caf4e49bad8e057649ca05ea9522c77518fb
|
[
"MIT"
] | null | null | null |
src/algorithms/donut.py
|
agnes-yang/DeepADoTS
|
4a52caf4e49bad8e057649ca05ea9522c77518fb
|
[
"MIT"
] | null | null | null |
<<<<<<< HEAD
import sys
import numpy as np
import pandas as pd
import six
import tensorflow as tf
from donut import DonutTrainer, DonutPredictor, Donut as DonutModel, complete_timestamp, standardize_kpi
from donut.augmentation import MissingDataInjection
from donut.utils import BatchSlidingWindow
from tensorflow import keras as K
from tfsnippet.modules import Sequential
from tfsnippet.scaffold import TrainLoop
from tfsnippet.utils import (get_default_session_or_error,
ensure_variables_initialized)
from tqdm import trange
from .algorithm_utils import Algorithm, TensorflowUtils
class QuietDonutTrainer(DonutTrainer):
    """DonutTrainer variant with the upstream progress prints removed.

    Only ``fit`` is overridden; all trainer state (``self._batch_size``,
    ``self._feed_dict``, ...) is inherited from :class:`DonutTrainer`.
    """

    def fit(self, values, labels, missing, mean, std, excludes=None,
            valid_portion=0.3, summary_dir=None):
        """
        Train the :class:`Donut` model with given data.
        From https://github.com/haowen-xu/donut/blob/master/donut/training.py but without prints.
        Args:
            values (np.ndarray): 1-D `float32` array, the standardized
                KPI observations.
            labels (np.ndarray): 1-D `int32` array, the anomaly labels.
            missing (np.ndarray): 1-D `int32` array, the indicator of
                missing points.
            mean (float): The mean of KPI observations before standardization.
            std (float): The standard deviation of KPI observations before
                standardization.
            excludes (np.ndarray): 1-D `bool` array, indicators of whether
                or not to totally exclude a point. If a point is excluded,
                any window which contains that point is excluded.
                (default :obj:`None`, no point is totally excluded)
            valid_portion (float): Ratio of validation data out of all the
                specified training data. (default 0.3)
            summary_dir (str): Optional summary directory for
                :class:`tf.summary.FileWriter`. (default :obj:`None`,
                summary is disabled)
        """
        sess = get_default_session_or_error()
        # split the training & validation set
        values = np.asarray(values, dtype=np.float32)
        labels = np.asarray(labels, dtype=np.int32)
        missing = np.asarray(missing, dtype=np.int32)
        if len(values.shape) != 1:
            raise ValueError('`values` must be a 1-D array')
        if labels.shape != values.shape:
            raise ValueError('The shape of `labels` does not agree with '
                             'the shape of `values` ({} vs {})'.
                             format(labels.shape, values.shape))
        if missing.shape != values.shape:
            raise ValueError('The shape of `missing` does not agree with '
                             'the shape of `values` ({} vs {})'.
                             format(missing.shape, values.shape))
        # The last `valid_portion` of the series is held out for validation.
        n = int(len(values) * valid_portion)
        train_values, v_x = values[:-n], values[-n:]
        train_labels, valid_labels = labels[:-n], labels[-n:]
        train_missing, valid_missing = missing[:-n], missing[-n:]
        # A validation point counts as "abnormal" if labeled OR missing.
        v_y = np.logical_or(valid_labels, valid_missing).astype(np.int32)
        if excludes is None:
            train_excludes, valid_excludes = None, None
        else:
            train_excludes, valid_excludes = excludes[:-n], excludes[-n:]
        # data augmentation object and the sliding window iterator
        # If std is zero choose a number close to zero
        aug = MissingDataInjection(mean, std, self._missing_data_injection_rate)
        train_sliding_window = BatchSlidingWindow(
            array_size=len(train_values),
            window_size=self.model.x_dims,
            batch_size=self._batch_size,
            excludes=train_excludes,
            shuffle=True,
            ignore_incomplete_batch=True,
        )
        valid_sliding_window = BatchSlidingWindow(
            array_size=len(v_x),
            window_size=self.model.x_dims,
            batch_size=self._valid_batch_size,
            excludes=valid_excludes,
        )
        # initialize the variables of the trainer, and the model
        sess.run(self._trainer_initializer)
        ensure_variables_initialized(self._train_params)
        # training loop
        lr = self._initial_lr
        # Side effect. EarlyStopping stores variables temporarely in a Temp dir
        with TrainLoop(
                param_vars=self._train_params,
                early_stopping=True,
                summary_dir=summary_dir,
                max_epoch=self._max_epoch,
                max_step=self._max_step) as loop:  # type: TrainLoop
            for epoch in loop.iter_epochs():
                # Fresh missing-data augmentation every epoch.
                x, y1, y2 = aug.augment(
                    train_values, train_labels, train_missing)
                y = np.logical_or(y1, y2).astype(np.int32)
                train_iterator = train_sliding_window.get_iterator([x, y])
                for step, (batch_x, batch_y) in loop.iter_steps(train_iterator):
                    # run a training step
                    feed_dict = dict(six.iteritems(self._feed_dict))
                    feed_dict[self._learning_rate] = lr
                    feed_dict[self._input_x] = batch_x
                    feed_dict[self._input_y] = batch_y
                    loss, _ = sess.run(
                        [self._loss, self._train_op], feed_dict=feed_dict)
                    loop.collect_metrics({'loss': loss})
                    if step % self._valid_step_freq == 0:
                        # collect variable summaries
                        if summary_dir is not None:
                            loop.add_summary(sess.run(self._summary_op))
                        # do validation in batches
                        with loop.timeit('valid_time'), loop.metric_collector('valid_loss') as mc:
                            v_it = valid_sliding_window.get_iterator([v_x, v_y])
                            for b_v_x, b_v_y in v_it:
                                feed_dict = dict(
                                    six.iteritems(self._valid_feed_dict))
                                feed_dict[self._input_x] = b_v_x
                                feed_dict[self._input_y] = b_v_y
                                loss = sess.run(self._loss, feed_dict=feed_dict)
                                # Weight each batch's loss by its size.
                                mc.collect(loss, weight=len(b_v_x))
                # anneal the learning rate
                if self._lr_anneal_epochs and epoch % self._lr_anneal_epochs == 0:
                    lr *= self._lr_anneal_factor
class Donut(Algorithm, TensorflowUtils):
    """For each feature, the anomaly score is set to 1 for a point if its reconstruction probability
    is smaller than mean - std of the reconstruction probabilities for that feature. For each point
    in time, the maximum of the scores of the features is taken to support multivariate time series as well."""

    def __init__(self, num_epochs=256, batch_size=32, x_dims=120,
                 seed: int = None, gpu: int = None):
        # x_dims is the sliding-window length fed to the VAE.
        Algorithm.__init__(self, __name__, 'Donut', seed)
        TensorflowUtils.__init__(self, seed, gpu)
        self.max_epoch = num_epochs
        self.x_dims = x_dims
        self.batch_size = batch_size
        # One (mean, std, tf session, model) per fitted column, aligned by index.
        self.means, self.stds, self.tf_sessions, self.models = [], [], [], []

    def fit(self, X: pd.DataFrame):
        """Train one independent Donut model per column of X."""
        with self.device:
            # Reset all results from last run to avoid reusing variables
            self.means, self.stds, self.tf_sessions, self.models = [], [], [], []
            for col_idx in trange(len(X.columns)):
                col = X.columns[col_idx]
                # Each column gets its own session so its graph variables stay alive
                # until predict() runs.
                tf_session = tf.Session(config=tf.ConfigProto(allow_soft_placement=True))
                timestamps = X.index
                # Fill gaps by interpolation, then backfill any leading NaNs.
                features = X.loc[:, col].interpolate().bfill().values
                # No ground-truth anomaly labels during fitting — all zeros.
                labels = pd.Series(0, X.index)
                timestamps, _, (features, labels) = complete_timestamp(timestamps, (features, labels))
                # Points that were NaN in the raw column are treated as missing.
                missing = np.isnan(X.loc[:, col].values)
                _, mean, std = standardize_kpi(features, excludes=np.logical_or(labels, missing))
                with tf.variable_scope('model') as model_vs:
                    model = DonutModel(
                        h_for_p_x=Sequential([
                            K.layers.Dense(100, kernel_regularizer=K.regularizers.l2(0.001),
                                           activation=tf.nn.relu),
                            K.layers.Dense(100, kernel_regularizer=K.regularizers.l2(0.001),
                                           activation=tf.nn.relu),
                        ]),
                        h_for_q_z=Sequential([
                            K.layers.Dense(100, kernel_regularizer=K.regularizers.l2(0.001),
                                           activation=tf.nn.relu),
                            K.layers.Dense(100, kernel_regularizer=K.regularizers.l2(0.001),
                                           activation=tf.nn.relu),
                        ]),
                        x_dims=self.x_dims,
                        z_dims=5,
                    )
                trainer = QuietDonutTrainer(model=model, model_vs=model_vs, max_epoch=self.max_epoch,
                                            batch_size=self.batch_size, valid_batch_size=self.batch_size,
                                            missing_data_injection_rate=0.0, lr_anneal_factor=1.0)
                with tf_session.as_default():
                    trainer.fit(features, labels, missing, mean, std, valid_portion=0.25)
                self.means.append(mean)
                self.stds.append(std)
                self.tf_sessions.append(tf_session)
                self.models.append(model)

    def predict(self, X: pd.DataFrame):
        """Since we predict the anomaly scores for each feature independently, we already return a binarized one-
        dimensional anomaly score array."""
        with self.device:
            test_scores = np.zeros_like(X)
            for col_idx, col in enumerate(X.columns):
                # Reuse the per-column statistics/session/model saved by fit().
                mean, std, tf_session, model = \
                    self.means[col_idx], self.stds[col_idx], self.tf_sessions[col_idx], self.models[col_idx]
                test_values, _, _ = standardize_kpi(X.loc[:, col], mean=mean, std=std)
                test_missing = np.zeros_like(test_values)
                predictor = DonutPredictor(model)
                with tf_session.as_default():
                    test_score = predictor.get_score(test_values, test_missing)
                # Convert to negative reconstruction probability so score is in accordance with other detectors
                test_score = -np.power(np.e, test_score)
                # The predictor only scores full windows, so the first
                # x_dims - 1 points have no score — presumably; verify against
                # DonutPredictor.get_score's output length.
                test_scores[self.x_dims - 1:, col_idx] = test_score
            # Aggregate multivariate scores by taking the max over features.
            aggregated_test_scores = np.amax(test_scores, axis=1)
            # Backfill the unscored prefix with a value strictly below the
            # minimum score so it never ranks as anomalous.
            aggregated_test_scores[:self.x_dims - 1] = np.nanmin(aggregated_test_scores) - sys.float_info.epsilon
            return aggregated_test_scores
=======
import sys
import numpy as np
import pandas as pd
import six
import tensorflow as tf
from donut import DonutTrainer, DonutPredictor, Donut as DonutModel, complete_timestamp, standardize_kpi
from donut.augmentation import MissingDataInjection
from donut.utils import BatchSlidingWindow
from tensorflow import keras as K
from tfsnippet.modules import Sequential
from tfsnippet.scaffold import TrainLoop
from tfsnippet.utils import (get_default_session_or_error,
ensure_variables_initialized)
from tqdm import trange
from .algorithm_utils import Algorithm, TensorflowUtils
class QuietDonutTrainer(DonutTrainer):
def fit(self, values, labels, missing, mean, std, excludes=None,
valid_portion=0.3, summary_dir=None):
"""
Train the :class:`Donut` model with given data.
From https://github.com/haowen-xu/donut/blob/master/donut/training.py but without prints.
Args:
values (np.ndarray): 1-D `float32` array, the standardized
KPI observations.
labels (np.ndarray): 1-D `int32` array, the anomaly labels.
missing (np.ndarray): 1-D `int32` array, the indicator of
missing points.
mean (float): The mean of KPI observations before standardization.
std (float): The standard deviation of KPI observations before
standardization.
excludes (np.ndarray): 1-D `bool` array, indicators of whether
or not to totally exclude a point. If a point is excluded,
any window which contains that point is excluded.
(default :obj:`None`, no point is totally excluded)
valid_portion (float): Ratio of validation data out of all the
specified training data. (default 0.3)
summary_dir (str): Optional summary directory for
:class:`tf.summary.FileWriter`. (default :obj:`None`,
summary is disabled)
"""
sess = get_default_session_or_error()
# split the training & validation set
values = np.asarray(values, dtype=np.float32)
labels = np.asarray(labels, dtype=np.int32)
missing = np.asarray(missing, dtype=np.int32)
if len(values.shape) != 1:
raise ValueError('`values` must be a 1-D array')
if labels.shape != values.shape:
raise ValueError('The shape of `labels` does not agree with '
'the shape of `values` ({} vs {})'.
format(labels.shape, values.shape))
if missing.shape != values.shape:
raise ValueError('The shape of `missing` does not agree with '
'the shape of `values` ({} vs {})'.
format(missing.shape, values.shape))
n = int(len(values) * valid_portion)
train_values, v_x = values[:-n], values[-n:]
train_labels, valid_labels = labels[:-n], labels[-n:]
train_missing, valid_missing = missing[:-n], missing[-n:]
v_y = np.logical_or(valid_labels, valid_missing).astype(np.int32)
if excludes is None:
train_excludes, valid_excludes = None, None
else:
train_excludes, valid_excludes = excludes[:-n], excludes[-n:]
# data augmentation object and the sliding window iterator
# If std is zero choose a number close to zero
aug = MissingDataInjection(mean, std, self._missing_data_injection_rate)
train_sliding_window = BatchSlidingWindow(
array_size=len(train_values),
window_size=self.model.x_dims,
batch_size=self._batch_size,
excludes=train_excludes,
shuffle=True,
ignore_incomplete_batch=True,
)
valid_sliding_window = BatchSlidingWindow(
array_size=len(v_x),
window_size=self.model.x_dims,
batch_size=self._valid_batch_size,
excludes=valid_excludes,
)
# initialize the variables of the trainer, and the model
sess.run(self._trainer_initializer)
ensure_variables_initialized(self._train_params)
# training loop
lr = self._initial_lr
# Side effect. EarlyStopping stores variables temporarely in a Temp dir
with TrainLoop(
param_vars=self._train_params,
early_stopping=True,
summary_dir=summary_dir,
max_epoch=self._max_epoch,
max_step=self._max_step) as loop: # type: TrainLoop
for epoch in loop.iter_epochs():
x, y1, y2 = aug.augment(
train_values, train_labels, train_missing)
y = np.logical_or(y1, y2).astype(np.int32)
train_iterator = train_sliding_window.get_iterator([x, y])
for step, (batch_x, batch_y) in loop.iter_steps(train_iterator):
# run a training step
feed_dict = dict(six.iteritems(self._feed_dict))
feed_dict[self._learning_rate] = lr
feed_dict[self._input_x] = batch_x
feed_dict[self._input_y] = batch_y
loss, _ = sess.run(
[self._loss, self._train_op], feed_dict=feed_dict)
loop.collect_metrics({'loss': loss})
if step % self._valid_step_freq == 0:
# collect variable summaries
if summary_dir is not None:
loop.add_summary(sess.run(self._summary_op))
# do validation in batches
with loop.timeit('valid_time'), loop.metric_collector('valid_loss') as mc:
v_it = valid_sliding_window.get_iterator([v_x, v_y])
for b_v_x, b_v_y in v_it:
feed_dict = dict(
six.iteritems(self._valid_feed_dict))
feed_dict[self._input_x] = b_v_x
feed_dict[self._input_y] = b_v_y
loss = sess.run(self._loss, feed_dict=feed_dict)
mc.collect(loss, weight=len(b_v_x))
# anneal the learning rate
if self._lr_anneal_epochs and epoch % self._lr_anneal_epochs == 0:
lr *= self._lr_anneal_factor
class Donut(Algorithm, TensorflowUtils):
    """For each feature, the anomaly score is set to 1 for a point if its reconstruction probability
    is smaller than mean - std of the reconstruction probabilities for that feature. For each point
    in time, the maximum of the scores of the features is taken to support multivariate time series as well."""

    def __init__(self, num_epochs=256, batch_size=32, x_dims=120,
                 seed: int = None, gpu: int = None):
        """Configure the detector.

        :param num_epochs: maximum number of training epochs per feature model.
        :param batch_size: batch size used for both training and validation.
        :param x_dims: window dimensionality passed to ``DonutModel`` —
            presumably the sliding-window length, since ``predict`` leaves the
            first ``x_dims - 1`` points unscored (TODO confirm against DonutModel docs).
        :param seed: random seed forwarded to both base classes.
        :param gpu: GPU index forwarded to ``TensorflowUtils``.
        """
        Algorithm.__init__(self, __name__, 'Donut', seed)
        TensorflowUtils.__init__(self, seed, gpu)
        self.max_epoch = num_epochs
        self.x_dims = x_dims
        self.batch_size = batch_size
        # Per-column training artifacts, filled by fit() and consumed by predict():
        # standardization mean/std, the TF session owning the model's variables,
        # and the trained DonutModel itself.
        self.means, self.stds, self.tf_sessions, self.models = [], [], [], []

    def fit(self, X: pd.DataFrame):
        """Train one independent Donut model per column of ``X``.

        :param X: multivariate time series; each column is treated as a
            separate univariate KPI.
        """
        with self.device:
            # Reset all results from last run to avoid reusing variables
            self.means, self.stds, self.tf_sessions, self.models = [], [], [], []
            for col_idx in trange(len(X.columns)):
                col = X.columns[col_idx]
                # One dedicated session per column so each model's variables live
                # in their own session; allow_soft_placement lets TF fall back to
                # CPU for ops without a GPU kernel.
                tf_session = tf.Session(config=tf.ConfigProto(allow_soft_placement=True))
                timestamps = X.index
                # Fill NaNs by interpolation, then back-fill any leading NaNs.
                features = X.loc[:, col].interpolate().bfill().values
                # No known anomaly labels: treat every point as normal (0).
                labels = pd.Series(0, X.index)
                timestamps, _, (features, labels) = complete_timestamp(timestamps, (features, labels))
                # Remember which points were originally missing (before interpolation).
                missing = np.isnan(X.loc[:, col].values)
                # Compute standardization statistics, excluding labeled/missing points.
                _, mean, std = standardize_kpi(features, excludes=np.logical_or(labels, missing))
                with tf.variable_scope('model') as model_vs:
                    model = DonutModel(
                        # Two small L2-regularized MLPs for the VAE's
                        # p(x|z) and q(z|x) networks.
                        h_for_p_x=Sequential([
                            K.layers.Dense(100, kernel_regularizer=K.regularizers.l2(0.001),
                                           activation=tf.nn.relu),
                            K.layers.Dense(100, kernel_regularizer=K.regularizers.l2(0.001),
                                           activation=tf.nn.relu),
                        ]),
                        h_for_q_z=Sequential([
                            K.layers.Dense(100, kernel_regularizer=K.regularizers.l2(0.001),
                                           activation=tf.nn.relu),
                            K.layers.Dense(100, kernel_regularizer=K.regularizers.l2(0.001),
                                           activation=tf.nn.relu),
                        ]),
                        x_dims=self.x_dims,
                        z_dims=5,
                    )
                trainer = QuietDonutTrainer(model=model, model_vs=model_vs, max_epoch=self.max_epoch,
                                            batch_size=self.batch_size, valid_batch_size=self.batch_size,
                                            missing_data_injection_rate=0.0, lr_anneal_factor=1.0)
                with tf_session.as_default():
                    # 25% of the series is held out for validation during training.
                    trainer.fit(features, labels, missing, mean, std, valid_portion=0.25)
                self.means.append(mean)
                self.stds.append(std)
                self.tf_sessions.append(tf_session)
                self.models.append(model)

    def predict(self, X: pd.DataFrame):
        """Since we predict the anomaly scores for each feature independently, we already return a binarized one-
        dimensional anomaly score array."""
        with self.device:
            test_scores = np.zeros_like(X)
            for col_idx, col in enumerate(X.columns):
                # Retrieve the artifacts stored by fit() for this column.
                mean, std, tf_session, model = \
                    self.means[col_idx], self.stds[col_idx], self.tf_sessions[col_idx], self.models[col_idx]
                # Standardize with the training statistics.
                # NOTE(review): unlike fit(), NaNs are not interpolated here and
                # test_missing is all zeros — verify that test data is expected
                # to be complete.
                test_values, _, _ = standardize_kpi(X.loc[:, col], mean=mean, std=std)
                test_missing = np.zeros_like(test_values)
                predictor = DonutPredictor(model)
                with tf_session.as_default():
                    test_score = predictor.get_score(test_values, test_missing)
                # Convert to negative reconstruction probability so score is in accordance with other detectors
                test_score = -np.power(np.e, test_score)
                # The first x_dims - 1 points have no full window, so only the
                # tail of the column receives scores.
                test_scores[self.x_dims - 1:, col_idx] = test_score
            # Aggregate across features by taking the per-timestep maximum.
            aggregated_test_scores = np.amax(test_scores, axis=1)
            # Give the unscored leading points a score strictly below the
            # minimum so they are never flagged as anomalies.
            aggregated_test_scores[:self.x_dims - 1] = np.nanmin(aggregated_test_scores) - sys.float_info.epsilon
            return aggregated_test_scores
>>>>>>> upstream/master
| 50.87844
| 114
| 0.571879
| 2,571
| 22,183
| 4.722676
| 0.124465
| 0.017131
| 0.010707
| 0.007248
| 0.998518
| 0.998518
| 0.998518
| 0.998518
| 0.998518
| 0.998518
| 0
| 0.01166
| 0.34274
| 22,183
| 435
| 115
| 50.995402
| 0.821125
| 0.048911
| 0
| 0.971246
| 0
| 0
| 0.024152
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.089457
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0bd0b486e5ef6357e30f33b2d18c94bb1153bd27
| 74,485
|
py
|
Python
|
PYex/Rambit/Rambit/source/Resources.py
|
iPatso/PyGameProjs
|
b1eed993649a1e90da5214a1604fdb59d65ff0b4
|
[
"Apache-2.0"
] | null | null | null |
PYex/Rambit/Rambit/source/Resources.py
|
iPatso/PyGameProjs
|
b1eed993649a1e90da5214a1604fdb59d65ff0b4
|
[
"Apache-2.0"
] | null | null | null |
PYex/Rambit/Rambit/source/Resources.py
|
iPatso/PyGameProjs
|
b1eed993649a1e90da5214a1604fdb59d65ff0b4
|
[
"Apache-2.0"
] | null | null | null |
import pygame
# Display and color constants for the game.

# Default game window size as (width, height) in pixels.
DEFAULT_SCREEN_SIZE = (800, 600)

# RGB color triples used for drawing and color-keying.
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
MAGENTA = (255, 0, 255)
# Tileset configuration shared by the game maps.
MAP = dict(
    ts="res/img/map.png",  # tileset: image file holding the map's tile graphics
    tw=64,                 # tile width: width of one tile in pixels
    th=64,                 # tile height: height of one tile in pixels
)
STAGE1 = {
'tl': [
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 20, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 20, 15, 16, 15, 16, 15, 16, 15, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 11, 35, 31, 32, 36, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 13, 14, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 9, 0, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 23, 24, 21, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 13, 14, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 33, 35, 36, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 23, 24, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 13, 14, 13, 15, 16, 15, 16, 13, 14, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 33, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 15, 16, 15, 16, 15, 13, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 30, 0, 0, 0, 0, 0, 0, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 0, 0, 34, 0, 28, 0, 0, 29, 30, 0, 0, 0, 29, 30, 0, 26, 0, 34, 0, 0, 0, 0, 0, 29, 30, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 9, 35, 31, 32, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 21, 0, 0, 0, 0, 0, 0, 0, 0, 34, 29, 30, 0, 0, 0, 0, 0, 0, 0, 34, 0, 0, 0, 2, 5, 6, 5, 6, 5, 6, 5, 6, 1, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 6, 5, 6, 5, 6, 5, 6, 5, 6, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 1, 28, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 15, 16, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 11, 0, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 0, 0, 0, 0, 0, 0, 0, 2, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 18, 13, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 0, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 17, 6, 5, 1, 0, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 9, 29, 30, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 17, 6, 5, 6, 5, 1, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 15, 16, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 23, 21, 0, 0, 0, 0, 0, 10, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 17, 5, 6, 1, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 17, 6, 5, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 24, 23, 24, 23, 24, 23, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 32, 36, 0, 0, 0, 0, 0, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 8, 7, 8, 8, 7, 8, 7, 8, 7, 8, 7, 8, 3, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 9, 0, 0, 0, 0, 0, 0, 0, 34, 20, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 20, 13, 14, 19, 31, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 0, 0, 0, 0, 22, 21, 10, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 11, 31, 32, 36, 0, 0, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 17, 18, 11, 0, 0, 0, 0, 0, 22, 23, 24, 20, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 19, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 4, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 3, 35, 36, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 3, 0, 0, 0, 0, 0, 31, 32, 31, 20, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 11, 0, 22, 23, 24, 23, 24, 23, 24, 23, 21, 0, 0, 0, 20, 13, 14, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 31, 32, 33, 10, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 16, 9, 31, 32, 35, 36, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0 ],
[ 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 31, 32, 36, 0, 0, 0, 0, 0, 35, 36, 35, 20, 15, 16, 15, 16, 15, 16, 15, 13, 14, 8, 7, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 3, 0, 31, 32, 28, 36, 33, 35, 27, 31, 32, 0, 0, 0, 20, 15, 16, 19, 23, 24, 23, 24, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 30, 0, 28, 0, 0, 0, 0, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 23, 24, 23, 21, 0, 35, 36, 0, 12, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 9, 35, 36, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 12, 13, 14, 13, 13, 14, 11, 31, 32, 35, 36, 33, 35, 31, 32, 12, 15, 16, 15, 15, 16, 9, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 11, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 13, 14, 13, 15, 16, 15, 16, 9, 31, 32, 31, 32, 35, 36, 35, 36, 31, 32, 0, 35, 36, 27, 0, 0, 0, 26, 35, 36, 0, 0, 0, 20, 13, 14, 19, 32, 33, 31, 32, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 2, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 23, 24, 21, 0, 0, 35, 33, 31, 32, 36, 0, 0, 0, 0, 10, 15, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 7, 8, 7, 8, 7, 3, 35, 36, 0, 0, 0, 0, 35, 36, 4, 8, 7, 8, 7, 8, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0 ],
[ 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 3, 0, 0, 0, 0, 22, 23, 24, 23, 24, 23, 24, 20, 15, 16, 15, 16, 15, 16, 15, 13, 14, 13, 14, 11, 35, 36, 35, 36, 0, 0, 0, 0, 35, 36, 0, 0, 0, 28, 0, 0, 0, 27, 0, 0, 0, 0, 22, 20, 15, 16, 19, 36, 0, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 34, 0, 26, 0, 0, 0, 0, 29, 30, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 23, 21, 0, 0, 0, 0, 0, 35, 36, 0, 0, 0, 0, 0, 35, 36, 0, 0, 0, 0, 0, 4, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 32, 35, 33, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 35, 31, 32, 35, 33, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 31, 32, 31, 32, 36, 0, 0, 0, 0, 35, 20, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 15, 16, 15, 16, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 28, 0, 0, 0, 0, 31, 20, 13, 14, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 2, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 18, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 31, 32, 12, 13, 14, 13, 14, 13, 14, 13, 14, 14, 9, 35, 31, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 13, 14, 16, 15, 16, 15, 16, 15, 16, 15, 16, 11, 35, 36, 35, 36, 0, 0, 0, 0, 0, 34, 20, 16, 15, 16, 15, 16, 15, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 0, 0, 0, 27, 0, 0, 0, 0, 35, 20, 15, 16, 19, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 0, 10, 9, 29, 30, 0, 0, 0, 0, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 0, 34, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 10, 15, 16, 15, 16, 15, 16, 15, 16, 16, 11, 0, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 30, 0, 0, 34, 0, 0, 34, 0, 0, 0, 29, 30, 0, 0, 0, 0, 0, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 0, 28, 0, 0, 0 ],
[ 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 15, 16, 8, 7, 8, 7, 8, 7, 8, 7, 8, 3, 0, 0, 0, 0, 0, 0, 22, 24, 24, 23, 20, 14, 13, 14, 13, 14, 13, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 13, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 26, 0, 0, 2, 5, 6, 18, 13, 14, 19, 23, 24, 23, 24, 23, 24, 23, 24, 21, 0, 0, 0, 0, 0, 2, 5, 6, 5, 6, 5, 6, 1, 0, 0, 0, 10, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 11, 0, 0, 0, 22, 23, 24, 23, 24, 23, 24, 21, 0, 0, 0, 22, 23, 24, 23, 24, 23, 24, 23, 24, 23, 24, 23, 24, 21, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 13, 14, 13, 14, 13, 14, 13, 14, 14, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 22, 23, 24, 23, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 23, 24, 23, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 27, 0, 25, 0 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 19, 23, 24, 23, 24, 21, 0, 0, 0, 0, 0, 0, 0, 35, 4, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 27, 0, 0, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 14, 19, 32, 0, 0, 0, 0, 0, 10, 17, 18, 13, 14, 17, 18, 11, 0, 0, 0, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 31, 20, 13, 14, 13, 14, 19, 32, 0, 0, 0, 0, 35, 36, 31, 32, 35, 36, 31, 32, 2, 5, 6, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 18, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 15, 16, 15, 16, 15, 16, 15, 16, 16, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 34, 29, 30, 0, 0, 0, 0, 34, 26, 29, 30, 0, 0, 0, 0, 29, 30, 27, 34, 0, 0, 0, 0, 35, 36, 31, 20, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 19, 32, 31, 32, 0, 0, 0, 0, 34, 0, 0, 0, 0, 0, 25, 29, 30, 0, 0, 27, 34, 28, 34, 26, 29 ],
[ 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 19, 32, 31, 32, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 35, 36, 35, 35, 28, 0, 36, 36, 0, 31, 32, 0, 35, 36, 35, 36, 27, 31, 32, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 28, 34, 0, 10, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 16, 19, 36, 0, 0, 0, 0, 0, 12, 13, 14, 15, 16, 13, 14, 9, 0, 0, 0, 10, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 16, 15, 16, 15, 16, 15, 16, 11, 0, 0, 0, 35, 20, 15, 16, 15, 16, 19, 36, 0, 0, 0, 0, 0, 0, 35, 36, 0, 0, 35, 36, 4, 7, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 3, 0, 0, 22, 23, 24, 21, 0, 0, 0, 0, 29, 30, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 8, 7, 8, 7, 8, 7, 8, 7, 8, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 1, 0, 0, 0, 0, 0, 0, 35, 20, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 19, 36, 35, 36, 0, 0, 0, 0, 2, 5, 6, 5, 6, 6, 5, 6, 5, 6, 5, 6, 5, 5, 6, 5, 6 ],
[ 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 19, 36, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 35, 36, 0, 0, 0, 0, 0, 28, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 2, 5, 6, 5, 6, 18, 13, 14, 14, 13, 14, 13, 14, 13, 14, 13, 14, 14, 19, 0, 0, 0, 0, 0, 0, 10, 15, 16, 13, 14, 15, 16, 11, 0, 0, 0, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 20, 13, 14, 13, 14, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 19, 31, 32, 0, 0, 35, 36, 35, 36, 0, 0, 0, 2, 5, 6, 5, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 31, 32, 12, 13, 14, 13, 9, 36, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 11, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 19, 0, 0, 0, 0, 0, 0, 0, 10, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13 ],
[ 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 29, 30, 12, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 15, 16, 16, 19, 0, 0, 0, 0, 0, 0, 12, 13, 14, 15, 16, 13, 14, 9, 0, 0, 0, 10, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 16, 15, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 20, 15, 16, 15, 16, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 19, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 13, 14, 13, 14, 9, 34, 0, 0, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 4, 7, 8, 7, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 13, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 13, 14, 16, 9, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 19, 0, 0, 0, 0, 0, 0, 0, 12, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15 ],
[ 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 19, 30, 0, 0, 0, 0, 0, 0, 34, 0, 0, 0, 0, 0, 0, 0, 0, 29, 30, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 0, 28, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 2, 5, 6, 5, 6, 18, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 14, 19, 0, 0, 0, 0, 0, 0, 10, 15, 16, 13, 14, 15, 16, 11, 0, 0, 0, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 9, 0, 0, 0, 0, 20, 13, 14, 13, 14, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 15, 16, 15, 16, 17, 6, 5, 6, 5, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 20, 13, 19, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 15, 16, 7, 3, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 19, 0, 0, 0, 0, 0, 0, 0, 10, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13 ],
[ 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 19, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 18, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 16, 19, 0, 0, 0, 0, 0, 0, 12, 13, 14, 15, 16, 13, 14, 9, 0, 0, 0, 10, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 11, 0, 0, 0, 0, 20, 15, 16, 15, 16, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 13, 14, 13, 14, 15, 16, 15, 16, 15, 16, 9, 0, 0, 0, 0, 29, 30, 0, 0, 0, 35, 20, 14, 19, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 19, 0, 0, 0, 0, 0, 0, 0, 12, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15 ],
[ 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 19, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 14, 19, 0, 0, 0, 0, 0, 0, 10, 15, 16, 13, 14, 15, 16, 11, 0, 0, 0, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 9, 0, 0, 0, 0, 20, 13, 14, 13, 14, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 15, 16, 15, 16, 13, 14, 13, 14, 13, 14, 11, 0, 0, 2, 5, 6, 1, 0, 0, 0, 0, 20, 13, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 15, 16, 13, 14, 15, 16, 13, 14, 15, 16, 13, 14, 15, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 19, 0, 0, 0, 0, 0, 0, 0, 10, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13 ]
], # tiles list: uma lista contendo os tiles do mapa
'ct': 24, # collision tiles: os tiles que sao colididos pelo personagem, este numero indica o ultimo tile colidivel na imagem do tileset, ou seja, os tiles de 1 a 24 sao colidiveis
'mw': 300, # map width: largura do mapa em numero de tiles (numero de colunas)
'mh': 30, # map height: altura do mapa em numero de tiles (numero de linhas)
'tw': 64, # tile width: largura do tile em pixels
'th': 64 # tile height: altura do tile em pixels
}
STAGE2 = {
'tl': [
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 23, 24, 21, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 33, 35, 36, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
[ 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0 ],
[ 30, 0, 0, 0, 0, 34, 28, 34, 0, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0 ],
[ 6, 5, 6, 5, 6, 5, 6, 5, 6, 6, 5, 6, 1, 0, 0, 0, 22, 23, 24, 23, 24, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0 ],
[ 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 13, 14, 9, 0, 0, 0, 35, 33, 31, 32, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 15, 16, 11, 0, 0, 0, 0, 0, 35, 36, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0 ],
[ 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 9, 34, 0, 0, 0, 0, 0, 0, 0, 0, 34, 0, 0, 26, 0, 0, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 0, 28, 0, 0, 0 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 17, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 27, 0, 25, 0 ],
[ 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 0, 29, 30, 0, 0, 0, 0, 0, 25, 29, 30, 0, 0, 27, 34, 28, 34, 26, 29 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 11, 0, 29, 30, 0, 0, 0, 0, 34, 0, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 30, 2, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 5, 6, 5, 6 ],
[ 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 17, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 0, 0, 0, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 0, 0, 0, 0, 0, 0, 0, 2, 6, 5, 18, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 16 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 34, 0, 29, 30, 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 24, 21, 0, 0, 0, 0, 0, 0, 22, 23, 24, 23, 24, 23, 21, 0, 0, 0, 0, 0, 0, 10, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15 ],
[ 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 17, 6, 5, 6, 5, 1, 0, 0, 0, 0, 0, 34, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 0, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 0, 0, 0, 0, 0, 0, 0, 0, 31, 32, 35, 36, 33, 0, 0, 0, 0, 0, 0, 0, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 16, 15, 16, 15, 16, 9, 0, 0, 0, 0, 2, 5, 6, 1, 0, 0, 0, 0, 29, 30, 0, 0, 0, 0, 2, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7 ],
[ 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 14, 13, 14, 13, 14, 11, 0, 0, 0, 0, 12, 14, 13, 11, 0, 0, 0, 2, 5, 5, 6, 1, 0, 0, 12, 14, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 24, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 31, 32, 10, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 16, 15, 16, 15, 16, 9, 0, 0, 0, 0, 10, 16, 15, 9, 0, 0, 0, 12, 14, 14, 13, 11, 0, 0, 10, 16, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14 ],
[ 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 14, 13, 14, 13, 14, 11, 0, 0, 0, 0, 12, 14, 13, 11, 0, 0, 0, 10, 16, 16, 15, 9, 0, 0, 12, 14, 11, 0, 0, 0, 0, 0, 29, 30, 0, 0, 0, 34, 0, 0, 0, 34, 0, 0, 0, 0, 0, 0, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 24, 23, 24, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 16, 15, 16, 15, 16, 9, 0, 0, 0, 0, 10, 16, 15, 9, 0, 0, 0, 12, 14, 14, 13, 11, 0, 0, 10, 16, 9, 0, 0, 22, 23, 24, 23, 24, 23, 24, 23, 24, 23, 24, 23, 24, 23, 24, 23, 24, 23, 24, 23, 24, 23, 24, 23, 24, 23, 24, 21, 0, 0, 0, 0, 0, 0, 2, 6, 5, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 31, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 31, 32, 31, 10, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16 ],
[ 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 12, 16, 15, 11, 0, 0, 0, 10, 16, 16, 15, 9, 0, 0, 12, 16, 11, 0, 0, 0, 31, 32, 0, 35, 36, 35, 36, 0, 31, 32, 0, 33, 35, 36, 0, 33, 31, 32, 31, 32, 0, 35, 36, 0, 33, 31, 32, 0, 0, 0, 0, 0, 0, 4, 14, 14, 15, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 0, 0, 0, 0, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 35, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14 ],
[ 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 10, 14, 13, 9, 0, 0, 0, 12, 16, 16, 15, 11, 0, 0, 10, 14, 9, 0, 0, 0, 35, 36, 0, 0, 0, 0, 0, 0, 35, 36, 0, 0, 0, 0, 0, 0, 35, 36, 35, 36, 0, 0, 0, 0, 0, 35, 36, 0, 0, 0, 0, 0, 0, 31, 20, 16, 19, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 23, 24, 23, 21, 0, 0, 0, 22, 23, 24, 23, 24, 23, 24, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14 ],
[ 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 12, 16, 15, 11, 0, 0, 0, 10, 14, 14, 13, 9, 0, 0, 12, 16, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 20, 14, 19, 36, 0, 0, 22, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 29, 30, 0, 0, 0, 0, 0, 0, 2, 6, 5, 6, 5, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 33, 35, 36, 0, 0, 0, 35, 0, 33, 36, 31, 32, 33, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8 ],
[ 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 10, 14, 13, 9, 0, 0, 0, 12, 16, 16, 15, 11, 0, 0, 10, 14, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 16, 19, 0, 0, 0, 31, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 2, 6, 5, 6, 1, 0, 0, 0, 0, 4, 14, 14, 13, 7, 8, 3, 0, 0, 0, 0, 0, 0, 0, 34, 0, 0, 29, 30, 0, 0, 0, 0, 34, 0, 0, 0, 29, 30, 0, 0, 0, 0, 0, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 31, 32, 10, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13 ],
[ 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 16, 15, 16, 15, 16, 9, 0, 0, 0, 0, 12, 16, 15, 11, 0, 0, 0, 10, 14, 14, 13, 9, 0, 0, 12, 16, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 14, 19, 0, 0, 0, 35, 36, 0, 0, 0, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 27, 0, 0, 0, 0, 4, 8, 14, 15, 3, 0, 0, 0, 0, 31, 20, 16, 19, 32, 31, 32, 0, 0, 0, 0, 0, 0, 0, 2, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 4, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7, 8, 7 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 14, 13, 14, 13, 14, 11, 0, 0, 0, 0, 12, 14, 13, 11, 0, 0, 0, 12, 16, 16, 15, 11, 0, 0, 12, 14, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 16, 19, 0, 0, 0, 0, 0, 0, 0, 0, 22, 21, 0, 0, 0, 0, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 6, 5, 6, 1, 0, 0, 31, 32, 20, 19, 32, 0, 0, 0, 0, 35, 20, 14, 19, 36, 35, 36, 0, 0, 0, 0, 0, 0, 0, 10, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 32, 20, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 13 ],
[ 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 16, 15, 16, 15, 16, 9, 0, 0, 0, 0, 10, 16, 15, 9, 0, 0, 0, 10, 16, 16, 15, 9, 0, 0, 10, 16, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 14, 19, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 0, 0, 0, 22, 23, 24, 23, 24, 23, 24, 21, 34, 0, 0, 26, 0, 0, 0, 0, 34, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 30, 34, 0, 0, 28, 0, 0, 0, 0, 0, 0, 0, 34, 0, 0, 0, 0, 29, 30, 0, 0, 0, 0, 29, 30, 0, 0, 34, 0, 29, 30, 0, 0, 0, 0, 0, 4, 14, 14, 15, 3, 0, 0, 35, 36, 20, 19, 36, 0, 0, 0, 0, 0, 20, 16, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 17, 6, 1, 34, 0, 0, 0, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 20, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 15 ],
[ 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 14, 13, 14, 13, 14, 11, 0, 0, 0, 0, 12, 16, 15, 11, 0, 0, 0, 12, 14, 14, 13, 11, 0, 0, 12, 14, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 16, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 31, 32, 35, 36, 36, 35, 2, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 5, 6, 1, 0, 0, 0, 31, 20, 16, 19, 32, 0, 0, 0, 0, 20, 19, 0, 0, 0, 0, 0, 0, 20, 14, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 17, 5, 6, 1, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 13, 14, 13, 14, 13, 14, 13, 13, 14 ],
[ 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 16, 15, 16, 15, 16, 9, 0, 0, 0, 0, 10, 14, 13, 9, 0, 0, 0, 10, 16, 16, 15, 9, 0, 0, 10, 16, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 14, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 0, 0, 0, 0, 10, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 35, 20, 14, 19, 36, 0, 0, 0, 0, 20, 19, 0, 0, 0, 0, 0, 0, 20, 16, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 13, 14, 13, 9, 0, 28, 0, 0, 0, 34, 0, 0, 0, 0, 0, 0, 0, 34, 0, 0, 0, 29, 30, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 15 ],
[ 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 12, 16, 15, 11, 0, 0, 0, 12, 16, 16, 15, 11, 0, 0, 12, 16, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 16, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 20, 16, 19, 0, 0, 0, 0, 0, 20, 19, 0, 0, 0, 0, 0, 0, 20, 14, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 15, 16, 15, 17, 6, 5, 6, 5, 6, 1, 0, 0, 0, 0, 0, 0, 0, 22, 23, 24, 23, 24, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 13 ],
[ 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 10, 16, 15, 9, 0, 0, 0, 10, 14, 14, 13, 9, 0, 0, 10, 14, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 14, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 20, 14, 19, 0, 0, 0, 0, 0, 20, 19, 0, 0, 0, 0, 0, 0, 20, 16, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 7, 8, 7, 8, 7, 8, 3, 0, 0, 0, 0, 0, 0, 0, 35, 31, 32, 31, 32, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14 ],
[ 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 12, 14, 13, 11, 0, 0, 0, 12, 16, 16, 15, 11, 0, 0, 12, 16, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 14, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 20, 16, 19, 0, 0, 0, 0, 0, 20, 19, 0, 0, 0, 0, 0, 0, 20, 14, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 19, 11, 31, 22, 23, 24, 21, 32, 0, 0, 0, 0, 0, 0, 0, 0, 35, 36, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 13 ],
[ 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 10, 16, 15, 9, 0, 0, 0, 10, 14, 14, 13, 9, 0, 0, 10, 14, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 16, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 9, 0, 0, 0, 0, 20, 14, 19, 0, 0, 0, 0, 0, 20, 19, 0, 0, 0, 0, 0, 0, 20, 16, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 19, 11, 35, 35, 36, 31, 32, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 13, 14, 13, 14, 13, 14, 13 ],
[ 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 12, 16, 15, 11, 0, 0, 0, 12, 16, 16, 15, 11, 0, 0, 12, 16, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 14, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 11, 0, 0, 0, 0, 20, 16, 19, 0, 0, 0, 0, 0, 20, 19, 0, 0, 0, 0, 0, 0, 20, 14, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 15, 16, 19, 11, 0, 0, 0, 35, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 13, 14, 13, 14, 13, 14, 13, 13, 14, 13, 14 ]
], # tiles list: uma lista contendo os tiles do mapa
'ct': 24, # collision tiles: os tiles que sao colididos pelo personagem, este numero indica o ultimo tile colidivel na imagem do tileset, ou seja, os tiles de 1 a 24 sao colidiveis
'mw': 300, # map width: largura do mapa em numero de tiles (numero de colunas)
'mh': 39, # map height: altura do mapa em numero de tiles (numero de linhas)
'tw': 64, # tile width: largura do tile em pixels
'th': 64 # tile height: altura do tile em pixels
}
# Sprite-sheet configuration for the player character (the rabbit).
PLAYER = {
    's' : "res/img/rabbit.png", # sprites: image file containing the sprite sheet
    'ss': [ 0 ], # state stop: sprite indices while the player stands still
    'sr': [ 1, 2, 3, 4, 5, 6 ], # state run: sprite indices while the player runs
    'sj': [ 7, 8 ], # state jump: sprite indices while the player jumps
    'sf': [ 9, 10 ], # state falling: sprite indices while the player falls
    'f' : 11, # firing: sprite index of the rabbit shooting
    'sw': 137, # sprite width: width of one sprite in pixels
    'sh': 134, # sprite height: height of one sprite in pixels
    'nha': [ 45, 30, 45, 10 ] # non hit area: margins of the sprite that do not collide; see the GameObject class for details
}
# Sprite-sheet configuration for the gun's muzzle-flash animation.
FIRE = {
    's' : "res/img/fire.png", # sprites: image file containing the sprite sheet
    'sf': [ 0, 1, 0, 1, 0 ], # sprite fire: sprite indices of the muzzle-flash animation
    'sw': 22, # sprite width: width of one sprite in pixels
    'sh': 22 # sprite height: height of one sprite in pixels
}
# Sprite configuration for a bullet projectile.
BULLET = {
    's' : "res/img/bullet.png", # sprites: the image file
    'sw': 11, # sprite width: width of the sprite in pixels
    'sh': 5 # sprite height: height of the sprite in pixels
}
# Sprite-sheet configuration for the lumberjack enemy.
LUMBERJACK = {
    's' : "res/img/lumberjack.png", # sprites: image file containing the sprite sheet
    'sr': [ 3, 2, 3, 1 ], # state run: sprite indices while the enemy runs
    'sd': [ 0 ], # state dead: sprite indices while the enemy dies (original comment said "run" — copy-paste slip)
    'sw': 140, # sprite width: width of one sprite in pixels
    'sh': 160, # sprite height: height of one sprite in pixels
    'nha': [ 30, 25, 30, 7 ] # non hit area: margins of the sprite that do not collide; see the GameObject class for details
}
# Sprite-sheet configuration for the hunter enemy.
HUNTER = {
    's' : "res/img/hunter.png", # sprites: image file containing the sprite sheet
    'ss': [ 0 ], # state stop: sprite indices while the enemy stands still
    'sd': [ 2 ], # state dead: sprite indices while the enemy dies
    'f' : 1, # firing: sprite index of the enemy shooting
    'sw': 100, # sprite width: width of one sprite in pixels
    'sh': 140, # sprite height: height of one sprite in pixels
    'nha': [ 25, 5, 25, 0 ] # non hit area: margins of the sprite that do not collide; see the GameObject class for details
}
# Sprite-sheet configuration for the blood-splash effect.
BLOOD = {
    's' : "res/img/blood.png", # sprites: image file containing the sprite sheet
    'sb': [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12 ], # sprite blood: sprite indices of the blood animation
    'sw': 228, # sprite width: width of one sprite in pixels
    'sh': 105, # sprite height: height of one sprite in pixels
}
def loadTileList(filename, tile_width, tile_height, *alpha):
    """Load an image into memory and slice it into a list of tile surfaces.

    filename    -- path of the image file containing the tiles
    tile_width  -- width of one tile in pixels
    tile_height -- height of one tile in pixels
    *alpha      -- optional: a single integer with the transparency level,
                   forwarded to loadImage
    Returns a list of pygame sub-surfaces, one per tile, in row-major order.
    """
    image = loadImage(filename, *alpha)
    image_width, image_height = image.get_size()
    tile_list = []
    # Floor division so the code also runs on Python 3, where int / int
    # yields a float and would make range() raise TypeError.
    for tile_y in range(image_height // tile_height):
        for tile_x in range(image_width // tile_width):
            rect = (tile_x * tile_width, tile_y * tile_height, tile_width, tile_height)
            tile_list.append(image.subsurface(rect))
    return tile_list
def loadImage(filename, *alpha):
    """Load a single image with magenta as its transparent color key.

    filename -- path of the image file
    *alpha   -- optional: a single integer (0-255) with the transparency
                level; defaults to 255 (fully opaque, except the color key)
    """
    # Idiomatic truthiness check instead of calling alpha.__len__() directly.
    _alpha = alpha[0] if alpha else 255
    image = pygame.image.load(filename).convert()
    image.set_colorkey(MAGENTA)  # magenta pixels are treated as transparent
    image.set_alpha(_alpha)  # fully opaque unless a custom alpha was given
    return image
# Load a single image with no color key: things like the background can stay
# fully opaque since they need no transparency, which makes 'blit' faster.
# param: filename - path of the image file
# param-optional: *alpha - accepted for signature compatibility with
#                 loadImage, but ignored here (the image stays opaque)
def loadOpaqueImage(filename, *alpha):
    return pygame.image.load(filename).convert()
# Strip the color key from selected map tiles so 'blit' runs faster
# (tiles that need no transparency skip the per-pixel color-key test).
# param: tileList - the list with the tile surfaces
# param: tileIndexs - list of 1-based integers: the tiles that lose their color key
def optimizeSurfacesToBlit(tileList, tileIndexs):
    for index in tileIndexs:
        surface = tileList[index - 1]  # indices in tileIndexs are 1-based
        surface.set_colorkey(None)
| 387.942708
| 1,131
| 0.405813
| 21,553
| 74,485
| 1.400872
| 0.010996
| 0.886431
| 1.277283
| 1.639188
| 0.942106
| 0.930414
| 0.912993
| 0.906667
| 0.899149
| 0.890173
| 0
| 0.517576
| 0.3068
| 74,485
| 192
| 1,132
| 387.942708
| 0.067186
| 0.043915
| 0
| 0.121951
| 0
| 0
| 0.003022
| 0.000309
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02439
| false
| 0
| 0.006098
| 0.006098
| 0.04878
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
f0948af62201950b2a793eacd20b21a6b8a430b8
| 114
|
py
|
Python
|
python/gigasecond/gigasecond.py
|
RockLloque/Exercism
|
c437dd6cf3246576900c76c2dba775b6647e3347
|
[
"MIT"
] | null | null | null |
python/gigasecond/gigasecond.py
|
RockLloque/Exercism
|
c437dd6cf3246576900c76c2dba775b6647e3347
|
[
"MIT"
] | null | null | null |
python/gigasecond/gigasecond.py
|
RockLloque/Exercism
|
c437dd6cf3246576900c76c2dba775b6647e3347
|
[
"MIT"
] | null | null | null |
from datetime import timedelta
def add_gigasecond(birth_date):
    """Return the moment exactly one gigasecond (10**9 seconds) after *birth_date*."""
    one_gigasecond = timedelta(seconds=10 ** 9)
    return birth_date + one_gigasecond
| 38
| 51
| 0.780702
| 16
| 114
| 5.375
| 0.8125
| 0.209302
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030612
| 0.140351
| 114
| 3
| 51
| 38
| 0.846939
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
f0c7d2ef8bac38ced40d50a43ac6330f1dc35cff
| 244,727
|
py
|
Python
|
fixed_scrapings.py
|
timmalstead/Night_in_back_end
|
ae3a462c002671d4ac19db3b1a14311a40255779
|
[
"MIT"
] | null | null | null |
fixed_scrapings.py
|
timmalstead/Night_in_back_end
|
ae3a462c002671d4ac19db3b1a14311a40255779
|
[
"MIT"
] | 2
|
2019-11-22T19:31:00.000Z
|
2019-11-24T00:55:25.000Z
|
fixed_scrapings.py
|
timmalstead/Night_in_back_end
|
ae3a462c002671d4ac19db3b1a14311a40255779
|
[
"MIT"
] | 4
|
2019-11-19T01:01:10.000Z
|
2019-11-22T19:25:17.000Z
|
import models
from peewee import *
data_source = [{"url": "http://publicdomainmovie.net/movie/night-of-the-living-dead-3",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Night_of_the_Living_Dead_pub.jpg",
"title": "Night of the Living Dead",
"year": 1968},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplin-festival",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Charlie_Chaplin.jpg",
"title": "Charlie Chaplin Festival",
"year": 1917},
{"url": "http://publicdomainmovie.net/movie/his-girl-friday",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=HisgirlFriday.jpg",
"title": "His Girl Friday",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/sita-sings-the-blues-0",
"genre": "Animation",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Sita_STB_Poster.jpg",
"title": "Sita Sings the Blues",
"year": 2008},
{"url": "http://publicdomainmovie.net/movie/reefer-madness",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=ReeferMadness_14.jpg",
"title": "Reefer Madness",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/plan-9-from-outer-space-0",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Plan_9_Alternative_poster.jpg",
"title": "Plan 9 from Outer Space",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/abraham-lincoln",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Abraham_Lincoln_%2528D._W._Griffith%252C_1930%2529.jpg",
"title": "Abraham Lincoln",
"year": 1930},
{"url": "http://publicdomainmovie.net/movie/gullivers-travels",
"genre": "Animation",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Gulliverstravelsopening1939.JPG",
"title": "Gullivers Travels",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/house-on-haunted-hill",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=House_on_Haunted_Hill.jpg",
"title": "House on Haunted Hill",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-vagabond",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Vagabond_%25281916%2529.jpg",
"title": "Charlie Chaplins The Vagabond",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/suddenly",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Suddenly_%25281954%2529.jpg",
"title": "Suddenly",
"year": 1954},
{"url": "http://publicdomainmovie.net/movie/voyage-to-the-planet-of-prehistoric-women",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Voyage_to_the_Planet_of_Prehistoric_Women.jpg",
"title": "Voyage to the Planet of Prehistoric Women",
"year": 1967},
{"url": "http://publicdomainmovie.net/movie/dressed-to-kill-1",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=dressed_to_kill",
"title": "Dressed To Kill",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-pawnshop",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=%2527The_Pawnshop%2527.jpg",
"title": "Charlie Chaplins The Pawnshop",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/my-man-godfrey",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=My_Man_Godfrey_%25281936%2529_1.jpg",
"title": "My Man Godfrey",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/bloody-pit-of-horror",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Mickey_Hargitay_1964.JPG",
"title": "Bloody Pit of Horror",
"year": 1965},
{"url": "http://publicdomainmovie.net/movie/my-favorite-brunette-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=My_Favorite_Brunette_%25281947%2529_1.jpg",
"title": "My Favorite Brunette",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-rink",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Rink_%2528poster%2529.jpg",
"title": "Charlie Chaplins The Rink",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/lady-frankenstein",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=Lady_Frankenstein",
"title": "Lady Frankenstein",
"year": 1971},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-floorwalker",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Floorwalker_%2528poster%2529.jpg",
"title": "Charlie Chaplins The Floorwalker",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-cure",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Cure_1917_Poster.jpg",
"title": "Charlie Chaplins The Cure",
"year": 1917},
{"url": "http://publicdomainmovie.net/movie/the-lost-world",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Lost_World_%25281925%2529_-_film_poster.jpg",
"title": "The Lost World",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/the-stranger",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Movie-Mystery-Magazine-July-1946.jpg",
"title": "The Stranger",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/doa",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/wikimedia.php?id=DOA_title.jpg",
"title": "D.O.A.",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/jungle-book",
"genre": "Fantasy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Jungle_Book_1942.jpg",
"title": "Jungle Book",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/meet-john-doe",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_Meet_John_Doe_01.jpg",
"title": "Meet John Doe",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-a-burlesque-on-carmen",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Burlesque_on_Carmen.jpg",
"title": "Charlie Chaplins A Burlesque On Carmen",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-charlie-shanghaied",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Shanghaied_%25281915_film_SW_poster%2529.jpg",
"title": "Charlie Chaplins Charlie Shanghaied",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/invasion-of-the-bee-girls",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=InvasionOfTheBeeGirls",
"title": "Invasion of the Bee girls",
"year": 1973},
{"url": "http://publicdomainmovie.net/movie/royal-wedding",
"genre": "Musical",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Fred_Astaire_and_Jane_Powell_in_Royal_Wedding.jpg",
"title": "Royal Wedding",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-charlies-recreation",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Recreation2.jpg",
"title": "Charlie Chaplins Charlies Recreation",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/the-phantom-of-the-opera-0",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Phantom_of_the_Opera_%25281925_film%2529.jpg",
"title": "The Phantom of the Opera",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/das-kabinett-des-doktor-caligari-the-cabinet-of-dr-caligari",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Das_Cabinet_des_Dr._Caligari.JPG",
"title": "The Cabinet of Dr. Caligari",
"year": 1919},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-tillies-punctured-romance",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=CC_1914_11_14_TilliesPuncturedRomance",
"title": "Charlie Chaplins Tillies Punctured Romance",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/impact",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Ella_Raines-Brian_Donlevy_in_Impact.jpg",
"title": "Impact",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/little-princess-the",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Little_Princess_4.JPG",
"title": "The Little Princess",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-count",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Count.jpg",
"title": "Charlie Chaplins The Count",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/the-three-stooges-color-craziness",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Three_Stooges_1937.jpg",
"title": "The Three Stooges - Color Craziness",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/last-woman-on-earth",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/wikimedia.php?id=BetsyJonesMoreland8.jpg",
"title": "Last Woman On Earth",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/the-iron-mask-0",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Ironmaskposter.jpg",
"title": "The Iron Mask",
"year": 1929},
{"url": "http://publicdomainmovie.net/movie/the-flying-deuces",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Flying_Deuces_%25281939%2529_1.jpg",
"title": "The Flying Deuces",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-one-am",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=One_A.M._poster.jpg",
"title": "Charlie Chaplins One A.M.",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-kids-auto-race-at-venice",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=CC_Kid_Auto_Races_at_Venice_1914_%2528poster%2529.jpg",
"title": "Charlie Chaplins Kids Auto Race At Venice",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/penny-serenade",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Cary_Grant-Irene_Dunne_in_Penny_Serenade.jpg",
"title": "Penny Serenade",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/santa-fe-trail",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Santa_Fe_Trail_De_Havilland_Flynn.jpg",
"title": "Santa Fe Trail",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/horror-express-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Horror-Express_Schriftzug.png",
"title": "Horror Express",
"year": 1972},
{"url": "http://publicdomainmovie.net/movie/sherlock-holmes-and-the-secret-weapon",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Sherlock_Holmes_and_the_Secret_Weapon_2.jpg",
"title": "Sherlock Holmes and the Secret Weapon",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/mr-robinson-crusoe",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Mr-Robinson-Crusoe-1932.jpg",
"title": "Mr. Robinson Crusoe",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/mclintock",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=McLintock%2521_4.jpg",
"title": "Mclintock",
"year": 1963},
{"url": "http://publicdomainmovie.net/movie/horror-express",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Horror_express_gip.jpg",
"title": "Horror Express",
"year": 1972},
{"url": "http://publicdomainmovie.net/movie/carnival-of-souls",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=COS_09.JPG",
"title": "Carnival of Souls",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/the-birth-of-a-nation",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Birth_of_a_Nation_theatrical_poster.jpg",
"title": "The Birth of a Nation",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/new-adventures-of-tarzan",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=NewAdventuresTarzan_title.png",
"title": "New Adventures of Tarzan",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-making-a-living",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Making_a_Living.webm",
"title": "Charlie Chaplins Making A Living",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-mabels-strange-predicament",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Mabel%2527sStrangePredicament-1.jpg",
"title": "Charlie Chaplins Mabels Strange Predicament",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/bronenosets-potyomkin-battleship-potemkin",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Vintage_Potemkin.jpg",
"title": "Battleship Potemkin",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-good-for-nothing",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=His_New_Profession.jpg",
"title": "Charlie Chaplins The Good For Nothing",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/tarzan-and-the-green-goddess",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=NewAdventuresTarzan_title.png",
"title": "Tarzan and the Green Goddess",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-a-fair-exchange",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Getting_acquainted.jpg",
"title": "Charlie Chaplins A Fair Exchange",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/disorder-in-the-court",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Lobby36disordercourt.jpg",
"title": "Disorder in the Court",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/quicksand",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Quicksandrooney.jpg",
"title": "Quicksand",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/scarlet-street-0",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Joan_Bennett_in_Scarlet_Street_%25282%2529.jpg",
"title": "Scarlet Street",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/20000-leagues-under-the-sea",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_20%252C000_Leagues_under_the_Sea_%25281916%2529.jpg",
"title": "20,000 Leagues Under the Sea",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/night-of-the-living-dead-2",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Night_of_the_Living_Dead_affiche.jpg",
"title": "Night of the Living Dead",
"year": 1968},
{"url": "http://publicdomainmovie.net/movie/city-of-the-deadhorror-hotel",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=Horror_Hotel",
"title": "City of the Dead/Horror Hotel",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/the-phantom-creeps",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=ThePhantomCreeps",
"title": "The Phantom Creeps",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/captain-kidd",
"genre": "Action",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Captain%2BKidd%2B%25281945%2529%2B1.jpg",
"title": "Captain Kidd",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/the-ghoul",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=TheGhoul",
"title": "The Ghoul",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/jack-and-the-beanstalk",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=A%2526cjack.jpg",
"title": "Jack and the Beanstalk",
"year": 1952},
{"url": "http://publicdomainmovie.net/movie/mclintock-0",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=mclintok_widescreen",
"title": "McLintock!",
"year": 1963},
{"url": "http://publicdomainmovie.net/movie/rene-clairs-and-then-there-were-none",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=And_Then_There_Were_None_1945.jpg",
"title": "Rene Clairs - And Then There Were None",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/too-late-for-tears",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Toolatefortears.jpg",
"title": "Too Late for Tears",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/killers-from-space",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=Killers_from_space",
"title": "Killers From Space",
"year": 1954},
{"url": "http://publicdomainmovie.net/movie/rock-rock-rock",
"genre": "Musical",
"image": "http://publicdomainmovie.net/image.php?id=rock_rock_rock",
"title": "Rock, Rock, Rock",
"year": 1956},
{"url": "http://publicdomainmovie.net/movie/gorilla-the",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Gorilla_%25281939%2529_1.jpg",
"title": "The Gorilla",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/angel-and-the-badman",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Angel_badman.jpg",
"title": "Angel and the Badman",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/teenagers-from-outer-space",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Teenagersouterspace.jpg",
"title": "Teenagers from Outer Space",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/attack-of-the-giant-leeches",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=cco_attackofthegiantleeches",
"title": "Attack of the Giant Leeches",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/the-lady-vanishes",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The-Lady-Vanishes-1938.jpg",
"title": "The Lady Vanishes",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/brideless-groom",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Bridelessgroom_1sht.jpg",
"title": "Brideless Groom",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/the-amazing-adventure",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Amazing_Adventure_%25281936%2529_01.png",
"title": "The Amazing Adventure",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/farewell-to-arms",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_A_Farewell_to_Arms_%25281932%2529_01.jpg",
"title": "Farewell to Arms",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/cyrano-de-bergerac",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Jos%25C3%25A9%2BFerrer-Mala%2BPowers%2Bin%2BCyrano%2Bde%2BBergerac%2B1.jpg",
"title": "Cyrano De Bergerac",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/dishonored-lady",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Hedy_Lamarr-Nicholas_Joy_in_Dishonored_Lady.jpg",
"title": "Dishonored Lady",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/carnival-of-souls-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=CarnivalOfSouls_ipod",
"title": "Carnival of Souls",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/child-bride",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Shirley_Mills.jpg",
"title": "Child Bride",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/detour-1",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Detour_%2528poster%2529.jpg",
"title": "Detour",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/a-bucket-of-blood-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=A_Bucket_of_Blood_%25281959%2529_-_Title.jpg",
"title": "A Bucket of Blood",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/intolerance",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Intolerance_%2528film%2529.jpg",
"title": "Intolerance",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/white-zombie-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_White_Zombie_01_Crisco_restoration.jpg",
"title": "White Zombie",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/star-wreck-in-the-pirkining",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=StarWreckInThePirkining",
"title": "Star Wreck: In the Pirkining",
"year": 2005},
{"url": "http://publicdomainmovie.net/movie/the-general-1",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_general_movie_poster.jpg",
"title": "The General",
"year": 1927},
{"url": "http://publicdomainmovie.net/movie/girl-o-my-dreams",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=girl_o_my_dreams",
"title": "Girl o My Dreams",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/hemp-for-victory",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Hemp_for_victory_1942.png",
"title": "Hemp for Victory",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/angel-on-my-shoulder-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Anne_Baxter-Paul_Muni_in_Angel_on_My_Shoulder.jpg",
"title": "Angel on My Shoulder",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/the-saint-louis-bank-robbery",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Steve_McQueen_-_The_Great_St._Louis_Bank_Robbery_%25281959%2529_-_2.jpg",
"title": "The Saint Louis Bank Robbery",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/haxan",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Haxan_sv_poster.jpg",
"title": "Haxan",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/call-of-the-yukon",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=call_of_the_yukon",
"title": "Call of the Yukon",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/cosmos-war-of-the-planets-0",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=Cosmos_War_of_the_Planets",
"title": "Cosmos: War of the Planets",
"year": 1977},
{"url": "http://publicdomainmovie.net/movie/the-most-dangerous-game",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Most_Dangerous_Game_poster.jpg",
"title": "The Most Dangerous Game",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/sing-a-song-of-six-pants",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=SingSong6PantsOneSheet47.JPG",
"title": "Sing A Song of Six Pants",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/things-to-come-1",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=ClaCinOnl_ThingsToCome",
"title": "Things To Come",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/star-odyssey-italian-star-wars-1979",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=StarOdysseyitalianStarWars1979",
"title": "Star Odyssey (Italian Star Wars)",
"year": 1979},
{"url": "http://publicdomainmovie.net/movie/the-snows-of-kilimanjaro",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Thesnowsofkilimanjaro-Peck.jpg",
"title": "The Snows of Kilimanjaro",
"year": 1952},
{"url": "http://publicdomainmovie.net/movie/berlin-symphony-of-a-great-city",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=BerlinSymphonyofaGreatCity",
"title": "Berlin: Symphony of a Great City",
"year": 1927},
{"url": "http://publicdomainmovie.net/movie/beneath-the-12-mile-reef",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Beneath_the_12-Mile_Reef_%25281953%2529_still_1.jpg",
"title": "Beneath the 12-Mile Reef",
"year": 1953},
{"url": "http://publicdomainmovie.net/movie/oliver-twist",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=DickieMoore.PNG",
"title": "Oliver Twist",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/africa-screams",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Lou_Costello_in_Africa_Screams.jpg",
"title": "Africa Screams",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-immigrant",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=CC_The_Immigrant_1917.JPG",
"title": "Charlie Chaplins The Immigrant",
"year": 1917},
{"url": "http://publicdomainmovie.net/movie/texas-terror",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Texas_Terror_%25281935%2529_3.jpg",
"title": "Texas Terror",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/a-star-is-born",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Fredric_March-Janet_Gaynor_in_A_Star_Is_Born_%25281937%2529.jpg",
"title": "A Star Is Born",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/born-to-win",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=BorntoWin",
"title": "Born to Win",
"year": 1971},
{"url": "http://publicdomainmovie.net/movie/gullivers-travels-720p",
"genre": "Animation",
"image": "http://publicdomainmovie.net/image.php?id=GulliversTravels720p_652",
"title": "Gullivers Travels",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/driller-killer-uncut",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=DrillerKillerUncut1979",
"title": "Driller Killer",
"year": 1979},
{"url": "http://publicdomainmovie.net/movie/malice-in-the-palace",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Malice_In_The_Palace.jpg",
"title": "Malice in the Palace",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/a-man-betrayed",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=AManBetrayed1936Poster.jpg",
"title": "A Man Betrayed",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/the-scarlet-pimpernel",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=TheScarletPimpernel",
"title": "The Scarlet Pimpernel",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/outlaw-the",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Jack_Buetel-Jane_Russell_in_The_Outlaw.jpg",
"title": "Outlaw, The",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/mud-and-sand",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Mudandsand-1922-publicityphoto.jpg",
"title": "Mud and Sand",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/at-war-with-the-army",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=AtWarWithTheArmy",
"title": "At War with the Army",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/assignment-outer-space-0",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=Assignment_Outer_Space",
"title": "Assignment Outer Space",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/night-of-the-living-dead-4",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Night_of_the_Living_Dead_affiche.jpg",
"title": "Night of the Living Dead",
"year": 1968},
{"url": "http://publicdomainmovie.net/movie/nancy-drew-reporter",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=nancy_drew_reporter",
"title": "Nancy Drew... Reporter",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/attack-from-space-0",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=Attack_From_Space",
"title": "Attack From Space",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/meet-john-doe-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_Meet_John_Doe_01.jpg",
"title": "Meet John Doe",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/atom-age-vampire",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=AtomAgeVampire",
"title": "Atom Age Vampire",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/the-green-glove",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/image.php?id=the_green_glove",
"title": "The Green Glove",
"year": 1952},
{"url": "http://publicdomainmovie.net/movie/kansas-city-confidential",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=kansascityconfidencial",
"title": "Kansas City Confidential",
"year": 1952},
{"url": "http://publicdomainmovie.net/movie/amazing-mr-x-the",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/image.php?id=amazing_mr_x",
"title": "The Amazing Mr. X",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/the-phantom-planet",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=Phantom_Planet",
"title": "The Phantom Planet",
"year": 1961},
{"url": "http://publicdomainmovie.net/movie/please-murder-me",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=Please_Murder_Me_movie",
"title": "Please Murder Me",
"year": 1956},
{"url": "http://publicdomainmovie.net/movie/flight-to-nowhere",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Michael_Visaroff_in_Flight_to_Nowhere.jpg",
"title": "Flight to Nowhere",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/the-wasp-woman",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=The_Wasp_Women",
"title": "The Wasp Woman",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/last-of-the-mohicans",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Last-mohicans-1920.jpg",
"title": "Last of the Mohicans",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/brain-that-wouldnt-die-the",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Brainthatwouldntdie_film_poster.jpg",
"title": "The Brain That Wouldnt Die",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/big-trees-the",
"genre": "Action",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Kirk_douglas_big_trees04.jpg",
"title": "The Big Trees",
"year": 1952},
{"url": "http://publicdomainmovie.net/movie/the-kid",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=CC_The_Kid_1921.jpg",
"title": "The Kid",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/white-zombie-2",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_White_Zombie_01_Crisco_restoration.jpg",
"title": "White Zombie",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/plan-9-from-outer-space",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Plan_9_Alternative_poster.jpg",
"title": "Plan 9 from Outer Space",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/west-of-the-divide",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=west_of_the_divide",
"title": "West of the Divide",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/dementia-13",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=Dementia_13",
"title": "Dementia 13",
"year": 1963},
{"url": "http://publicdomainmovie.net/movie/prisoners-of-the-lost-universe",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=PrisonersOfTheLostUniverse1983",
"title": "Prisoners Of The Lost Universe",
"year": 1983},
{"url": "http://publicdomainmovie.net/movie/jack-and-the-beanstalk-1",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=A%2526cjack.jpg",
"title": "Jack and the Beanstalk",
"year": 1952},
{"url": "http://publicdomainmovie.net/movie/the-hunchback-of-notre-dame",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=The_Hunchback_of_Notre_Dame",
"title": "The Hunchback of Notre Dame",
"year": 1923},
{"url": "http://publicdomainmovie.net/movie/made-for-each-other-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Made_for_Each_Other-_1939-_Poster.png",
"title": "Made for Each Other",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/inner-sanctum-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=Inner_Sanctum_movie",
"title": "Inner Sanctum",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/the-terror",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=TheTerror",
"title": "The Terror",
"year": 1963},
{"url": "http://publicdomainmovie.net/movie/the-goat",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Keaton_Goat_1921.jpg",
"title": "The Goat",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/the-general-complete-clearer",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_general_movie_poster.jpg",
"title": "The General",
"year": 1926},
{"url": "http://publicdomainmovie.net/movie/young-and-innocent-the-girl-was-young",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=YoungandInnocentTheGirlWasYoung",
"title": "Young and Innocent (The Girl Was Young)",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/my-man-godfrey-1936",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=MyManGodfrey1936",
"title": "My Man Godfrey",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/lil-abner-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Billie_Seward-Jeff_York_in_Li%2527l_Abner.jpg",
"title": "Lil Abner",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/inner-sanctum",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=inner_sanctum",
"title": "Inner Sanctum",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/africa-speaks",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Africa_Speaks_poster_1930.jpg",
"title": "Africa Speaks",
"year": 1930},
{"url": "http://publicdomainmovie.net/movie/till-the-clouds-roll-by",
"genre": "Musical",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Till_The_Clouds_Roll_By.jpg",
"title": "Till The Clouds Roll By",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/cat-women-of-the-moon",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=Cat_Women_of_the_Moon",
"title": "Cat-Women of the Moon",
"year": 1953},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-police",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Police_1916.jpg",
"title": "Charlie Chaplins Police",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-adventurer",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Adventurer_%2528film%2529.jpg",
"title": "Charlie Chaplins The Adventurer",
"year": 1917},
{"url": "http://publicdomainmovie.net/movie/the-red-house-0",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Red_House_%25281947%2529_1.jpg",
"title": "The Red House",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/the-screaming-skull-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_for_The_Screaming_Skull.jpg",
"title": "The Screaming Skull",
"year": 1958},
{"url": "http://publicdomainmovie.net/movie/the-blood-of-jesus",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=blood_of_jesus",
"title": "The Blood Of Jesus",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-knockout",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Knockout_%2528poster%2529.jpg",
"title": "Charlie Chaplins The Knockout",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/woman-on-the-run",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=Woman_on_the_Run",
"title": "Woman on the Run",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/under-california-stars",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Under_California_Stars_%25281948%2529_1.jpg",
"title": "Under California Stars",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/abe-lincoln-of-the-ninth-avenue",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Streets_of_New_York_poster.jpg",
"title": "Abe Lincoln of the Ninth Avenue",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/m-fritz-lang",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=PhantasmagoriaTheater-MFritzLang1931574",
"title": "M",
"year": 1931},
{"url": "http://publicdomainmovie.net/movie/phantom-from-space",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=Phantom_From_Space",
"title": "Phantom From Space",
"year": 1953},
{"url": "http://publicdomainmovie.net/movie/the-hitch-hiker",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Hitchhiker1953.JPG",
"title": "The Hitch-Hiker",
"year": 1953},
{"url": "http://publicdomainmovie.net/movie/jamaica-inn",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/image.php?id=Jamaica_Inn",
"title": "Jamaica Inn",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/nosferatudvdquality",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=Nosferatu_DVD_quality",
"title": "Nosferatu",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/the-chase",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=TheChase_",
"title": "The Chase",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/the-brain-that-wouldnt-die",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Brainthatwouldntdie_film_poster.jpg",
"title": "The Brain that Wouldnt Die",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/things-to-come",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=things_to_come_ipod",
"title": "Things to Come",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/the-vampire-bat-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Vampirebat.jpg",
"title": "The Vampire Bat",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/attack-of-the-giant-leeches-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=AttackOfTheGiantLeeches",
"title": "Attack of the Giant Leeches",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/mr-wong-detective",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=MrWongDetective.jpg",
"title": "Mr. Wong, Detective",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/beat-the-devil-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Beatthedevil01.jpg",
"title": "Beat the Devil",
"year": 1953},
{"url": "http://publicdomainmovie.net/movie/rain",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Joan_Crawford_in_Rain_3.jpg",
"title": "Rain",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/lady-of-burlesque",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=LadyofBurlesque",
"title": "Lady of Burlesque",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/affairs-of-cappy-ricks-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Walter_brennan_affairs_of_cappy_ricks_ss2.jpg",
"title": "Affairs of Cappy Ricks",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/night-of-the-living-dead-ipod-and-flash-video-versions",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Night_of_the_Living_Dead_affiche.jpg",
"title": "Night of the Living Dead",
"year": 1968},
{"url": "http://publicdomainmovie.net/movie/war-babies",
"genre": "Musical",
"image": "http://publicdomainmovie.net/image.php?id=war_babies",
"title": "War Babies",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/arizona-kid-the",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Arizona_Kid.jpg",
"title": "Arizona Kid, The",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/little-lord-fauntleroy-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=little_lord_fauntleroy",
"title": "Little Lord Fauntleroy",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/bluebeard",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=Bluebeard",
"title": "Bluebeard",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/20000-leagues-under-the-sea-1",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_20%252C000_Leagues_under_the_Sea_%25281916%2529.jpg",
"title": "20,000 Leagues Under the Sea",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/dressed-to-kill",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=dressed_to_kill_ipod",
"title": "Dressed to Kill",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/three-came-home",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=3CameHomePoster.jpg",
"title": "Three Came Home",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-caught-in-a-caberet",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Caught_in_a_Cabaret_%2528poster%2529.jpg",
"title": "Charlie Chaplins Caught in a Caberet",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/indestructible-man",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=indestructible_man",
"title": "Indestructible Man",
"year": 1956},
{"url": "http://publicdomainmovie.net/movie/one-week",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Buster_keaton_one_week_poster.jpg",
"title": "One Week",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/submarine-alert",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Screen_shot_Submarine_Alert.png",
"title": "Submarine Alert",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/iron-mask",
"genre": "Action",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Ironmaskposter.jpg",
"title": "Iron Mask",
"year": 1952},
{"url": "http://publicdomainmovie.net/movie/my-pal-trigger",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=my_pal_trigger",
"title": "My Pal Trigger",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/bride-of-the-gorilla",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=bride_of_a_gorilla",
"title": "Bride of the Gorilla",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/santa-and-the-three-bears",
"genre": "Animation",
"image": "http://publicdomainmovie.net/image.php?id=SantaandtheThreeBears",
"title": "Santa and the Three Bears",
"year": 2014},
{"url": "http://publicdomainmovie.net/movie/steamboat-bill-jr",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Steamboat_bill_poster.jpg",
"title": "Steamboat Bill, Jr.",
"year": 1928},
{"url": "http://publicdomainmovie.net/movie/billy-the-kid-returns",
"genre": "Action",
"image": "http://publicdomainmovie.net/image.php?id=BillytheKidReturns",
"title": "Billy the Kid Returns",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/the-time-of-your-life",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Time_Of_Your_Life_%25281948%2529_1.jpg",
"title": "The Time of Your Life",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/war-of-the-wildcats-john-wayne",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=In_Old_Oklahoma_%25281943%2529_6.jpg",
"title": "War of the Wildcats - John Wayne",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/last-time-i-saw-paris-the",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=last_time_i_saw_paris",
"title": "Last Time I Saw Paris, The",
"year": 1954},
{"url": "http://publicdomainmovie.net/movie/beat-the-devil",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Beatthedevil01.jpg",
"title": "Beat the Devil",
"year": 1953},
{"url": "http://publicdomainmovie.net/movie/sabotage",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=Sabotage_1936",
"title": "Sabotage",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/our-town",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Our_Town",
"title": "Our Town",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/love-laughs-at-andy-hardy",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Love_Finds_Andy_Hardy_%25281939%2529_1.jpg",
"title": "Love Laughs at Andy Hardy",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/doll-face",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Doll_Face_-_Title_card.png",
"title": "Doll Face",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/the-brain-that-wouldnt-die-1",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Brainthatwouldntdie_film_poster.jpg",
"title": "The Brain That Wouldnt Die",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/monstrosity-aka-the-atomic-brain",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=atomic_brain_1964",
"title": "Monstrosity AKA / The Atomic Brain",
"year": 1964},
{"url": "http://publicdomainmovie.net/movie/life-with-father",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Life_with_Father_%25281947%2529.jpg",
"title": "Life with Father",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/les-vampires-directed-by-louis-feuillade",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=LesVampires1915DirectedByLouisFeuillade",
"title": "Les Vampires directed by Louis Feuillade",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/boys-of-the-city",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=boys_of_the_city",
"title": "Boys of the City",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/five-minutes-to-live-aka-door-to-door-maniac",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=FiveMinutesTo_Live.jpg",
"title": "Five Minutes to Live / AKA Door-to-Door Maniac",
"year": 1961},
{"url": "http://publicdomainmovie.net/movie/a-study-in-scarlet",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=%25D0%2597%25D0%25B0%25D0%25BD%25D1%258F%25D1%2582%25D0%25B8%25D1%258F_%25D0%25B2_%25D0%25B0%25D0%25BB%25D0%25BE%25D0%25BC.png",
"title": "A Study In Scarlet",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/war-comes-to-america",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Why_We_Fight_7%252C_War_Comes_to_America.ogv",
"title": "War Comes to America",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/the-corpse-vanishes",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Corpsevanishes.jpg",
"title": "The Corpse Vanishes",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/atom-age-vampire-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=atom_age_vampire",
"title": "Atom Age Vampire",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/fear-in-the-night",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=Fear_in_the_Night",
"title": "Fear in the Night",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/algiers-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Algiers_1938_%25282%2529.jpg",
"title": "Algiers",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/blonde-ice-1948",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=BlondeIce1948",
"title": "Blonde Ice",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/jail-bait",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=JailBait",
"title": "Jail Bait",
"year": 1954},
{"url": "http://publicdomainmovie.net/movie/vengeance-valley",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Vengeance_valley_poster.jpg",
"title": "Vengeance Valley",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/hell-ship-mutiny",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=HELLSHIPMUTINY1957JRJonHallPeterLorreSaltytheChimp2",
"title": "Hell Ship Mutiny",
"year": 1957},
{"url": "http://publicdomainmovie.net/movie/gunsmoke-ranch",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=gunsmoke_ranch",
"title": "Gunsmoke Ranch",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/the-scar-0",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=The_Scar_1948",
"title": "The Scar",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/mr-motos-last-warning",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=mr_motos_last_warning",
"title": "Mr. Motos Last Warning",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/his-girl-friday-ipod",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=his_girl_friday_ipod",
"title": "His Girl Friday iPod",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/my-dear-secretary",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=my_dear_secretary",
"title": "My Dear Secretary",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/black-dragons",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Blackdragons.jpg",
"title": "Black Dragons",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/danger-flight",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=DANGER_FLIGHT_poster.jpg",
"title": "Danger Flight",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/love-affair",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Irene_Dunne-Charles_Boyer_in_Love_Affair_2.jpg",
"title": "Love Affair",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/the-holy-ghost-people",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=HolyGhostPeople",
"title": "The Holy Ghost People",
"year": 1967},
{"url": "http://publicdomainmovie.net/movie/american-empire",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=american_empire",
"title": "American Empire",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/the-bashful-bachelor",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Chester_Lauck.gif",
"title": "The Bashful Bachelor",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/jackie-robinson-story-the",
"genre": "Biography",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Jackie_Robinson_Story_lobby_card.jpg",
"title": "The Jackie Robinson Story",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/white-zombie",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_White_Zombie_01_Crisco_restoration.jpg",
"title": "White Zombie",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/the-woman-in-green-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Woman_in_Green_%25281945%2529_3.jpg",
"title": "The Woman in Green",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/the-three-stooges-collection",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Three_Stooges_1937.jpg",
"title": "The Three Stooges Collection",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/three-broadway-girls-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=David_Manners%252C_Madge_Evans%252C_Joan_Blondell%252C_Ina_Claire.jpg",
"title": "Three Broadway Girls",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/lost-in-the-stratosphere-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=lost_in_the_stratosphere_ipod",
"title": "Lost in the Stratosphere",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/scarlet-street",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=scarlet_street_ipod",
"title": "Scarlet Street",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/blue-steel",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Blue_Steel_%25281934%2529_2.jpg",
"title": "Blue Steel",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/pot-o-gold",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=James_Stewart-Paulette_Goddard_in_Pot_o%2527_Gold.jpg",
"title": "Pot o Gold",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/the-mystery-of-mr-wong",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Mystery_of_Mr._Wong_poster.jpg",
"title": "The Mystery of Mr. Wong",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/the-second-woman",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=The_Second_Woman_",
"title": "The Second Woman",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/hook-line-and-sinker",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=hook_line_and_sinker",
"title": "Hook Line and Sinker",
"year": 1930},
{"url": "http://publicdomainmovie.net/movie/randy-rides-alone",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Randy_Rides_Alone_%25281934%2529_01.png",
"title": "Randy Rides Alone",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/detour-ipod",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Detour_%2528poster%2529.jpg",
"title": "Detour (iPod)",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/the-lost-world-1",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Lost_World_%25281925%2529_-_film_poster.jpg",
"title": "The Lost World",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/this-is-the-army-0",
"genre": "War",
"image": "http://publicdomainmovie.net/image.php?id=this_is_the_army_mp4",
"title": "This is the Army",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/the-north-star",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=RKO_Pictures_The_North_Star_.JPG",
"title": "The North Star",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-between-showers",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=CC_1914_02_28_BetweenShowers",
"title": "Charlie Chaplins Between Showers",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/the-night-of-counting-the-years",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=The_Night_of_Counting_the_Years",
"title": "The Night of Counting the Years",
"year": 1969},
{"url": "http://publicdomainmovie.net/movie/that-uncertain-feeling",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=That_Uncertain_Feeling_2.jpg",
"title": "That Uncertain Feeling",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/whistle-stop",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Whistle_Stop",
"title": "Whistle Stop",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/why-we-fight-prelude-to-war",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Transmitlies.jpeg",
"title": "Why We Fight: Prelude to War",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/bowery-blitzkrieg",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=bowery_blitzkrieg",
"title": "Bowery Blitzkrieg",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/daniel-boone-trail-blazer",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=Daniel_Boone_-_Trail_Blazer",
"title": "Daniel Boone, Trail Blazer",
"year": 1956},
{"url": "http://publicdomainmovie.net/movie/phantom-planet",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=phantom_planet_ipod",
"title": "Phantom Planet",
"year": 1961},
{"url": "http://publicdomainmovie.net/movie/a-bucket-of-blood-1959",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=ABucketOfBlood1959",
"title": "A Bucket of Blood",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/his-girl-friday-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=His_Girl_Friday_still_2.jpg",
"title": "His Girl Friday",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/the-painted-hills",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Lassie.jpg",
"title": "The Painted Hills",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/nothing-sacred",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Nothing_Sacred_lobby_card.jpg",
"title": "Nothing Sacred",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/stranger-the",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=the_stranger_film",
"title": "The Stranger",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/attack-from-space",
"genre": "Action",
"image": "http://publicdomainmovie.net/image.php?id=attack_from_space_ipod",
"title": "Attack from Space",
"year": 1964},
{"url": "http://publicdomainmovie.net/movie/the-phantom-of-the-opera",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Phantom_of_the_Opera_%25281925_film%2529.jpg",
"title": "The Phantom of the Opera",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/embryo",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=embryo_RockHudson",
"title": "Embryo",
"year": 1976},
{"url": "http://publicdomainmovie.net/movie/philo-vance-the-kennel-murder-case",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Etienne_Girardot_in_The_Kennel_Murder_Case.jpg",
"title": "Philo Vance - The Kennel Murder Case",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/women-in-the-night",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Virginia_Christine_in_Women_in_the_Night.jpg",
"title": "Women in the Night",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/airborne",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=airborne",
"title": "Airborne",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/killer-shrews",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=The_Killer_Shrews",
"title": "Killer Shrews",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/the-fabulous-dorseys",
"genre": "Musical",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Tommy_%2526_Jimmy_Dorsey_in_The_Fabulous_Dorseys.jpg",
"title": "The Fabulous Dorseys",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/the-playhouse",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Keaton_Playhouse_1921.jpg",
"title": "The Playhouse",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/oh-susanna",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Oh%252C_Susanna%2521_Poster.jpg",
"title": "Oh, Susanna!",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/eternally-yours-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Eternally-Yours-1939.jpg",
"title": "Eternally Yours",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/shock-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=shock",
"title": "Shock",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/alice-in-wonderland",
"genre": "Fantasy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Alice_in_Wonderland_%25281915%2529.webm",
"title": "Alice In Wonderland",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/guest-in-the-house-1944",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=GuestInTheHouse1944",
"title": "Guest in the House",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/swing-high-swing-low",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Swing_High%252C_Swing_Low_%25281937%2529_1.jpg",
"title": "Swing High, Swing Low",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/pool-sharks",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=pool_sharks",
"title": "Pool Sharks",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/a-night-in-the-show",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=A_Night_in_the_Show_%2528poster%2529.jpg",
"title": "A Night in the Show",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/angel-and-the-bad-man-the",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Angel_badman.jpg",
"title": "The Angel and the Bad Man",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/raiders-of-old-california-0",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=RaidersofOldCalifornia_title.jpg",
"title": "Raiders of Old California",
"year": 1957},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-cruel-cruel-love",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=CC_Cruel_Cruel_Love_1914.JPG",
"title": "Charlie Chaplins Cruel Cruel Love",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/dead-men-walk",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=DeadMenWalk",
"title": "Dead Men Walk",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/boys-of-the-city-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=boys_of_the_city_ipod",
"title": "Boys of the City",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/the-general-0",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_general_movie_poster.jpg",
"title": "The General",
"year": 1927},
{"url": "http://publicdomainmovie.net/movie/beyond-tomorrow",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Beyond_Tomorrow",
"title": "Beyond Tomorrow",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/the-strange-love-of-martha-ivers",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Martha_Ivers",
"title": "The Strange Love of Martha Ivers",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/second-chorus-0",
"genre": "Musical",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Astaire_in_Second_Chorus.jpg",
"title": "Second Chorus",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/doa-0",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Laurette_Luez_and_Edmond_O%2527Brien_in_DOA.jpg",
"title": "D.O.A.",
"year": 1955},
{"url": "http://publicdomainmovie.net/movie/the-killer-shrews",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=The_Killer_Shrews_1959",
"title": "The Killer Shrews",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/the-private-life-of-henry-viii",
"genre": "Biography",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Private-Life-Henry-VIII.jpg",
"title": "The Private Life of Henry VIII.",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/the-incredible-petrified-world",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=TheIncrediblePetrifiedWorld",
"title": "The Incredible Petrified World",
"year": 1957},
{"url": "http://publicdomainmovie.net/movie/she-gods-of-shark-reef",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=SheGodsSharkReef-poster.jpg",
"title": "She Gods of Shark Reef",
"year": 1958},
{"url": "http://publicdomainmovie.net/movie/bird-of-paradise",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Delores_del_Rio-publicity.JPG",
"title": "Bird of Paradise",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/silent-night-bloody-night",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=SilentNightBloodyNight",
"title": "Silent Night, Bloody Night",
"year": 1974},
{"url": "http://publicdomainmovie.net/movie/aerial-gunner",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Screen_shot_Aerial_Gunner.png",
"title": "Aerial Gunner",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/the-yesterday-machine",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=TheYesterdayMachine1963",
"title": "The Yesterday Machine",
"year": 1963},
{"url": "http://publicdomainmovie.net/movie/tormented-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=tormented",
"title": "Tormented",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/of-human-bondage",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Of_Human_Bondage_Poster.jpg",
"title": "Of Human Bondage",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-a-busy-day",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=A_Busy_Day.jpg",
"title": "Charlie Chaplins A Busy Day",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/marihuana",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Marihuana_1_%25281936%2529.jpg",
"title": "Marihuana",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/black-dragons-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Blackdragons.jpg",
"title": "Black Dragons",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/the-little-shop-of-horrors-2",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=The_Little_Shop_of_Horrors.mpeg",
"title": "The Little Shop of Horrors",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/farewell-to-arms-a",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_A_Farewell_to_Arms_%25281932%2529_01.jpg",
"title": "A Farewell to Arms",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/hercules-and-the-tyrants-of-babylon",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=Hercules_and_the_Tyrants_of_Babylon",
"title": "Hercules and the Tyrants of Babylon",
"year": 1964},
{"url": "http://publicdomainmovie.net/movie/neath-brooklyn-bridge",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=neath_brooklyn_bridge",
"title": "Neath Brooklyn Bridge",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/cops",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=Cops",
"title": "Cops",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/my-love-for-your-aka-honeymoon-in-bali",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Honeymoon_in_Bali_%25281939%2529_still_1.jpg",
"title": "My Love for Your (AKA Honeymoon in Bali)",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/queen-of-the-amazons",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=QueenoftheAmazons",
"title": "Queen of the Amazons",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/mad-monster-the",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=mad_monster",
"title": "The Mad Monster",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/the-amazing-mr-x",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Amazing_Mr._X_%25281948%2529_1.jpg",
"title": "The Amazing Mr. X",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/giant-gila-monster-the",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=giant_gila_monster",
"title": "The Giant Gila Monster",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/topper-returns",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=Topper_Returns_41",
"title": "Topper Returns",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/the-amazing-transparent-man",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=The_Amazing_Transparent_Man",
"title": "The Amazing Transparent Man",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/earthworm-tractors",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Earthworm_Tractors_%25281936%2529_1.jpg",
"title": "Earthworm Tractors",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/this-is-not-a-test",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=ThisisNotaTest",
"title": "This Is Not a Test",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/black-fist",
"genre": "Action",
"image": "http://publicdomainmovie.net/image.php?id=BlackFist",
"title": "Black Fist",
"year": 1974},
{"url": "http://publicdomainmovie.net/movie/jigsaw",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=Jigsaw_",
"title": "Jigsaw",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/flesh-and-the-spur",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=flesh_and_the_spur",
"title": "Flesh and the Spur",
"year": 1957},
{"url": "http://publicdomainmovie.net/movie/maniac",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=Maniac1934",
"title": "Maniac",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/the-real-bruce-lee",
"genre": "Biography",
"image": "http://publicdomainmovie.net/image.php?id=TheRealBruceLee",
"title": "The Real Bruce Lee",
"year": 1979},
{"url": "http://publicdomainmovie.net/movie/horrors-of-spider-island",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=Horrors_of_Spider_Island",
"title": "Horrors of Spider Island",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/aces-and-eights",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Aces_and_Eights_poster.jpg",
"title": "Aces and Eights",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/stork-club-the",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Stork_Club_poster.jpg",
"title": "The Stork Club",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/eternally-yours",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Eternally-Yours-1939.jpg",
"title": "Eternally Yours",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/mesa-of-lost-women",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=MesaOfLostWomen",
"title": "Mesa of Lost Women",
"year": 1953},
{"url": "http://publicdomainmovie.net/movie/sherlock-holmes-and-the-secret-weapon-1943",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Sherlock_Holmes_and_the_Secret_Weapon_2.jpg",
"title": "Sherlock Holmes and the Secret Weapon",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/shoulder-arms",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Shoulder_Arms_poster.jpg",
"title": "Shoulder Arms",
"year": 1918},
{"url": "http://publicdomainmovie.net/movie/scrooge",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=scrooge_ipod",
"title": "Scrooge",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/reign-of-terror",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Reign_of_Terror_movie",
"title": "Reign of Terror",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/angel-on-my-shoulder",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=angelOnMyShoulder",
"title": "Angel on My Shoulder",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/the-blacksmith",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=TheBlacksmith",
"title": "The Blacksmith",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-behind-the-screen",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Behind_the_Screen.jpg",
"title": "Charlie Chaplins Behind The Screen",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/a-bride-for-henry",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=bride_for_henry_ipod",
"title": "A Bride for Henry",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/parlor-bedroom-and-bath",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=ParlorBedroomandBath",
"title": "Parlor, Bedroom and Bath",
"year": 1931},
{"url": "http://publicdomainmovie.net/movie/borderline-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Borderline_%25281950_1.jpg",
"title": "Borderline",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-his-new-job",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=His_New_Job.jpg",
"title": "Charlie Chaplins His New Job",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/the-paleface",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=ThePaleface",
"title": "The Paleface",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/million-dollar-weekend-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Gene_Raymond-Osa_Massen_in_Million_Dollar_Weekend.jpg",
"title": "Million Dollar Weekend",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/why-we-fight-war-comes-to-america",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Transmitlies.jpeg",
"title": "Why We Fight: War Comes to America",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/half-shot-at-sunrise",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Half_Shot_at_Sunrise_%25281930%2529%252C_Bob_Woolsey_and_Leni_Stengel.jpg",
"title": "Half Shot at Sunrise",
"year": 1930},
{"url": "http://publicdomainmovie.net/movie/the-limping-man",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=TheLimpingMan",
"title": "The Limping Man",
"year": 1953},
{"url": "http://publicdomainmovie.net/movie/his-double-life",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=his_double_life",
"title": "His Double Life",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/college",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Busterkeaton_college.jpg",
"title": "College",
"year": 1927},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-triple-trouble",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Triple_Trouble_%25281918%2529_-_1.jpg",
"title": "Charlie Chaplins Triple Trouble",
"year": 1918},
{"url": "http://publicdomainmovie.net/movie/fathers-little-dividend",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Father%2527s_Little_Dividend_1.jpg",
"title": "Fathers Little Dividend",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/one-body-too-many",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=one_body_too_many_ipod",
"title": "One Body Too Many",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/lady-says-no",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=lady_says_no",
"title": "Lady Says No",
"year": 1952},
{"url": "http://publicdomainmovie.net/movie/the-fast-and-the-furious-0",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=The_Fast_and_the_Furious",
"title": "The Fast and the Furious",
"year": 1955},
{"url": "http://publicdomainmovie.net/movie/timetable-aka-time-table",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=Timetable1956",
"title": "Timetable",
"year": 1956},
{"url": "http://publicdomainmovie.net/movie/please-murder-me-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=please_murder_me_ipod",
"title": "Please Murder Me",
"year": 1956},
{"url": "http://publicdomainmovie.net/movie/bad-man-of-deadwood",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=BadManOfDeadwood",
"title": "Bad Man of Deadwood",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/the-hoodlum",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=The_Hoodlum",
"title": "The Hoodlum",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/long-shot-the",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=C._Henry_Gordon_in_Long_Shot.jpg",
"title": "The Long Shot",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/strange-illusion",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=Strange_Illusion",
"title": "Strange Illusion",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/the-dawn-rider",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_of_the_movie_The_Dawn_Rider.jpg",
"title": "The Dawn Rider",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/ape-the",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Theape.jpg",
"title": "Ape, The",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/why-we-fight-the-battle-of-china",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Transmitlies.jpeg",
"title": "Why We Fight: The Battle of China",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/tombstone-canyon",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=tombstone_canyon",
"title": "Tombstone Canyon",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-fireman",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=%2527The_Fireman%2527.jpg",
"title": "Charlie Chaplins The Fireman",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/paradise-isle",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Movita_in_Paradise_Isle_%25281937%2529_1.jpg",
"title": "Paradise Isle",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/horror-hotel",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=HorrorHotel1960",
"title": "Horror Hotel",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/the-last-of-the-mohicans",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Last_of_the_Mohicans_%25281920%2529_-_8.jpg",
"title": "The Last of the Mohicans",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-bond",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Charlie_Chaplin%252C_bond_of_friendship%252C_1918.ogg",
"title": "Charlie Chaplins The Bond",
"year": 1918},
{"url": "http://publicdomainmovie.net/movie/barefoot-boy",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=barefoot_boy",
"title": "Barefoot Boy",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/cyrano-debergerac",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Cyrano_DeBergerac_ipod",
"title": "Cyrano DeBergerac",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/the-big-combo-1",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Big_Combo_poster.jpg",
"title": "The Big Combo",
"year": 1955},
{"url": "http://publicdomainmovie.net/movie/the-perils-of-pauline",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=ThePerilsofPauline",
"title": "The Perils of Pauline",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/le-voyage-dans-la-lune",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Voyage_dans_la_lune_title_card.png",
"title": "Le Voyage dans la lune",
"year": 1902},
{"url": "http://publicdomainmovie.net/movie/why-we-fight-the-battle-of-britain",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Transmitlies.jpeg",
"title": "Why We Fight: The Battle of Britain",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-a-film-johnnie",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=A_Film_Johnnie_%25281914%2529.webm",
"title": "Charlie Chaplins A Film Johnnie",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/blood-on-the-sun",
"genre": "Action",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Blood_on_the_Sun_%25281945%2529_1.jpg",
"title": "Blood on the Sun",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/bloody-pit-of-horror-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=bloody_pit_of_horror_ipod",
"title": "Bloody Pit of Horror",
"year": 1965},
{"url": "http://publicdomainmovie.net/movie/lonely-wives",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Lonely_Wives%252C_%25281931%2529%252C_Patsy_Ruth_Miller%252C_Laura_La_Plante%252C_Esther_Ralston%252C_Publicity_%2528Pathe%2529.jpg",
"title": "Lonely Wives",
"year": 1931},
{"url": "http://publicdomainmovie.net/movie/the-chase-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=chase_the",
"title": "The Chase",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/street-angel-%E9%A6%AC%E8%B7%AF%E5%A4%A9%E4%BD%BF",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=street_angel",
"title": "Street Angel (馬路天使)",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/danger-lights",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Dangerlights1931.jpg",
"title": "Danger Lights",
"year": 1930},
{"url": "http://publicdomainmovie.net/movie/atlantic-flight",
"genre": "Action",
"image": "http://publicdomainmovie.net/image.php?id=AtlanticFlight",
"title": "Atlantic Flight",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/the-pied-piper-of-hamelin",
"genre": "Fantasy",
"image": "http://publicdomainmovie.net/image.php?id=The_Pied_Piper_of_Hamelin",
"title": "The Pied Piper of Hamelin",
"year": 1957},
{"url": "http://publicdomainmovie.net/movie/cause-for-alarm",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=cause_for_alarm_1951",
"title": "Cause for Alarm!",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/the-kid-auto-race-in-venice",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=CC_Kid_Auto_Races_at_Venice_1914_%2528poster%2529.jpg",
"title": "The Kid Auto Race in Venice",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/evil-brain-from-outer-space",
"genre": "Action",
"image": "http://publicdomainmovie.net/image.php?id=EvilBrainFromOuterSpace",
"title": "Evil Brain From Outer Space",
"year": 1964},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-laffing-gas-aka-laughing-gas",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Laughing_Gas_%2528poster%2529.jpg",
"title": "Charlie Chaplins Laughing Gas",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/young-bill-hickok",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=young_bill_hickok",
"title": "Young Bill Hickok",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/becky-sharp",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Becky_Sharp_%25281935%2529_8.jpg",
"title": "Becky Sharp",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/speak-easily",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Pressbook_herald_Speak_Easily_1932_Buster_Keaton_Jimmy_Durante_Thelma_Todd.jpg",
"title": "Speak Easily",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/barefoot-boy-0",
"genre": "Action",
"image": "http://publicdomainmovie.net/image.php?id=barefoot_boy_1938",
"title": "Barefoot Boy",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/go-for-broke",
"genre": "War",
"image": "http://publicdomainmovie.net/image.php?id=go_for_broke_ACM",
"title": "Go for Broke",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/the-big-cat",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=The_Big_Cat",
"title": "The Big Cat",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-masquerader",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Masquerader.jpg",
"title": "Charlie Chaplins The Masquerader",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/danger-ahead",
"genre": "Action",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Fred_Kelsey_in_Danger_Ahead.jpg",
"title": "Danger Ahead",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/forgotten-village",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=forgotten_village",
"title": "Forgotten Village",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/affairs-of-cappy-ricks",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Walter_brennan_affairs_of_cappy_ricks_ss2.jpg",
"title": "Affairs of Cappy Ricks",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/the-death-kiss-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Thedeathkissposter.jpg",
"title": "The Death Kiss",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/alices-wonderland",
"genre": "Fantasy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Alice%2527sWonderland.ogv",
"title": "Alices Wonderland",
"year": 1923},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-his-favorite-pasttime",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=His_Favourite_Pastime_1914.jpg",
"title": "Charlie Chaplins His Favorite Pasttime",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/lying-lips-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Lying_Lips_Poster_1939.jpg",
"title": "Lying Lips",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/princess-iron-fan-%E9%90%B5%E6%89%87%E5%85%AC%E4%B8%BB",
"genre": "Animation",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Princessironfan.jpg",
"title": "Princess Iron Fan (鐵扇公主)",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/bela-lugosi-meets-a-brooklyn-gorilla",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=BelaLugosiMeetsaBrooklynGorilla",
"title": "Bela Lugosi Meets a Brooklyn Gorilla",
"year": 1952},
{"url": "http://publicdomainmovie.net/movie/man-in-the-attic",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=Man_in_the_Attic_movie",
"title": "Man in the Attic",
"year": 1953},
{"url": "http://publicdomainmovie.net/movie/lost-in-the-stratosphere",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=lost_in_the_stratosphere",
"title": "Lost in the Stratosphere",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/the-southerner",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Southerner%252C_1945%252C_Betty_Field_and_Zachary_Scott_arrive_at_farm.jpg",
"title": "The Southerner",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/drums-in-the-deep-south",
"genre": "War",
"image": "http://publicdomainmovie.net/image.php?id=Drums_in_the_Deep_South",
"title": "Drums in the Deep South",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/neath-the-arizona-skies",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_%2527Neath_the_Arizona_Skies_03.jpg",
"title": "Neath The Arizona Skies",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/home",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Home2009",
"title": "Home",
"year": 2009},
{"url": "http://publicdomainmovie.net/movie/the-four-horsemen-of-the-apocalypse",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Four_Horsemen_of_the_Apocalypse_Poster.jpg",
"title": "The Four Horsemen of the Apocalypse",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/the-golem",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Golem_1920_Poster.jpg",
"title": "The Golem",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/money-madness",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=MoneyMadness",
"title": "Money Madness",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/mr-imperium",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Mr._Imperium_1951.JPG",
"title": "Mr. Imperium",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/mr-wong-in-chinatown",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=Mr_Wong_in_Chinatown",
"title": "Mr. Wong in Chinatown",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/why-we-fight-the-nazis-strike",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Transmitlies.jpeg",
"title": "Why We Fight: The Nazis Strike",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/sagebrush-trail",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Sagebrush_Trail.jpg",
"title": "Sagebrush Trail",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/suddenly-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=suddenly_avi",
"title": "Suddenly",
"year": 1954},
{"url": "http://publicdomainmovie.net/movie/assignment-outer-space",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=AssignmentOuterSpace",
"title": "Assignment Outer Space",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/the-electric-house",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=TheElectricHouse",
"title": "The Electric House",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/blood-and-sand",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Blood_and_Sand_1922_poster.jpg",
"title": "Blood And Sand",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/thirteenth-guest",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=thirteenth_guest",
"title": "Thirteenth Guest",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/judge-priest",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=Judge_Priest",
"title": "Judge Priest",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/one-week-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Buster_keaton_one_week_poster.jpg",
"title": "One Week",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/reefer-madness-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=reefer_madness_ipod",
"title": "Reefer Madness",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/adventure-island",
"genre": "Action",
"image": "http://publicdomainmovie.net/image.php?id=AdventureIsland",
"title": "Adventure Island",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-landladys-pet-aka-the-star-boarder",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Star_Boarder.jpg",
"title": "Charlie Chaplins The Landladys Pet",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/cosmos-war-of-the-planets",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=CosmosWarofthePlanets",
"title": "Cosmos: War of the Planets",
"year": 1977},
{"url": "http://publicdomainmovie.net/movie/dangerous-money",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=Dangerous_Money",
"title": "Dangerous Money",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/track-of-the-moon-beast",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=TrackoftheMoonBeast",
"title": "Track of the Moon Beast",
"year": 1976},
{"url": "http://publicdomainmovie.net/movie/moon-of-the-wolf",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=MoonoftheWolf_",
"title": "Moon of the Wolf",
"year": 1972},
{"url": "http://publicdomainmovie.net/movie/the-last-time-i-saw-paris",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Last_Time_I_Saw_Paris_1.jpg",
"title": "The Last Time I Saw Paris",
"year": 1954},
{"url": "http://publicdomainmovie.net/movie/second-chorus",
"genre": "Musical",
"image": "http://publicdomainmovie.net/image.php?id=second_chorus_1940",
"title": "Second Chorus",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/boy-what-a-girl",
"genre": "Musical",
"image": "http://publicdomainmovie.net/image.php?id=boy_what_a_girl",
"title": "Boy! What a Girl!",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/monster-from-a-prehistoric-planet",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=GappaJapaneseposter.jpg",
"title": "Monster From a Prehistoric Planet",
"year": 1967},
{"url": "http://publicdomainmovie.net/movie/night-of-the-living-dead",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Night_of_the_Living_Dead_affiche.jpg",
"title": "Night Of The Living Dead",
"year": 1968},
{"url": "http://publicdomainmovie.net/movie/million-dollar-kid",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Million_Dollar_Kid_lobby_card.jpg",
"title": "Million Dollar Kid",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/steamboat-bill-jr-0",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Steamboat_bill_poster.jpg",
"title": "Steamboat Bill Jr.",
"year": 1928},
{"url": "http://publicdomainmovie.net/movie/the-general",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_general_movie_poster.jpg",
"title": "The General",
"year": 1927},
{"url": "http://publicdomainmovie.net/movie/kansas-city-confidential-ipod",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=kansas_city_confidential_ipod",
"title": "Kansas City Confidential (iPod)",
"year": 1952},
{"url": "http://publicdomainmovie.net/movie/the-pay-off-1935",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=thepayoff",
"title": "The Pay Off",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/why-we-fight-the-battle-of-russia-part-2",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Transmitlies.jpeg",
"title": "Why We Fight: The Battle of Russia - Part 2",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/double-exposure",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=double_exposure",
"title": "Double Exposure",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/the-chinese-ring",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_of_the_movie_The_Chinese_Ring.jpg",
"title": "The Chinese Ring",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/rage-at-dawn",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=RageatDawn",
"title": "Rage at Dawn",
"year": 1955},
{"url": "http://publicdomainmovie.net/movie/the-lucky-texan",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Lucky_Texan_lobby_card.jpg",
"title": "The Lucky Texan",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/lil-abner",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Billie_Seward-Jeff_York_in_Li%2527l_Abner.jpg",
"title": "Lil Abner",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/horror-express-1",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=Horror_Express_WebM",
"title": "Horror Express",
"year": 1972},
{"url": "http://publicdomainmovie.net/movie/ambush-valley",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Ambush_Valley_-_movie_poster.jpg",
"title": "Ambush Valley",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/women-in-the-night-0",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Virginia_Christine_in_Women_in_the_Night.jpg",
"title": "Women In the Night",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/the-little-princess-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Little_Princess_4.JPG",
"title": "The Little Princess",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/carnival-of-souls-2",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=CarnivalOfSouls1962",
"title": "Carnival of Souls",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/daydreams",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=Daydreams",
"title": "Daydreams",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/the-trail-beyond",
"genre": "Action",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The-Trail-Beyond-1934.jpg",
"title": "The Trail Beyond",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/a-romance-of-the-redwoods",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=A_Romance_of_the_Redwoods.jpg",
"title": "A Romance of the Redwoods",
"year": 1917},
{"url": "http://publicdomainmovie.net/movie/the-boat",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Buster_Keaton_Sybil_Seely_The_Boat_screenshot_1_christening.jpg",
"title": "The Boat",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/dishonored-lady-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Hedy_Lamarr-Nicholas_Joy_in_Dishonored_Lady.jpg",
"title": "Dishonored Lady",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/the-three-stooges-color-craziness-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Three_Stooges_1937.jpg",
"title": "The Three Stooges - Color Craziness!",
"year": 2005},
{"url": "http://publicdomainmovie.net/movie/the-curious-adventures-of-mr-wonderbird",
"genre": "Animation",
"image": "http://publicdomainmovie.net/image.php?id=The_Curious_Adventures_of_Mr_Wonderbird",
"title": "The Curious Adventures of Mr. Wonderbird",
"year": 1952},
{"url": "http://publicdomainmovie.net/movie/why-we-fight-the-battle-of-russia-part-1",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Transmitlies.jpeg",
"title": "Why We Fight: The Battle of Russia - Part 1",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/sky-patrol",
"genre": "Action",
"image": "http://publicdomainmovie.net/image.php?id=sky_patrol_ipod",
"title": "Sky Patrol",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/the-ape-man",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The-Ape-Man-Poster.jpg",
"title": "The Ape Man",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/in-the-year-2889-0",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=InTheYear28891967",
"title": "In the Year 2889",
"year": 1967},
{"url": "http://publicdomainmovie.net/movie/the-man-who-cheated-himself-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=TheManWhoCheatedHimself1950",
"title": "The Man Who Cheated Himself",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/gulliverstravels-1939",
"genre": "Animation",
"image": "http://publicdomainmovie.net/image.php?id=Gullivers_Travels-1939",
"title": "Gullivers Travels",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/the-devil-bat",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=Devil_Bat_movie",
"title": "The Devil Bat",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/becky-sharp-1935",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=BeckySharp1935",
"title": "Becky Sharp",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-face-on-the-barroom-floor",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Face_on_the_Bar_Room_Floor.jpg",
"title": "Charlie Chaplins Face On The Barroom Floor",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/midnight-manhunt-0",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=midnight_manhunt_ipod",
"title": "Midnight Manhunt",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/check-and-double-check",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Amos_andy_1929_postcard.JPG",
"title": "Check And Double Check",
"year": 1930},
{"url": "http://publicdomainmovie.net/movie/the-strange-woman",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Strange_Woman_movie",
"title": "The Strange Woman",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/love-nest",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=LoveNest",
"title": "Love Nest",
"year": 1923},
{"url": "http://publicdomainmovie.net/movie/monster-maker-the",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=monster_maker_ipod",
"title": "The Monster Maker",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/scrooge-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Scrooge_855",
"title": "Scrooge",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/girl-o-my-dreams-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=girl_o_my_dreams_ipod",
"title": "Girl O My Dreams",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/sos-your-aunt-emma-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=sos_your_aunt_emma_1942",
"title": "Sos Your Aunt Emma",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/under-the-big-top",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=under_the_big_top",
"title": "Under The Big Top",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/invisible-ghost-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=the_invisible_ghost_ipod",
"title": "Invisible Ghost",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/the-bat-0",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=The_Bat.mpeg",
"title": "The Bat",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-charolot-gargon-de-theater-the-property-man",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Property_Man_%2528poster%2529.jpg",
"title": "Charlie Chaplins Charolot Gargon De Theater",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/the-last-mile",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Last_Mile_FilmPoster.jpeg",
"title": "The Last Mile",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/the-star-packer",
"genre": "Action",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Star_Packer_FilmPoster.jpeg",
"title": "The Star Packer",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-his-prehistoric-past",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=His_prehistoric_past.jpg",
"title": "Charlie Chaplins His Prehistoric Past",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/sita-sings-the-blues",
"genre": "Animation",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Sita_STB_Poster.jpg",
"title": "Sita Sings the Blues",
"year": 2008},
{"url": "http://publicdomainmovie.net/movie/eegah",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=Eegah",
"title": "Eegah",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/bank-alarm",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Bank_Alarm_FilmPoster.jpeg",
"title": "Bank Alarm",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/invisible-ghost",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=TheInvisibleGhost",
"title": "Invisible Ghost",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/the-scarlet-clue",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Scarlet_Clue_%25281945%2529.webm",
"title": "The Scarlet Clue",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-mabels-busy-day",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Mabel_Busy_Day.jpg",
"title": "Charlie Chaplins Mabels Busy Day",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/mr-reckless",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Nestor_Paiva_in_Mr._Reckless.jpg",
"title": "Mr. Reckless",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/harakiri-1919-fritz-lang",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=Harakiri1919fritzLang",
"title": "Harakiri",
"year": 1919},
{"url": "http://publicdomainmovie.net/movie/parole-inc",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Parole%252C_Inc._%25281948%2529_poster.jpg",
"title": "Parole Inc",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/broken-blossoms-1919-0",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Broken_Blossoms.webm",
"title": "Broken Blossoms",
"year": 1919},
{"url": "http://publicdomainmovie.net/movie/manhattan-tower-0",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=ManhattanTower1932titlecard.jpg",
"title": "Manhattan Tower",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/they-made-me-a-killer",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=TheyMadeMeAKiller",
"title": "They Made Me a Killer",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/cops-buster-keaton",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=CopsbusterKeaton",
"title": "Cops",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-dough-and-dynamite",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Dough_and_Dynamite_1914_Poster_de_la_pel%25C3%25ADcula.jpg",
"title": "Charlie Chaplins Dough And Dynamite",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/great-gabbo",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Greatgabbb.jpg",
"title": "Great Gabbo",
"year": 1929},
{"url": "http://publicdomainmovie.net/movie/last-alarm",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=J._Farrell_MacDonald_in_The_Last_Alarm.jpg",
"title": "Last Alarm",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/midnight-phantom",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Midnight_Phantom_lobby_card.jpg",
"title": "Midnight Phantom",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/made-for-each-other",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Made_for_Each_Other-_1939-_Poster.png",
"title": "Made for Each Other",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/yellowneck",
"genre": "War",
"image": "http://publicdomainmovie.net/image.php?id=Yellowneck1955cosmo",
"title": "Yellowneck",
"year": 1955},
{"url": "http://publicdomainmovie.net/movie/arthur-conan-doyle",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Conan_doyle.jpg",
"title": "Arthur Conan Doyle",
"year": 1927},
{"url": "http://publicdomainmovie.net/movie/smart-alecks-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=smart_alecks_ipod",
"title": "Smart Alecks",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-fatal-mallet",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Fatal_Mallet.jpg",
"title": "Charlie Chaplins The Fatal Mallet",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/wrong-road",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=wrong_road",
"title": "Wrong Road",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/the-mysterious-rider-aka-mark-of-the-avenger",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=mark_of_the_avenger_1938",
"title": "The Mysterious Rider AKA Mark of the Avenger",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/guns-of-the-law",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=guns_of_the_law",
"title": "Guns of the Law",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/his-first-flame",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=his_first_flame",
"title": "His First Flame",
"year": 1927},
{"url": "http://publicdomainmovie.net/movie/telephone-operator",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Telephone_Operator_poster.JPG",
"title": "Telephone Operator",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/fathers-little-dividend-1951",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=FathersLittleDividend1951",
"title": "Fathers Little Dividend",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/arizona-days",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Arizona_Days_%25281928%2529_-_McGowan_%2526_Custer.jpg",
"title": "Arizona Days",
"year": 1928},
{"url": "http://publicdomainmovie.net/movie/the-outlaw",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Jane_Russell_in_The_Outlaw.jpg",
"title": "The Outlaw",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/eyes-in-the-night",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Eyes_in_the_Night_%25281942%2529_1.jpg",
"title": "Eyes in the Night",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/dementia-13-1",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=dementia13",
"title": "Dementia 13",
"year": 1963},
{"url": "http://publicdomainmovie.net/movie/my-wifes-relations",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Buster_Keaton_in_My_Wife%2527s_Relations_%25281922%2529.jpg",
"title": "My Wifes Relations",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/spring-in-a-small-town-%E5%B0%8F%E5%9F%8E%E4%B9%8B%E6%98%A5",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Spring_in_a_Small_Town_poster.jpg",
"title": "Spring in a Small Town (小城之春)",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-charlott-et-le-mannequin-mabels-married-life",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Mabel_married_life.jpg",
"title": "Charlie Chaplins Charlott Et Le Mannequin",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/teenagers-from-outer-space-0",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Teenagersouterspace.jpg",
"title": "Teenagers From Outer Space",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/the-animal-kingdom",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_Animal_Kingdom%252C_The_01.jpg",
"title": "The Animal Kingdom",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/nightmare-castle",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=NightmareCastle",
"title": "Nightmare Castle",
"year": 1965},
{"url": "http://publicdomainmovie.net/movie/home-town-story",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Hometown11.jpg",
"title": "Home Town Story",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/flying-wild",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=flying_wild",
"title": "Flying Wild",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-musical-tramps",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=His_musical_career.jpg",
"title": "Charlie Chaplins Musical Tramps",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/hoosier-schoolboy-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=hoosier_schoolboy_ipod",
"title": "Hoosier Schoolboy",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/uptown-new-york",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Uptown_New_York_%2528film_poster%2529.jpg",
"title": "Uptown New York",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/sos-your-aunt-emma",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=sos_your_aunt_emma",
"title": "Sos Your Aunt Emma",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/the-balloonatic-0",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Balloonatic_%25281923%2529_1.jpg",
"title": "The Balloonatic",
"year": 1923},
{"url": "http://publicdomainmovie.net/movie/manos-the-hands-of-fate-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Manosposter.jpg",
"title": "Manos: The Hands of Fate",
"year": 1966},
{"url": "http://publicdomainmovie.net/movie/atomic-rulers-of-the-world",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=AtomicRulersOfTheWorld",
"title": "Atomic Rulers Of The World",
"year": 1964},
{"url": "http://publicdomainmovie.net/movie/raiders-of-old-california",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=RaidersofOldCalifornia_title.jpg",
"title": "Raiders of Old California",
"year": 1957},
{"url": "http://publicdomainmovie.net/movie/gang-bullets-1",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Charles_Trowbridge_in_Gang_Bullets.jpg",
"title": "Gang Bullets",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/cavalcade-of-the-west",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Hoot_Gibson_in_Cavalcade_of_the_West.jpg",
"title": "Cavalcade of the West",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/the-terror-widescreen-720p-hd",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=RogerCormansTheTerror720p",
"title": "The Terror",
"year": 1963},
{"url": "http://publicdomainmovie.net/movie/call-of-the-yukon-0",
"genre": "Action",
"image": "http://publicdomainmovie.net/image.php?id=call_of_the_yukon_1938",
"title": "Call of the Yukon",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/maniac-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=Maniac",
"title": "Maniac",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/capture-the",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=the_capture_ipod",
"title": "The Capture",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/extravagance-1930",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=June_Collyer_in_Extravagance.jpg",
"title": "Extravagance",
"year": 1930},
{"url": "http://publicdomainmovie.net/movie/downhill",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=Downhill_1927",
"title": "Downhill",
"year": 1927},
{"url": "http://publicdomainmovie.net/movie/the-bat",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=The_Bat_Vincent_Price",
"title": "The Bat",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/trapped",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=Trapped_",
"title": "Trapped",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/detour",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Detour_%2528poster%2529.jpg",
"title": "Detour",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/monster-walks-the",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Monster_Walks_1932_poster.jpg",
"title": "The Monster Walks",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/beggars-in-ermine",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=beggars_in_ermine_ipod",
"title": "Beggars in Ermine",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/hellfire-austin",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Hell_Fire_Austin_%25281932%2529_1.jpg",
"title": "Hellfire Austin",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/he-walked-by-night-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=He_Walked_by_Night_1948",
"title": "He Walked by Night",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/amazing-mr-x-the-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=amazing_mr_x_ipod",
"title": "The Amazing Mr. X",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-rounders",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Rounders_poster.jpg",
"title": "Charlie Chaplins The Rounders",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-his-trysting-place",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=His_Trysting_Place.jpg",
"title": "Charlie Chaplins His Trysting Place",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/riders-of-destiny",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Ridersdestiny1933.jpg",
"title": "Riders of Destiny",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/attack-of-the-giant-leeches-1",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=AttackOfTheGiantLeeches1959",
"title": "Attack of the Giant Leeches",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/metropolis-1925-shorter-version",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Maria_from_metropolis.JPG",
"title": "Metropolis",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/wc-fields-the-dentist",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=W.c.Fields-TheDentist",
"title": "W.C. Fields - The Dentist",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/dixie-jamboree",
"genre": "Musical",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Charles_Butterworth-Guy_Kibbee_in_Dixie_Jamboree.jpg",
"title": "Dixie Jamboree",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/smash-up-the-story-of-a-woman",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Smash-Up%252C_the_Story_of_a_Woman_%25281947%2529_1.jpg",
"title": "Smash-Up: The Story of a Woman",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/inspector-general-the",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=inspector_general_ipod_version",
"title": "The Inspector General",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/bowery-at-midnight",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Bowery_At_Midnight_1942.jpg",
"title": "Bowery at Midnight",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/the-red-house",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=TheRedHouse_123",
"title": "The Red House",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/go-down-death",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=go_down_death",
"title": "Go Down, Death!",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/speak-easily-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Pressbook_herald_Speak_Easily_1932_Buster_Keaton_Jimmy_Durante_Thelma_Todd.jpg",
"title": "Speak Easily",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/sensation-hunters",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=sensation_hunters",
"title": "Sensation Hunters",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/a-shriek-in-the-night",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=A_Shriek_in_the_Night",
"title": "A Shriek in the Night",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/the-real-bruce-lee-1973",
"genre": "Biography",
"image": "http://publicdomainmovie.net/image.php?id=TheRealBruceLee1973",
"title": "The Real Bruce Lee",
"year": 1979},
{"url": "http://publicdomainmovie.net/movie/evel-knievel",
"genre": "Biography",
"image": "http://publicdomainmovie.net/image.php?id=EvelKnievel",
"title": "Evel Knievel",
"year": 1971},
{"url": "http://publicdomainmovie.net/movie/the-last-woman-on-earth",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=LastWonanOnEarthColor",
"title": "The Last Woman On Earth",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/pollyanna",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Pollyanna-still.JPG",
"title": "Pollyanna",
"year": 1919},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-rival-mashers-aka-those-love-pangs",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Those_Love_Pangs.jpg",
"title": "Charlie Chaplins The Rival Mashers aka Those Love Pangs",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/voyage-to-the-planet-of-prehistoric-women-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=VoyageToThePlanetOfPrehistoricWomen_20130813",
"title": "Voyage To The Planet Of Prehistoric Women",
"year": 1967},
{"url": "http://publicdomainmovie.net/movie/paradise-isle-0",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Movita_in_Paradise_Isle_%25281937%2529_1.jpg",
"title": "Paradise Isle",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/battleship-potemkin-1925",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Vintage_Potemkin.jpg",
"title": "Battleship Potemkin",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/test-tube-babies",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Test_Tube_Babies_%25281948%2529_-_Title.jpg",
"title": "Test Tube Babies",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/the-mad-monster",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=The.Mad.Monster",
"title": "The Mad Monster",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-charlott-mabel-aux-courses-aka-gentlemen-of-nerve",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Gentlemen_of_nerve.jpg",
"title": "Charlie Chaplins Charlott Mabel Aux Courses",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/gung-ho",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Gung_Ho",
"title": "Gung Ho!",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/sabotage-0",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/image.php?id=sabotagemovie",
"title": "Sabotage",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/paradise-express",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Dorothy_Appleby-Grant_Withers_in_Paradise_Express.jpg",
"title": "Paradise Express",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/the-man-from-utah",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Man_From_Utah_poster.jpg",
"title": "The Man from Utah",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/the-general-1926",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_general_movie_poster.jpg",
"title": "The General",
"year": 1926},
{"url": "http://publicdomainmovie.net/movie/the-green-promise",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=The_Green_Promise",
"title": "The Green Promise",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/convict-13",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Keaton_Convict_13_1920.jpg",
"title": "Convict 13",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/revolt-of-the-zombies",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=RevoltoftheZombies_",
"title": "Revolt of the Zombies",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/neighbors",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=Neighbors",
"title": "Neighbors",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/horrors-of-spider-island-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=HorrorsOfSpiderIsland",
"title": "Horrors of Spider Island",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/the-wizard-of-oz-1925",
"genre": "Musical",
"image": "http://publicdomainmovie.net/image.php?id=TheWizardOfOz1925",
"title": "The Wizard Of Oz",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/orphans-of-the-storm",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Orphans_of_the_Storm_1921_poster.jpg",
"title": "Orphans of the Storm",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/behind-office-doors",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Mary_Astor-Ricardo_Cortez_in_Behind_Office_Doors.jpg",
"title": "Behind Office Doors",
"year": 1931},
{"url": "http://publicdomainmovie.net/movie/santa-claus-conquers-the-martians",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Santa_Claus_Conquers_the_Martians_1.jpg",
"title": "Santa Claus Conquers The Martians",
"year": 1964},
{"url": "http://publicdomainmovie.net/movie/the-big-combo",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Big_Combo_poster.jpg",
"title": "The Big Combo",
"year": 1955},
{"url": "http://publicdomainmovie.net/movie/the-atomic-brain",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=TheAtomicBrain",
"title": "The Atomic Brain",
"year": 1964},
{"url": "http://publicdomainmovie.net/movie/his-girl-friday-1940",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=HisGirlFriday-1940",
"title": "His Girl Friday - 1940",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/the-last-time-i-saw-paris-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Last_Time_I_Saw_Paris_2.jpg",
"title": "The Last Time I Saw Paris",
"year": 1954},
{"url": "http://publicdomainmovie.net/movie/das-wandernde-bild-1920-fritz-lang",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=DasWanderndeBild1920fritzLang",
"title": "Das Wandernde Bild",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/double-exposure-ipod",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=double_exposure_ipod",
"title": "Double Exposure",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/evel-knievel-0",
"genre": "Biography",
"image": "http://publicdomainmovie.net/image.php?id=EvelKnievel_842",
"title": "Evel Knievel",
"year": 1971},
{"url": "http://publicdomainmovie.net/movie/quicksand-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=quicksand.",
"title": "Quicksand",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/the-giant-gila-monster",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=TheGiantGilaMonster",
"title": "The Giant Gila Monster",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/shadows-of-death",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Buster_Crabbe-Al_St._John_in_Shadows_of_Death.jpg",
"title": "Shadows of Death",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/mystery-liner",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Gustav_von_Seyffertitz_in_Mystery_Liner.jpg",
"title": "Mystery Liner",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/suddenly-1",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=Suddenly_1954",
"title": "Suddenly",
"year": 1954},
{"url": "http://publicdomainmovie.net/movie/the-world-gone-mad",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=WorldGoneMad",
"title": "The World Gone Mad",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/the-monster-maker",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=The_Monster_Maker",
"title": "The Monster Maker",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/smart-alecks",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=smart_alecks",
"title": "Smart Alecks",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/bank-alarm-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Bank_Alarm_FilmPoster.jpeg",
"title": "Bank Alarm",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/killers-from-space-0",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=KillersFromSpace",
"title": "Killers from Space",
"year": 1954},
{"url": "http://publicdomainmovie.net/movie/hell-fire-austin",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Hell_Fire_Austin_%25281932%2529_1.jpg",
"title": "Hell Fire Austin",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/atom-age-vampire-1960",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=AtomAgeVampire1960",
"title": "Atom Age Vampire",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/hard-hombre",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=G._Raymond_Nye_in_Hard_Hombre.jpg",
"title": "Hard Hombre",
"year": 1931},
{"url": "http://publicdomainmovie.net/movie/go-for-broke-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Go_for_Broke",
"title": "Go for Broke!",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/house-of-mystery-1934",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=HouseOfMystery1934",
"title": "House Of Mystery",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/love-laughs-at-andy-hardy-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=andy_hardy",
"title": "Love Laughs at Andy Hardy",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/bowery-blitzkrieg-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=bowery_blitzkrieg_ipod",
"title": "Bowery Blitzkrieg",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/gullivers-travels-0",
"genre": "Animation",
"image": "http://publicdomainmovie.net/image.php?id=gullivers_travels1939_divx",
"title": "Gullivers Travels",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/daniel-boone-1936",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=DanielBoone1936",
"title": "Daniel Boone",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/zane-grey%C2%80%C2%99s-to-the-last-man-1933-randolph-scott",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=ZaneGreysToTheLastMan1933-RandolphScott",
"title": "To the Last Man",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/the-cat-and-the-canary",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Thecatandthecanary-windowcard-1927.jpg",
"title": "The Cat and the Canary",
"year": 1927},
{"url": "http://publicdomainmovie.net/movie/santa-fe-trail-0",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=Santa_Fe_Trail",
"title": "Santa Fe Trail",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/three-broadway-girls",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=David_Manners%252C_Madge_Evans%252C_Joan_Blondell%252C_Ina_Claire.jpg",
"title": "Three Broadway Girls",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/the-desert-trail",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Desert_trailWayne.jpg",
"title": "The Desert Trail",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/teenagers-battle-the-thing",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Teenagersouterspace.jpg",
"title": "Teenagers Battle The Thing",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/the-big-wheel",
"genre": "Action",
"image": "http://publicdomainmovie.net/image.php?id=TheBigWheel",
"title": "The Big Wheel",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/charade",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Charade_1953",
"title": "Charade",
"year": 1953},
{"url": "http://publicdomainmovie.net/movie/eternal-evil",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=EternalEvil",
"title": "Eternal Evil",
"year": 1985},
{"url": "http://publicdomainmovie.net/movie/murder-in-harlem",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=Murder_in_Harlem",
"title": "Murder in Harlem",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/the-little-shop-of-horrors-1",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=The.Little.Shop.of.Horrors.60",
"title": "The Little Shop of Horrors",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/the-poor-little-rich-girl",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=A_Poor_Little_Rich_Girl.jpg",
"title": "The Poor Little Rich Girl",
"year": 1917},
{"url": "http://publicdomainmovie.net/movie/my-favorite-brunette-1",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=My_Favorite_Brunette_Xvid",
"title": "My Favorite Brunette",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/navy-way-the",
"genre": "War",
"image": "http://publicdomainmovie.net/image.php?id=navy_way",
"title": "The Navy Way",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/star-reporter-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=star_reporter_ipod",
"title": "Star Reporter",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/teenagers-from-outer-space-1959",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Teenagersouterspace.jpg",
"title": "Teenagers From Outer Space (1959)",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/the-great-dan-patch",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Great_Dan_Patch_%25281949%2529_1.jpg",
"title": "The Great Dan Patch",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/jack-and-the-beanstalk-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=A%2526cjack.jpg",
"title": "Jack and the Beanstalk",
"year": 1952},
{"url": "http://publicdomainmovie.net/movie/santa-claus-versus-the-devil",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=SantaClausversusTheDevil",
"title": "Santa Claus Vs. the Devil",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/three-guys-named-mike-1951",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=ThreeGuysNamedMike1951",
"title": "Three Guys Named Mike",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/things-to-come-0",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=ThingsToCome_835",
"title": "Things To Come",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/hats-off",
"genre": "Musical",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Luis_Alberni_in_Hats_Off.jpg",
"title": "Hats Off",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/wheres-that-fire-1940",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=WheresThatFire1940",
"title": "Wheres That Fire",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/the-fall-of-the-house-of-usher",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Fall_of_the_House_of_Usher1928filmshot.png",
"title": "The Fall of the House of Usher",
"year": 1928},
{"url": "http://publicdomainmovie.net/movie/high-voltage",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=High_Voltage_%25281929%2529.webm",
"title": "High Voltage",
"year": 1929},
{"url": "http://publicdomainmovie.net/movie/the-big-lift",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Cornell_Borchers-Montgomery_Clift_in_The_Big_Lift.jpg",
"title": "The Big Lift",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/midnight-manhunt",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=Midnight.Manhunt",
"title": "Midnight Manhunt",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/gang-bullets",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Charles_Trowbridge_in_Gang_Bullets.jpg",
"title": "Gang Bullets",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/the-violent-years",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Violent_Years_poster.jpg",
"title": "The Violent Years",
"year": 1956},
{"url": "http://publicdomainmovie.net/movie/broken-blossoms-1919",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Broken_Blossoms.webm",
"title": "Broken Blossoms",
"year": 1919},
{"url": "http://publicdomainmovie.net/movie/isle-of-destiny",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=June_Lang_in_Isle_of_Destiny.jpg",
"title": "Isle of Destiny",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/secret-valley",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Willie_Fung-Virginia_Grey_in_Secret_Valley.jpg",
"title": "Secret Valley",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/the-big-lift-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Cornell_Borchers-Montgomery_Clift_in_The_Big_Lift.jpg",
"title": "The Big Lift",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/sky-patrol-0",
"genre": "War",
"image": "http://publicdomainmovie.net/image.php?id=sky_patrol",
"title": "Sky Patrol",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/the-driller-killer-96-minute-uncut-version",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=TheDrillerKiller",
"title": "The Driller Killer",
"year": 1979},
{"url": "http://publicdomainmovie.net/movie/dementia-13-2",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=RogerCormansDementiaThirteen720p",
"title": "Dementia 13",
"year": 1963},
{"url": "http://publicdomainmovie.net/movie/the-three-musketeers",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_three_musketeers_fairbanks.jpg",
"title": "The Three Musketeers",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/the-bat-1926",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=TheBat1926",
"title": "The Bat",
"year": 1926},
{"url": "http://publicdomainmovie.net/movie/rocky-mountain-mystery",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=Rocky_Mountain_Mystery_1935",
"title": "Rocky Mountain Mystery",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/cheers-for-miss-bishop",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Mary_Anderson_in_Cheers_for_Miss_Bishop.jpg",
"title": "Cheers for Miss Bishop",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/navy-blues",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=NavyBlues1937titlecard.jpg",
"title": "Navy Blues",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/the-secret-of-doctor-kildare",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Secret_of_Doctor_Kildare_1939_2138",
"title": "The Secret of Doctor Kildare",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/headin-home",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Heading_Home_%25281920%2529_-_5.jpg",
"title": "Headin Home",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/sherlock-holmes-the-woman-in-green",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Woman_in_Green_%25281945%2529_3.jpg",
"title": "Sherlock Holmes - The Woman In Green",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/the-kennel-murder-case-0",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=The_Kennel_Murder_Case_1933",
"title": "The Kennel Murder Case",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/the-scar-1",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=The_Scar-1948",
"title": "The Scar",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/earthworm-tractors-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Earthworm_Tractors_%25281936%2529_1.jpg",
"title": "Earthworm Tractors",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/one-body-too-many-0",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=One_Body_Too_Many",
"title": "One Body Too Many",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/his-new-profession",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=His_New_Profession.jpg",
"title": "His New Profession",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/the-dance-of-life",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_of_the_movie_The_Dance_of_Life.jpg",
"title": "The Dance of Life",
"year": 1929},
{"url": "http://publicdomainmovie.net/movie/bulldog-drummonds-bride",
"genre": "Action",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Bulldog_Drummond%2527s_Bride_%25281939%2529_1.jpg",
"title": "Bulldog Drummonds Bride",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/aces-and-eights-ipod-version",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Aces_and_Eights_poster.jpg",
"title": "Aces and Eights",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/a-shriek-in-the-night-0",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=A.Shriek.in.the.Night",
"title": "A Shriek in the Night",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/the-flapper",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Flapper_%25281920%2529.webm",
"title": "The Flapper",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/a-fool-there-was",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Fooltherewas1915movieposter.jpg",
"title": "A Fool There Was",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/dangerous-passage",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=dangerous_passage",
"title": "Dangerous Passage",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/one-body-too-many-1",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=One_Body_Too_Many_1944",
"title": "One Body Too Many",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/manhattan-tower",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=ManhattanTower1932titlecard.jpg",
"title": "Manhattan Tower",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/the-magic-cloak-of-oz",
"genre": "Fantasy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Magic_Cloak_of_Oz_%25281914%2529.webm",
"title": "The Magic Cloak of Oz",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/car-of-dreams-1935",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=CarOfDreams1935",
"title": "Car of Dreams",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/plan-9-from-outer-space-1958",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Plan_9_Alternative_poster.jpg",
"title": "Plan 9 from Outer Space",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/penny-serenade-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=PennySerenade",
"title": "Penny Serenade",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/dark-alibi",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=Dark_Alibi_1946",
"title": "Dark Alibi",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/gamblers-choice",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=gamblers_choice",
"title": "Gamblers Choice",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/million-dollar-weekend",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Gene_Raymond-Osa_Massen_in_Million_Dollar_Weekend.jpg",
"title": "Million Dollar Weekend",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/the-woman-in-green-2",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Woman_in_Green_%25281945%2529_3.jpg",
"title": "The Woman in Green",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/blonde-ice",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=BlondeIce",
"title": "Blonde Ice",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/the-nut",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The-nut-1921.jpg",
"title": "The Nut",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/between-showers",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=BetweenShowers",
"title": "Between Showers",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/terror-by-night-2",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Terror_by_Night_1946.jpg",
"title": "Terror By Night",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/strike",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Strike2.JPG",
"title": "Strike",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/trapped-by-television-1936",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=TrappedByTelevision1936",
"title": "Trapped By Television",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/sister-streetfighter-1974",
"genre": "Action",
"image": "http://publicdomainmovie.net/image.php?id=SisterStreetfighter1974",
"title": "Sister Streetfighter",
"year": 1974},
{"url": "http://publicdomainmovie.net/movie/kansas-city-confidential-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=Kansas_City_Confidential_1952",
"title": "Kansas City Confidential",
"year": 1952},
{"url": "http://publicdomainmovie.net/movie/the-ace-of-hearts",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Ace_of_Hearts_%25281921%2529.webm",
"title": "The Ace of Hearts",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/the-rage-of-paris",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Rage_of_Paris_Poster.jpg",
"title": "The Rage of Paris",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/the-lodger-a-story-of-the-london-fog",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/image.php?id=AHtheLodger",
"title": "The Lodger: A Story of the London Fog",
"year": 1926},
{"url": "http://publicdomainmovie.net/movie/joyless-street",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Greta_Garbo04.jpg",
"title": "Joyless Street",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/the-capture",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=The_Capture_movie",
"title": "The Capture",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/pecks-bad-boy",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Jackie_Cooper_in_Pecks_Bad_Boy.png",
"title": "Pecks Bad Boy",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/wives-under-suspicion",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Wives_Under_Suspicion_poster3.jpg",
"title": "Wives under Suspicion",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/horror-express-with-spanish-subtitles",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=HorrorExpresswithSpanishSubtitles",
"title": "Horror Express (with spanish subtitles)",
"year": 1972},
{"url": "http://publicdomainmovie.net/movie/a-days-pleasure",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=CC_Day%2527s_Pleasure_1919.jpg",
"title": "A Days Pleasure",
"year": 1919},
{"url": "http://publicdomainmovie.net/movie/backstage",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=Backstage_928",
"title": "Backstage",
"year": 1919},
{"url": "http://publicdomainmovie.net/movie/timber-queen",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=timber_queen",
"title": "Timber Queen",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/mark-of-the-avenger",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=mark_of_the_avenger",
"title": "Mark of the Avenger",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/seven-were-saved",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Seven_Were_Saved_%25281947%2529_1.jpg",
"title": "Seven Were Saved",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/arizona-days-0",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=Arizona_Days",
"title": "Arizona Days",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/power-dive",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Screen_shot_Power_Dive.png",
"title": "Power Dive",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/a-stranger-in-town",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=A_Stranger_in_Town_%25281943%2529_1.jpg",
"title": "A Stranger in Town",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/the-haunted-house-1921",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Haunted_house1921.jpg",
"title": "The Haunted House",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/frolics-on-ice",
"genre": "Musical",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Edgar_Kennedy_in_Everything%2527s_on_Ice.jpg",
"title": "Frolics On Ice",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/cleopatra",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Helen_Gardner_as_Cleopatra.jpg",
"title": "Cleopatra",
"year": 1912},
{"url": "http://publicdomainmovie.net/movie/easy-virtue",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=EasyVirtue1928",
"title": "Easy Virtue",
"year": 1928},
{"url": "http://publicdomainmovie.net/movie/something-to-sing-about",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=SomethingtoSingAbout",
"title": "Something to Sing About",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/the-trap",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Trap_%25281946%2529_-_Title.jpg",
"title": "The Trap",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/the-big-trees",
"genre": "Action",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Kirk_douglas_big_trees04.jpg",
"title": "The Big Trees",
"year": 1952},
{"url": "http://publicdomainmovie.net/movie/chained-for-life",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=ChainedforLife",
"title": "Chained for Life",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/the-little-princess",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Little_Princess_1939.jpg",
"title": "The Little Princess",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/lady-windermeres-fan",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=LadyWindermeresFan",
"title": "Lady Windermeres Fan",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/the-mystery-man",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Mystery_Man_1935",
"title": "The Mystery Man",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/the-moonstone",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=TheMoonstone",
"title": "The Moonstone",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/desert-phantom",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Desert_Phantom_FilmPoster.jpeg",
"title": "Desert Phantom",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/hi-diddle-diddle",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Hi_Diddle_Diddle_%25281943%2529_1.jpg",
"title": "Hi Diddle Diddle",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/jesse-james-meets-frankensteins-daughter-1966",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=JesseJamesMeetsFrankensteinsDaughter1966",
"title": "Jesse James Meets Frankensteins Daughter",
"year": 1966},
{"url": "http://publicdomainmovie.net/movie/king-kelly-of-the-usa",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=KingKellyoftheUSA",
"title": "King Kelly of the U.S.A.",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/the-little-shop-of-horrors",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=The_Little_Shop_of_Horrors_60",
"title": "The Little Shop of Horrors",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/the-phantom-planet-0",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=ThePhantomPlanet",
"title": "The Phantom Planet",
"year": 1961},
{"url": "http://publicdomainmovie.net/movie/the-red-house-ipod",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/image.php?id=the_red_house_ipod",
"title": "The Red House",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/fighting-caravans-1931",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Fighting_Caravans_1931_Poster.jpg",
"title": "Fighting Caravans",
"year": 1931},
{"url": "http://publicdomainmovie.net/movie/a-christmas-carol-1910",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=A_Chritmas_Carol_%25281910%2529.ogv",
"title": "A Christmas Carol",
"year": 1910},
{"url": "http://publicdomainmovie.net/movie/peliculas-de-chaplin",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=PeliculasDeChaplin-1914",
"title": "Peliculas de Chaplin",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/indestructible-man-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Indestructible_Man_%25281956%2529_still_1.jpg",
"title": "Indestructible Man",
"year": 1956},
{"url": "http://publicdomainmovie.net/movie/hoosier-schoolboy",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=hoosier_schoolboy",
"title": "Hoosier Schoolboy",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/dr-christian-meets-the-women",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=dr_christian_meets_the_women",
"title": "Dr. Christian Meets The Women",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/battleship-potemkin-0",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Vintage_Potemkin.jpg",
"title": "Battleship Potemkin",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/the-lost-world-0",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Lost_World_%25281925%2529_-_film_poster.jpg",
"title": "The Lost World",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/afgrunden-the-abyss",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Afgrunden_1910",
"title": "The Abyss",
"year": 1910},
{"url": "http://publicdomainmovie.net/movie/dark-journey-1937",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Dark_Journey_FilmPoster.jpeg",
"title": "Dark Journey",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/rage-at-dawn-1",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=Rage_at_Dawn_movie",
"title": "Rage at Dawn",
"year": 1955},
{"url": "http://publicdomainmovie.net/movie/the-great-flamarion",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=GreatFlamarion.jpg",
"title": "The Great Flamarion",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/terror-by-night",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=TerrorByNight",
"title": "Terror By Night",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/the-hands-of-orlac-1924",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=TheHandsOfOrlac1924",
"title": "The Hands of Orlac",
"year": 1924},
{"url": "http://publicdomainmovie.net/movie/the-brain-that-wouldnt-die-theatrical-version-82-min",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Brainthatwouldntdie_film_poster.jpg",
"title": "The Brain That Wouldnt Die Theatrical Version",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/the-stranger-720p-hd",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Stranger_1946_%25283%2529.jpg",
"title": "The Stranger",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/guest-in-the-house-1944-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=GuestInTheHouse1944_573",
"title": "Guest in the House",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/people-are-funny",
"genre": "Musical",
"image": "http://publicdomainmovie.net/image.php?id=PeopleareFunny_",
"title": "People Are Funny",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/carnival-of-souls-ipod-video-version",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=carnival_of_souls",
"title": "Carnival of Souls",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/the-phantom-carriage",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=ThePhantomCarriage",
"title": "The Phantom Carriage",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/dr-mabuse-the-gambler-dr-mabuse-der-spieler-1922-part-1",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/wikimedia.php?id=%25D0%2594%25D0%25BE%25D0%25BA%25D1%2582%25D0%25BE%25D1%2580_%25D0%259C%25D0%25B0%25D0%25B1%25D1%2583%25D0%25B7%25D0%25BE_%25D0%25BF%25D0%25BB%25D0%25B0%25D0%25BA%25D0%25B0%25D1%2582_%25D0%259C%25D0%25B0%25D0%25BB%25D0%25B5%25D0%25B2%25D0%25B8%25D1%2587%25D0%25B0.jpg",
"title": "Dr. Mabuse, The Gambler (Dr. Mabuse der Spieler) Part 1",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/the-eagle",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=the_eagle",
"title": "The Eagle",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/sin-takes-a-holiday",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Sin_Takes_a_Holiday_%25281930%2529_lobby_card_1.jpg",
"title": "Sin Takes A Holiday",
"year": 1930},
{"url": "http://publicdomainmovie.net/movie/broken-blossoms",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Broken_Blossoms.webm",
"title": "Broken Blossoms",
"year": 1919},
{"url": "http://publicdomainmovie.net/movie/desperate-cargo",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Screen_shot_Desperate_Cargo.png",
"title": "Desperate Cargo",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/lying-lips",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Lying_Lips_Poster_1939.jpg",
"title": "Lying Lips",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/the-death-kiss",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Thedeathkissposter.jpg",
"title": "The Death Kiss",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/robert-wienes-genuine-a-tale-of-a-vampire-1920",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Genuine1920.jpg",
"title": "GENUINE A Tale of a Vampire",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/lil-abner-1",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Billie_Seward-Jeff_York_in_Li%2527l_Abner.jpg",
"title": "Lil Abner",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/the-woman-in-green-1",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Woman_in_Green_%25281945%2529_3.jpg",
"title": "The Woman in Green",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/mr-motos-last-warning-complete-upgraded",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=Mr.MotosLastWarningcompleteUpgraded",
"title": "Mr. Motos Last Warning",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/dressed-to-kill-0",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=DressedToKill1946",
"title": "Dressed To Kill",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/c-man",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=C-Man",
"title": "C-Man",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/invasion-of-the-bee-girls-widescreen-quality-upgrade",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=InvasionOfTheBeeGirlsWidescreenQualityUpgrade",
"title": "INVASION OF THE BEE GIRLS",
"year": 1973},
{"url": "http://publicdomainmovie.net/movie/his-wedding-night",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Hisweddingnight-newspaperad-1917.jpg",
"title": "His Wedding Night",
"year": 1917},
{"url": "http://publicdomainmovie.net/movie/letter-of-introduction",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=LetterofIntroduction",
"title": "Letter of Introduction",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/no-hands-on-the-clock",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=No_Hands_on_the_Clock_1941",
"title": "No Hands on the Clock",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/the-haunted-house",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Haunted_house1921.jpg",
"title": "The Haunted House",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/rogue-of-the-range",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Rogue_of_the_Range_%25281936%2529_-_Title.jpg",
"title": "Rogue Of The Range",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/the-thirteenth-guest",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=The_Thirteenth_Guest",
"title": "The Thirteenth Guest",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/night-of-the-living-dead-1968-restored",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=NightOfTheLivingDead1968-Restored",
"title": "Night of the Living Dead",
"year": 1968},
{"url": "http://publicdomainmovie.net/movie/the-balloonatic",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Balloonatic_%25281923%2529_1.jpg",
"title": "The Balloonatic",
"year": 1923},
{"url": "http://publicdomainmovie.net/movie/the-general-line",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=Eisenstein-TheGeneralLine",
"title": "The General Line",
"year": 1929},
{"url": "http://publicdomainmovie.net/movie/scared-stiff-treasure-of-fear",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=Scared_Stiff",
"title": "Scared Stiff (Treasure of Fear)",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/the-fat-spy",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=TheFatSpy",
"title": "The Fat Spy",
"year": 1966},
{"url": "http://publicdomainmovie.net/movie/why-change-your-wife",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_Why_Change_Your_Wife_01.jpg",
"title": "Why Change Your Wife?",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/in-the-park-tinted",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=In_the_Park_%2528poster%2529.jpg",
"title": "In the Park",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/gang-bullets-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Charles_Trowbridge_in_Gang_Bullets.jpg",
"title": "Gang Bullets",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/the-woman-in-green",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Woman_in_Green_%25281945%2529_3.jpg",
"title": "The Woman In Green",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/devil-monster",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=DevilMonster1946",
"title": "Devil Monster",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/wagon-wheels",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=WagonWheels",
"title": "WAGON WHEELS",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/the-brain-that-wouldnt-die-2",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Brainthatwouldntdie_film_poster.jpg",
"title": "The Brain That Wouldnt Die",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/sky-high-1922",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Skyhigh1922-newspaperad.jpg",
"title": "Sky High",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/carson-city-kid",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Roy_Rogers_in_The_Carson_City_Kid.jpg",
"title": "Carson City Kid",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/the-painted-desert",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The-Painted-Desert-1931.jpg",
"title": "The Painted Desert",
"year": 1931},
{"url": "http://publicdomainmovie.net/movie/gullivers-travels-1",
"genre": "Animation",
"image": "http://publicdomainmovie.net/image.php?id=GulliversTravels720p",
"title": "Gullivers Travels",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/tomorrows-youth",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=tomorrows_youth",
"title": "Tomorrows Youth",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/zwenigora-%D0%B7%D0%B2%D0%B5%D0%BD%D0%B8%D0%B3%D0%BE%D1%80%D0%B0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Zwenigora",
"title": "Zwenigora (Звенигора)",
"year": 1928},
{"url": "http://publicdomainmovie.net/movie/the-gorilla-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=The.Gorilla",
"title": "The Gorilla",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/the-beloved-rogue",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_of_the_movie_The_Beloved_Rogue.jpg",
"title": "The Beloved Rogue",
"year": 1927},
{"url": "http://publicdomainmovie.net/movie/white-zombie-1",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_White_Zombie_01_Crisco_restoration.jpg",
"title": "White Zombie",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/storm-in-a-teacup",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=StormInATeacup1937",
"title": "Storm in a Teacup",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/the-buckskin-lady",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Buckskin_Lady_%25281957%2529_1.jpg",
"title": "The Buckskin Lady",
"year": 1957},
{"url": "http://publicdomainmovie.net/movie/the-vampire-bat-1933-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Vampirebat.jpg",
"title": "The Vampire Bat",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/voyage-to-the-planet-of-prehistoric-women-weirdness-bad-movie",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=VoyageToThePlanetOfPrehistoricWomen-WeirdnessBadMovie",
"title": "Voyage to the Planet of Prehistoric Women",
"year": 1967},
{"url": "http://publicdomainmovie.net/movie/port-of-missing-girls",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=Port_of_Missing_Girls_1938",
"title": "Port of Missing Girls",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/the-cheat",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Cheat_FilmPoster.jpeg",
"title": "The Cheat",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/are-crooks-dishonest",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Are_crooks_dishonest_%25281918%2529.jpg",
"title": "Are Crooks Dishonest?",
"year": 1918},
{"url": "http://publicdomainmovie.net/movie/dark-mountain",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Dark_Mountain_1944",
"title": "Dark Mountain",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/beneath-the-12-mile-reef-1953-full-screen-version",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Beneath_the_12-Mile_Reef_%25281953%2529_2.jpg",
"title": "Beneath the 12-Mile Reef - Full Screen Version",
"year": 1953},
{"url": "http://publicdomainmovie.net/movie/headless-horseman",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Headless_Horseman_1922.jpg",
"title": "Headless Horseman",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/within-our-gates",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Within_Our_Gates_1920_newspaper_ad.jpg",
"title": "Within Our Gates",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/red-lights-ahead",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Roger_Imhof_in_Red_Lights_Ahead.jpg",
"title": "Red Lights Ahead",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/angel-and-the-badman-0",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Angel_badman.jpg",
"title": "Angel and the Badman",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/manhattan-love-song",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=ManhattanLoveSong",
"title": "Manhattan Love Song",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/i-cover-the-waterfront-1933",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=ICoverTheWaterfront1933",
"title": "I Cover the Waterfront",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/the-high-sign",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=TheHighSign",
"title": "The High Sign",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/boys-will-be-boys",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=%2522Boys_Will_Be_Boys%2522_%25281935%2529.jpg",
"title": "Boys Will Be Boys",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/tormented",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=Tormented_861",
"title": "Tormented",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/nancy-drew-reporter-0",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=NancyDrewReporter",
"title": "Nancy Drew ...Reporter",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/judge-priest-1934",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=JudgePriest1934",
"title": "Judge Priest",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/murder-by-television-1935",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=MurderByTelevision1935",
"title": "MURDER BY TELEVISION",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/beau-ideal",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=LesterVailRalphForbesInBeauIdeal.jpg",
"title": "Beau Ideal",
"year": 1931},
{"url": "http://publicdomainmovie.net/movie/moon-of-the-wolf-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=MoonoftheWolfMPEG2",
"title": "Moon of the Wolf",
"year": 1972},
{"url": "http://publicdomainmovie.net/movie/the-big-lift-complete",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Cornell_Borchers-Montgomery_Clift_in_The_Big_Lift.jpg",
"title": "The Big Lift (complete)",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/mother",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=Mother_883",
"title": "Mother",
"year": 1926},
{"url": "http://publicdomainmovie.net/movie/the-screaming-skull-1958",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_for_The_Screaming_Skull.jpg",
"title": "The Screaming Skull",
"year": 1958},
{"url": "http://publicdomainmovie.net/movie/the-light-of-western-stars",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=TheLightofWesternStars_",
"title": "The Light of Western Stars",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/to-the-shores-of-iwo-jima",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=To_the_Shores_of_Iwo_Jima_titlecard.jpg",
"title": "To The Shores of Iwo Jima",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/man-of-the-forest",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_Man_of_the_Forest_%25281933%2529_01.jpg",
"title": "Man of the Forest",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/eegah-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=Eegah.1962",
"title": "Eegah",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/phantom-carraige",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=PhantomCarraige",
"title": "Phantom Carraige",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/the-painted-desert-1931-clark-gable",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The-Painted-Desert-1931.jpg",
"title": "The Painted Desert",
"year": 1931},
{"url": "http://publicdomainmovie.net/movie/gullivers-travels-1939",
"genre": "Animation",
"image": "http://publicdomainmovie.net/image.php?id=GulliversTravels720p1939",
"title": "Gullivers Travels",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/the-trail-beyond-1934",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The-Trail-Beyond-1934.jpg",
"title": "The Trail Beyond",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/the-emperor-jones",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Paul_Robeson_in_The_Emperor_Jones.jpg",
"title": "The Emperor Jones",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/red-haired-alibi",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=RedHairedAlibi",
"title": "Red Haired Alibi",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/the-desert-trail-1935",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Desert_trailWayne.jpg",
"title": "The Desert Trail",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/borderline-1950",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Borderline_%25281950_1.jpg",
"title": "Borderline",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/the-little-shop-of-horrors-1960",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=TheLittleShopOfHorrors1960_765",
"title": "The Little Shop of Horrors",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/kept-husbands",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Joel_McCrea-Dorothy_Mackaill_in_Kept_Husbands.jpg",
"title": "Kept Husbands",
"year": 1931},
{"url": "http://publicdomainmovie.net/movie/his-royal-highness",
"genre": "Musical",
"image": "http://publicdomainmovie.net/image.php?id=HisRoyalHighness",
"title": "His Royal Highness",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/i-cover-the-waterfront",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=I_Cover_the_Waterfront",
"title": "I Cover the Waterfront",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/flying-blind",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=FlyingBlind",
"title": "Flying Blind",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/algiers",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Campbell-Playhouse-Algiers.jpg",
"title": "Algiers",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/id-give-my-life-1936",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=IdGiveMyLife1936",
"title": "Id Give My Life",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/whens-your-birthday",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=When%2527s_Your_Birthday_lobby_card_1937.JPG",
"title": "Whens Your Birthday?",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/the-vampire-bat",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Vampirebat.jpg",
"title": "The Vampire Bat",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/heroes-in-blue",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=heroes_in_blue",
"title": "Heroes in Blue",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/broadway-to-cheyenne",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=BroadwaytoCheyenne",
"title": "Broadway to Cheyenne",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/the-screaming-skull",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_for_The_Screaming_Skull.jpg",
"title": "The Screaming Skull",
"year": 1958},
{"url": "http://publicdomainmovie.net/movie/bulldog-drummonds-bride-0",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Bulldog_Drummond%2527s_Bride_%25281939%2529_1.jpg",
"title": "Bulldog Drummonds Bride",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/the-brain-that-wouldnt-die-extended-version-1962",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Brainthatwouldntdie_film_poster.jpg",
"title": "The Brain That Wouldnt Die - extended version",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/sinners-in-paradise",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=SinnersinParadise",
"title": "Sinners in Paradise",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/driller-killer-1979",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Driller_Killer.jpg",
"title": "Driller Killer",
"year": 1979},
{"url": "http://publicdomainmovie.net/movie/the-idle-class",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Idle_Class.jpg",
"title": "The Idle Class",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/manslaughter",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Manslaughter-1922.jpg",
"title": "Manslaughter",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/dixiana-1930",
"genre": "Musical",
"image": "http://publicdomainmovie.net/image.php?id=Dixiana1930",
"title": "Dixiana",
"year": 1930},
{"url": "http://publicdomainmovie.net/movie/the-return-of-draw-egan",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Return_of_Draw_Egan_poster.jpg",
"title": "The Return of Draw Egan",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/sunday-calm-1923-our-gang-silent-film",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Our_Gang_production_still_%25281923%2529.jpg",
"title": "Sunday Calm",
"year": 1923},
{"url": "http://publicdomainmovie.net/movie/the-night-of-counting-the-years-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=The_Night_of_Counting_the_Years_DVD",
"title": "The Night of Counting the Years",
"year": 1969},
{"url": "http://publicdomainmovie.net/movie/little-lord-fauntleroy",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=MaryPickford4.jpg",
"title": "Little Lord Fauntleroy",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/his-double-life-1933-re-uploaded-trimmed-to-1hr8min",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=HisDoubleLife1933re-uploaded-trimmedTo1hr8min",
"title": "His Double Life",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/charlie-chaplins-the-rink-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Rink_%2528poster%2529.jpg",
"title": "Charlie Chaplins The Rink",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/sherlock-holmes-baffled",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Sherlock_Holmes_Baffled.jpg",
"title": "Sherlock Holmes Baffled",
"year": 1900},
{"url": "http://publicdomainmovie.net/movie/the-monster-walks",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Monster_Walks_1932_poster.jpg",
"title": "The Monster Walks",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/manfish",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=Manfish_",
"title": "Manfish",
"year": 1956},
{"url": "http://publicdomainmovie.net/movie/our-hospitality",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=OurHospitality_29",
"title": "Our Hospitality",
"year": 1923},
{"url": "http://publicdomainmovie.net/movie/the-sun-sets-at-dawn-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=Sun_Sets_at_Dawn_movie",
"title": "The Sun Sets at Dawn",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/hearts-in-exile",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Hearts_in_Exile_-_1915_-_newspaperad.jpg",
"title": "Hearts in Exile",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/the-vampire-bat-1933",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Vampirebat.jpg",
"title": "The Vampire Bat",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/killers-from-space-1",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=Killers_from_Space_1954",
"title": "Killers from Space",
"year": 1954},
{"url": "http://publicdomainmovie.net/movie/five-minutes-to-love",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=FiveMinutestoLove",
"title": "Five Minutes To Love",
"year": 1963},
{"url": "http://publicdomainmovie.net/movie/peliculas-de-chaplin-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=PeliculasDeChaplin-1915",
"title": "Peliculas de Chaplin",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/east-side-kids-1940",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Million_Dollar_Kid_%25281944%2529_1.jpg",
"title": "EAST SIDE KIDS",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/the-avenging-conscience",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Avenging_Conscience_movie_poster.jpg",
"title": "The Avenging Conscience",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/a-corner-in-wheat",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Corner_in_Wheat.webm",
"title": "A Corner in Wheat",
"year": 1909},
{"url": "http://publicdomainmovie.net/movie/beggars-in-ermine-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=beggars_in_ermine",
"title": "Beggars in Ermine",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/the-phantom-empire",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Phantom-Empire-Poster-1935.jpg",
"title": "The Phantom Empire",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/sherlock-holmes-the-woman-in-green-vost-fr",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Woman_in_Green_%25281945%2529_3.jpg",
"title": "Sherlock Holmes- The woman in green",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/penny-serenade-1",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=penny-serenade",
"title": "Penny Serenade",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/bliss",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Bliss_1917_HAROLD_LLOYD_BEBE_DANIELS_SNUB_POLLARD_Alfred_J_Goulding.webm",
"title": "Bliss",
"year": 1917},
{"url": "http://publicdomainmovie.net/movie/the-secret-weapon",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Sherlock_Holmes_and_the_Secret_Weapon_2.jpg",
"title": "The Secret Weapon",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/the-amazing-adventure-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=TheAmazingAdventure",
"title": "The Amazing Adventure",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/the-private-life-of-henry-viii-1933",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Private-Life-Henry-VIII.jpg",
"title": "The Private Life of Henry VIII",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/the-deerslayer-1920",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=TheDeerslayer1920",
"title": "The Deerslayer",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/sunny",
"genre": "Musical",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Sunny_%25281941_film%2529_poster_1.jpg",
"title": "Sunny",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/the-joe-louis-story",
"genre": "Biography",
"image": "http://publicdomainmovie.net/image.php?id=TheJoeLouisStory",
"title": "The Joe Louis Story",
"year": 1953},
{"url": "http://publicdomainmovie.net/movie/monte-carlo-nights",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Yola_d%2527Avril_in_Monte_Carlo_Nights.jpg",
"title": "Monte Carlo Nights",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/desert-gold",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=DesertGold",
"title": "Desert Gold",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/the-big-show",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Big_Show_Poster.jpg",
"title": "The Big Show",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/the-soilers",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Soilers_%25281923%2529.ogv",
"title": "The Soilers",
"year": 1923},
{"url": "http://publicdomainmovie.net/movie/africa-speaks-0",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Africa_Speaks_poster_1930.jpg",
"title": "Africa Speaks",
"year": 1930},
{"url": "http://publicdomainmovie.net/movie/the-duke-is-tops",
"genre": "Musical",
"image": "http://publicdomainmovie.net/image.php?id=TheDukeisTops",
"title": "The Duke is Tops",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/i-live-on-danger",
"genre": "Thriller",
"image": "http://publicdomainmovie.net/image.php?id=I_Live_on_Danger_1942",
"title": "I Live on Danger",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/horror-express-2",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=HorrorExpress",
"title": "Horror Express",
"year": 1972},
{"url": "http://publicdomainmovie.net/movie/the-road-to-ruin",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=TheRoadToRuin_457",
"title": "The Road To Ruin",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/carnival-of-souls-1",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=CarnivalOfSouls_823",
"title": "Carnival of Souls",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/anna-liisa",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=AnnaLiisa",
"title": "Anna-Liisa",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/manhattan-tower-1932-restored",
"genre": "Romance",
"image": "http://publicdomainmovie.net/wikimedia.php?id=ManhattanTower1932titlecard.jpg",
"title": "Manhattan Tower",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/little-lord-fauntleroy-1936",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=LittleLordFauntleroy720p1936",
"title": "Little Lord Fauntleroy",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/discarded-lovers",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=Discarded_Lovers_1932",
"title": "Discarded Lovers",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/dressed-to-kill-720p-1946",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=DressedToKill720p1946",
"title": "Dressed To Kill",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/pot-o-gold-vostfr",
"genre": "Musical",
"image": "http://publicdomainmovie.net/wikimedia.php?id=James_Stewart-Paulette_Goddard_in_Pot_o%2527_Gold.jpg",
"title": "Pot o Gold",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/the-ghoul-better-copy",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=TheGhoul-BetterCopy",
"title": "The Ghoul",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/attack-of-the-mushroom-people-1963",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=PhantasmagoriaTheater-AttackOfTheMushroomPeople1963147",
"title": "Attack of the Mushroom People",
"year": 1963},
{"url": "http://publicdomainmovie.net/movie/three-came-home-0",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=3CameHomePoster.jpg",
"title": "Three Came Home",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/goketsu-jiraiya-jiraiya-the-hero-1921",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=GoketsuJiraiyajiraiyaTheHero1921",
"title": "Goketsu Jiraiya (Jiraiya the Hero)",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/the-painted-desert-1931",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The-Painted-Desert-1931.jpg",
"title": "The Painted Desert",
"year": 1931},
{"url": "http://publicdomainmovie.net/movie/frankenstein-2",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Frank-1910bis.jpg",
"title": "Frankenstein",
"year": 1910},
{"url": "http://publicdomainmovie.net/movie/his-wifes-relations",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Buster_Keaton_in_My_Wife%2527s_Relations_%25281922%2529.jpg",
"title": "His Wifes Relations",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/just-neighbors",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=Just_Neighbors",
"title": "Just Neighbors",
"year": 1919},
{"url": "http://publicdomainmovie.net/movie/santa-fe-trail-1940",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=SantaFeTrail1940",
"title": "Santa Fe Trail",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/skyway",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=Skyway_1933",
"title": "Skyway",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/sitasingstheblues",
"genre": "Animation",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Sita_STB_Poster.jpg",
"title": "Sita_Sings_the_Blues",
"year": 2008},
{"url": "http://publicdomainmovie.net/movie/miss-lulu-bett",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Miss_Lulu_Bett",
"title": "Miss Lulu Bett",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/one-year-later",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=DeWitt_Jennings_in_One_Year_Later.jpg",
"title": "One Year Later",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/the-ape-1940",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Theape.jpg",
"title": "The Ape",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/impact-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Impact_1949",
"title": "Impact",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/paradise-island-1930",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=ParadiseIsland1930",
"title": "Paradise Island",
"year": 1930},
{"url": "http://publicdomainmovie.net/movie/the-terror-widescreen",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=TheTerrorwidescreen",
"title": "The Terror",
"year": 1963},
{"url": "http://publicdomainmovie.net/movie/tombstone-canyon-0",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=TombstoneCanyon",
"title": "Tombstone Canyon",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/abraham-lincoln-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=AbrahamLincoln_877",
"title": "Abraham Lincoln",
"year": 1930},
{"url": "http://publicdomainmovie.net/movie/high-powered-0",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=High_Powered",
"title": "High Powered",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/number-please",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Number%252C_please_%253F_2.jpg",
"title": "Number, Please?",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/delinquent-daughters",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Delinquentdaughters22.jpg",
"title": "Delinquent Daughters",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/dementia-13-3",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=Dementia131963",
"title": "Dementia 13",
"year": 1963},
{"url": "http://publicdomainmovie.net/movie/gulliverstravels1939vostfr",
"genre": "Animation",
"image": "http://publicdomainmovie.net/image.php?id=gullivers_travels1939_vostfr",
"title": "Gullivers Travels",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/the-gorilla",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Gorilla_%25281939%2529_1.jpg",
"title": "The Gorilla",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/white-zombie-1932",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_White_Zombie_01_Crisco_restoration.jpg",
"title": "White Zombie",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/out-west",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Out_West_1918.JPG",
"title": "Out West",
"year": 1918},
{"url": "http://publicdomainmovie.net/movie/beneath-the-12-mile-reef-1953",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Beneath_the_12-Mile_Reef_%25281953%2529_still_1.jpg",
"title": "Beneath the 12-Mile Reef",
"year": 1953},
{"url": "http://publicdomainmovie.net/movie/the-racketeer",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Hedda_Hopper_and_Carole_Lombard_in_The_Racketeer.jpg",
"title": "The Racketeer",
"year": 1929},
{"url": "http://publicdomainmovie.net/movie/the-bat-1926-with-soundtrack",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=TheBat1926WithSoundtrack",
"title": "The Bat (with soundtrack)",
"year": 1926},
{"url": "http://publicdomainmovie.net/movie/bulldog-drummond-720p-1937",
"genre": "Action",
"image": "http://publicdomainmovie.net/image.php?id=BulldogDrummond720p1937",
"title": "Bulldog Drummond",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/sherlock-holmes-and-the-secret-weapon-0",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Sherlock_Holmes_and_the_Secret_Weapon_2.jpg",
"title": "Sherlock Holmes and the Secret Weapon",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/the-wild-ride",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=TheWildRide_755",
"title": "The Wild Ride",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/dirty-gertie-from-harlem-usa",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=DirtyGertie",
"title": "Dirty Gertie from Harlem U.S.A.",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/horror-express-1972",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=HorrorExpress1972",
"title": "Horror Express",
"year": 1972},
{"url": "http://publicdomainmovie.net/movie/tormented-weirdness-bad-movie",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=Tormented-WeirdnessBadMovie",
"title": "Tormented",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/the-inspector-general-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Inspector_General.jpg",
"title": "The Inspector General",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/eyes-in-the-night-720p-1942",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Eyes_in_the_Night_%25281942%2529_1.jpg",
"title": "Eyes In The Night",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/invisible-ghost-1941",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=InvisibleGhost1941",
"title": "Invisible Ghost",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/get-that-man-1935",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=GetThatMan1935",
"title": "Get That Man",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/killers-from-space-2",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=KillersFromSpace_201405",
"title": "Killers From Space",
"year": 1954},
{"url": "http://publicdomainmovie.net/movie/carnival-of-souls-720p-1962",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=CarnivalOfSouls720p1962",
"title": "Carnival Of Souls",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/flirting-with-fate",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Flirting_with_Fate.jpg",
"title": "Flirting with Fate",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/juke-joint",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=JukeJoint",
"title": "Juke Joint",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/the-last-time-i-saw-paris-1",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=thelasttimeisawparis_cco",
"title": "The Last Time I Saw Paris",
"year": 1954},
{"url": "http://publicdomainmovie.net/movie/charleys-big-hearted-aunt-1940",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=%2522Charley%2527s_%2528Big-Hearted%2529_Aunt%2522_%25281940%2529.jpg",
"title": "Charleys (Big-Hearted) Aunt",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/the-fat-spy-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=TheFatSpyMPEG2",
"title": "The Fat Spy",
"year": 1966},
{"url": "http://publicdomainmovie.net/movie/never-weaken",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Never_Weaken_FilmPoster.jpeg",
"title": "Never Weaken",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/the-dawn-rider-1935",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_of_the_movie_The_Dawn_Rider.jpg",
"title": "The Dawn Rider",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/zorros-black-whip-chapter-12-trail-of-treachery",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Zorrosblackwhip.jpg",
"title": "Zorros Black Whip - Chapter 12 Trail of Treachery",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/the-phantom-in-the-house-1929",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=ThePhantomInTheHouse1929",
"title": "The Phantom In The House",
"year": 1929},
{"url": "http://publicdomainmovie.net/movie/the-fall-of-the-house-of-usher-1928",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Fall_of_the_House_of_Usher1928filmshot.png",
"title": "The Fall of the House of Usher",
"year": 1928},
{"url": "http://publicdomainmovie.net/movie/dr-jekyll-and-mr-hyde-1912",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Dr._Jekyll_and_Mr._Hyde_%25281912%2529.webm",
"title": "Dr. Jekyll And Mr. Hyde",
"year": 1912},
{"url": "http://publicdomainmovie.net/movie/the-lost-world-complete-video-quality-upgrade",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Lost_World_%25281925%2529_-_film_poster.jpg",
"title": "The Lost World",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/rhythm-in-the-clouds",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Richard_Carle_in_Rhythm_in_the_Clouds.jpg",
"title": "Rhythm in the Clouds",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/beat-the-devil-plus-fort-que-le-diable-vost-fr",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Beatthedevil01.jpg",
"title": "Beat the Devil",
"year": 1953},
{"url": "http://publicdomainmovie.net/movie/three-steps-north",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Three_Steps_North_1951",
"title": "Three Steps North",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/the-brain-that-wouldnt-die-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Brainthatwouldntdie_film_poster.jpg",
"title": "The Brain That Wouldnt Die",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/la-souriante-madame-beudet",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=LaSourianteMadameBeudet",
"title": "La Souriante Madame Beudet",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/the-adventures-of-dollie",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_adventures_of_Dollie.webm",
"title": "The Adventures of Dollie",
"year": 1908},
{"url": "http://publicdomainmovie.net/movie/angel-and-the-badman-ipod-version",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Angel_badman.jpg",
"title": "Angel and the Badman iPod version",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/the-golden-eye-video-quality-upgrade",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=TheGoldenEyeVideoQualityUpgrade",
"title": "THE GOLDEN EYE",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/band-waggon-1940",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=%2522Band_Waggon%2522_%25281940%2529.jpg",
"title": "Band Waggon",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/king-arthur-was-a-gentleman-1942",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=KingArthurWasAGentleman1942",
"title": "King Arthur Was a Gentleman",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/boys-reformatory",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=BoysReformatory",
"title": "Boys Reformatory",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/nancy-drew-reporter-720p-1939",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=NancyDrewReporter720p1939",
"title": "Nancy Drew Reporter",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/war-babies-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=WarBabies",
"title": "War Babies",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/galloping-romeo",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Galloping_Romeo_poster.jpg",
"title": "Galloping Romeo",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/frankenstein-1910",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Frank-1910bis.jpg",
"title": "Frankenstein",
"year": 1910},
{"url": "http://publicdomainmovie.net/movie/suds",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Suds_1920_silent_film_lobbycard.jpg",
"title": "Suds",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/manfish-0",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=Manfish",
"title": "Manfish",
"year": 1956},
{"url": "http://publicdomainmovie.net/movie/d-w-griffiths-the-sealed-room-1909",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Sealed_Room_%25281909%2529.webm",
"title": "THE SEALED ROOM",
"year": 1909},
{"url": "http://publicdomainmovie.net/movie/the-little-shop-of-horrors-1960-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Little_Shop_of_Horrors_Nicholson.JPG",
"title": "The Little Shop of Horrors",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/the-bat-1",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=TheBat1960",
"title": "The Bat",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/she-shoulda-said-no",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=She_Shoulda_Said_No%2521.jpg",
"title": "She Shoulda Said No!",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/the-conquering-power",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Theconqueringpower1921movieposter.jpg",
"title": "The Conquering Power",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/sinners-in-paradise-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=SinnersInParadise_287",
"title": "Sinners In Paradise",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/the-mysterious-rider",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=TheMysteriousRider",
"title": "The Mysterious Rider",
"year": 1942},
{"url": "http://publicdomainmovie.net/movie/unholy-love-1932",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=UnholyLove1932",
"title": "Unholy Love",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/randy-rides-alone-1934",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=RandyRidesAlone1934",
"title": "Randy Rides Alone",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/the-nut-farm",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The-Nut-Farm-Boxoffice-FC-1935.jpg",
"title": "The Nut Farm",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/i-thank-you-1941",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=IThankYou1941",
"title": "I Thank You",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/haldane-of-the-secret-service",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=HaldaneOfTheSecretService",
"title": "Haldane of the Secret Service",
"year": 1923},
{"url": "http://publicdomainmovie.net/movie/swing",
"genre": "Musical",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Swing_poster.jpg",
"title": "Swing!",
"year": 1938},
{"url": "http://publicdomainmovie.net/movie/mutiny-in-the-big-house",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Mutiny_in_the_Big_House_poster.jpg",
"title": "Mutiny in the Big House",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/rage-at-dawn-0",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=RageAtDawn_618",
"title": "Rage At Dawn",
"year": 1955},
{"url": "http://publicdomainmovie.net/movie/spooks-run-wildweirdness-bad-movie",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Spooksrunwild.jpg",
"title": "Spooks Run Wild",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/the-sawmill",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=TheSawMill",
"title": "The Sawmill",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/the-amazing-transparent-man-1960",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=TheAmazingTransparentMan1960",
"title": "The Amazing Transparent Man",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/tolable-david",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Tol%2527able_David-Poster.JPG",
"title": "Tol able David",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/american-aristocracy",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=American_Aristocracy_1921_newspaperad.jpg",
"title": "American Aristocracy",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/the-montana-kid",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=TheMontanaKid",
"title": "The Montana Kid",
"year": 1931},
{"url": "http://publicdomainmovie.net/movie/the-outlaw-vostfr",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Jack_Buetel-Jane_Russell_in_The_Outlaw.jpg",
"title": "The Outlaw",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/drift-fence",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=DriftFence",
"title": "Drift Fence",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/the-kennel-murder-case",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/image.php?id=KennelMurderCase1933",
"title": "The Kennel Murder Case",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/the-scar",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=TheScar",
"title": "The Scar",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/dr-christian-remedy-for-riches",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=RemedyForRiches",
"title": "Dr. Christian - Remedy For Riches",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/the-roaring-road",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Roaring_Road_%25281919%2529_-_1.jpg",
"title": "The Roaring Road",
"year": 1919},
{"url": "http://publicdomainmovie.net/movie/my-favorite-brunette",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=MyFavoriteBrunette1947",
"title": "My Favorite Brunette",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/sepia-cinderella",
"genre": "Musical",
"image": "http://publicdomainmovie.net/image.php?id=SepiaCinderella",
"title": "Sepia Cinderella",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/lady-frankenstein-german-widescreen-extended-cut",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=LadyFrankensteinGermanExtendedCut_Silent_",
"title": "Lady Frankenstein German Widescreen Extended Cut",
"year": 1971},
{"url": "http://publicdomainmovie.net/movie/terror-island",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Terror_Island_%25281920%2529_-_Ad_1.jpg",
"title": "Terror Island",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/fatty-joins-the-force",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Fattyjoinstheforce.jpg",
"title": "Fatty Joins The Force",
"year": 1913},
{"url": "http://publicdomainmovie.net/movie/springtime-in-the-sierras",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=SpringtimeInTheSierras",
"title": "Springtime In The Sierras",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/dunces-and-dangers",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Dunces_and_Dangers.jpg",
"title": "Dunces and Dangers",
"year": 1918},
{"url": "http://publicdomainmovie.net/movie/a-stranger-in-town-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=A_Stranger_in_Town_%25281943%2529_1.jpg",
"title": "A Stranger In Town",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/meet-dr-christian",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=MeetDr.Christian",
"title": "Meet Dr. Christian",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/the-general-line-staroye-i-novoye",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=StaroyeINovoye",
"title": "The General Line (Staroye I Novoye)",
"year": 1929},
{"url": "http://publicdomainmovie.net/movie/gullivers-travels-2",
"genre": "Animation",
"image": "http://publicdomainmovie.net/image.php?id=gulliverstravels1939",
"title": "Gullivers Travels",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/shadows-of-death-0",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Buster_Crabbe-Al_St._John_in_Shadows_of_Death.jpg",
"title": "Shadows of Death",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/fattys-spooning-days",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=FattysSpooningDays_888",
"title": "Fattys Spooning Days",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/sands-of-sacrifice",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Tangled_Trails_FilmPoster.jpeg",
"title": "Sands of Sacrifice",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/hearts-of-humanity",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Film_Daily_1919_Dorothy_Phillips_The_Heart_of_Humanity.png",
"title": "Hearts of Humanity",
"year": 1918},
{"url": "http://publicdomainmovie.net/movie/miss-london-ltd-1943",
"genre": "Musical",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Miss_London_Ltd._FilmPoster.jpeg",
"title": "Miss London Ltd.",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/tumbleweeds",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Tumbleweeds_1925.jpg",
"title": "Tumbleweeds",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/nothing-sacred-video-quality-upgrade",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=NothingSacredVideoQualityUpgrade",
"title": "NOTHING SACRED video quality upgrade",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/the-sentimental-bloke",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Arthur_Tauchert.jpg",
"title": "The Sentimental Bloke",
"year": 1919},
{"url": "http://publicdomainmovie.net/movie/raggedy-rose-1926",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_of_the_movie_Raggedy_Rose.jpg",
"title": "RAGGEDY ROSE",
"year": 1926},
{"url": "http://publicdomainmovie.net/movie/the-wasp-woman-1959",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=TheWaspWoman1959",
"title": "The Wasp Woman",
"year": 1960},
{"url": "http://publicdomainmovie.net/movie/home-town-story-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Hometown11.jpg",
"title": "Home Town Story",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/mason-of-the-mounted",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=MasonoftheMounted",
"title": "Mason of the Mounted",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/the-mothering-heart",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=TheMotheringHeard",
"title": "The Mothering Heart",
"year": 1913},
{"url": "http://publicdomainmovie.net/movie/born-to-battle",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Born_to_Battle_FilmPoster.jpeg",
"title": "Born to Battle",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/the-lost-zeppelin",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Lost_World_%25281925%2529_-_film_poster.jpg",
"title": "The Lost Zeppelin",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/seven-keys-to-baldpate",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Seven_Keys_to_Baldpate_1917_poster.jpg",
"title": "Seven Keys To Baldpate",
"year": 1917},
{"url": "http://publicdomainmovie.net/movie/sing-sing-nights",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=Sing_Sing_Nights_1934",
"title": "Sing Sing Nights",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/diamond-trail",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=DIAMOND_TRAIL_poster.jpg",
"title": "Diamond Trail",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/born-to-battle-1935-tom-tyler",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Born_to_Battle_FilmPoster.jpeg",
"title": "Born To Battle",
"year": 1935},
{"url": "http://publicdomainmovie.net/movie/fattys-suitless-day",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=FattysSuitlessDay",
"title": "Fattys Suitless Day",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/the-pay-off",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=ThePay-Off",
"title": "The Pay-Off",
"year": 1930},
{"url": "http://publicdomainmovie.net/movie/sherlock-holmes-and-the-secret-weapon-720p-1943",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Sherlock_Holmes_and_the_Secret_Weapon_2.jpg",
"title": "Sherlock Holmes And The Secret Weapon",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/aelita-queen-of-mars-original-version",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Aelita_screenshot.jpg",
"title": "AELITA: QUEEN OF MARS",
"year": 1924},
{"url": "http://publicdomainmovie.net/movie/power-dive-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Screen_shot_Power_Dive.png",
"title": "Power Dive",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/the-ape-1940-0",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Theape.jpg",
"title": "The Ape",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/femmine-folli-foolish-wives-1921",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Foolish_Wives_ad.jpg",
"title": "Femmine Folli",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/bucket-of-blood-720p-1959",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=BucketOfBlood720p1959",
"title": "Bucket Of Blood",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/the-arizona-kid",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Arizona_Kid.jpg",
"title": "The Arizona Kid",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/city-without-men-1943",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=CityWithoutMen1943",
"title": "City Without Men",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/j-u-n-k",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=J-U-N-K",
"title": "J-U-N-K",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/sixteen-fathoms-deep-1934",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=SixteenFathomsDeep1934",
"title": "Sixteen Fathoms Deep",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/his-picture-in-the-papers",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=His_Picture_in_the_Papers_Poster.jpg",
"title": "His Picture in the Papers",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/brain-that-wouldnt-die-upgrade",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Brainthatwouldntdie_film_poster.jpg",
"title": "Brain That Wouldnt Die Upgrade",
"year": 1962},
{"url": "http://publicdomainmovie.net/movie/bliss-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=Bliss_463",
"title": "Bliss",
"year": 1917},
{"url": "http://publicdomainmovie.net/movie/the-son-of-a-gun-1919",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=TheSon-of-a-gun1919",
"title": "The Son-of-a-gun",
"year": 1919},
{"url": "http://publicdomainmovie.net/movie/the-little-american",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Little_American_1917.jpg",
"title": "The Little American",
"year": 1917},
{"url": "http://publicdomainmovie.net/movie/flirting-with-danger",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Flirting_With_Danger_lobby_card.jpg",
"title": "Flirting With Danger",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/where-theres-a-will-1936",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=WhereTheresAWill1936_864",
"title": "Where Theres A Will",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/the-county-fair",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_County_Fair_FilmPoster.jpeg",
"title": "The County Fair",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/the-lucifer-complex",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=LuciferComplex",
"title": "The Lucifer Complex",
"year": 1978},
{"url": "http://publicdomainmovie.net/movie/good-morning-boys-1937",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=GoodMorningBoys1937",
"title": "Good Morning, Boys",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/the-general-line-eng",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=TheGeneralLineeng",
"title": "The General Line",
"year": 1929},
{"url": "http://publicdomainmovie.net/movie/flirting-with-fate-1938",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Flirting_with_Fate.jpg",
"title": "Flirting With Fate",
"year": 1916},
{"url": "http://publicdomainmovie.net/movie/home-sweet-home",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=Home.Sweet.Home",
"title": "Home, Sweet Home",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/high-powered",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=High_Powered_1945",
"title": "High Powered",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/a-study-in-scarlet-720p-1933",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=%25D0%2597%25D0%25B0%25D0%25BD%25D1%258F%25D1%2582%25D0%25B8%25D1%258F_%25D0%25B2_%25D0%25B0%25D0%25BB%25D0%25BE%25D0%25BC.png",
"title": "A Study In Scarlet",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/the-north-star-video-quality-upgrade",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=RKO_Pictures_The_North_Star_.JPG",
"title": "THE NORTH STAR",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/enoch-arden",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Enoch_Arden_%25281911_film%2529.jpg",
"title": "Enoch Arden",
"year": 1911},
{"url": "http://publicdomainmovie.net/movie/carmen",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Carmen_1915.jpg",
"title": "Carmen",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/white-zombie-3",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_White_Zombie_01_Crisco_restoration.jpg",
"title": "White Zombie",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/fattys-suitless-day-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=FattrysSuitlessDay",
"title": "Fattys Suitless Day",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/bride-of-the-gorilla-complete-version",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=BrideOfTheGorillamostCompleteVersion",
"title": "BRIDE OF THE GORILLA",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/gullivers-travels-3",
"genre": "Animation",
"image": "http://publicdomainmovie.net/image.php?id=GulliversTravels",
"title": "Gullivers travels",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/retrovision-theater-presents-swamp-fire-1946",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=Lbines-RetroVisionTheaterPresentsSwampFire1946870",
"title": "Swamp Fire",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/romance-of-the-redwoods",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=A_Romance_of_the_Redwoods.jpg",
"title": "Romance Of The Redwoods",
"year": 1917},
{"url": "http://publicdomainmovie.net/movie/the-virginian",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Thevirginian_1914_poster.jpg",
"title": "The Virginian",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/woman-in-green-720p-1945",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Woman_in_Green_%25281945%2529_3.jpg",
"title": "Woman In Green",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/the-phantom-from-10000-leagues-widescreen-quality-upgrade",
"genre": "Sci-fi",
"image": "http://publicdomainmovie.net/image.php?id=ThePhantomFrom10000LeaguesWidescreenQualityUpgrade",
"title": "THE PHANTOM FROM 10,000 LEAGUES",
"year": 1955},
{"url": "http://publicdomainmovie.net/movie/the-light-of-western-stars-1940",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=TheLightOfWesternStars1940",
"title": "The Light of Western Stars",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/the-squaw-man",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=TheSquawMan1914.jpg",
"title": "The Squaw Man",
"year": 1914},
{"url": "http://publicdomainmovie.net/movie/mliss-1918-mary-pickford-thomas-meighan",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=M%2527Liss_%25281918%2529_1.jpg",
"title": "M Liss",
"year": 1918},
{"url": "http://publicdomainmovie.net/movie/earthworm-tractors-720",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Earthworm_Tractors_%25281936%2529_1.jpg",
"title": "Earthworm Tractors",
"year": 1936},
{"url": "http://publicdomainmovie.net/movie/femmine-folli-foolish-wives-1921-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Foolish_Wives_ad.jpg",
"title": "Femmine Folli",
"year": 1921},
{"url": "http://publicdomainmovie.net/movie/the-show",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Show_%25281927_film%2529.jpg",
"title": "The Show",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/granddad",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=Granddad",
"title": "Granddad",
"year": 1913},
{"url": "http://publicdomainmovie.net/movie/they-made-me-a-criminal-1939",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Poster_-_They_Made_Me_a_Criminal_01.jpg",
"title": "They Made Me a Criminal",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/dynamite-1949",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Dynamite-1949",
"title": "Dynamite",
"year": 1949},
{"url": "http://publicdomainmovie.net/movie/the-cheat-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The_Cheat_FilmPoster.jpeg",
"title": "The Cheat",
"year": 1915},
{"url": "http://publicdomainmovie.net/movie/the-gorilla-1939",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=PhantasmagoriaTheater-TheGorilla1939761",
"title": "The Gorilla",
"year": 1939},
{"url": "http://publicdomainmovie.net/movie/springtime-in-the-sierras-1947",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=SpringtimeInTheSierras1947",
"title": "Springtime In The Sierras",
"year": 1947},
{"url": "http://publicdomainmovie.net/movie/the-giant-gila-monster-widescreen-quality-upgrade",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=TheGiantGilaMonsterWidescreenQualityUpgrade",
"title": "The Giant Gila Monster",
"year": 1959},
{"url": "http://publicdomainmovie.net/movie/the-vampire-bat-1",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Vampirebat.jpg",
"title": "THE VAMPIRE BAT",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/gunsmoke-ranch-0",
"genre": "Western",
"image": "http://publicdomainmovie.net/image.php?id=gunsmoke-ranch",
"title": "Gunsmoke Ranch",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/fathers-little-dividend-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=fatherslittledividend",
"title": "Fathers Little Dividend",
"year": 1941},
{"url": "http://publicdomainmovie.net/movie/exposure-1932",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=Exposure1932",
"title": "Exposure",
"year": 1932},
{"url": "http://publicdomainmovie.net/movie/man-in-the-attic-with-spanish-subtitles",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=ManInTheAtticwithSpanishSubtitles",
"title": "Man In The Attic (with spanish subtitles)",
"year": 1953},
{"url": "http://publicdomainmovie.net/movie/wrong-road-the",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=wrong-road",
"title": "Wrong Road, The",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/gamblers-choice-0",
"genre": "Romance",
"image": "http://publicdomainmovie.net/image.php?id=gamblers-choice",
"title": "Gamblers Choice",
"year": 1944},
{"url": "http://publicdomainmovie.net/movie/the-great-rupert-video-quality-upgrade",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=TheGreatRupertVideoQualityUpgrade",
"title": "THE GREAT RUPERT",
"year": 1950},
{"url": "http://publicdomainmovie.net/movie/suddenly1954",
"genre": "Crime",
"image": "http://publicdomainmovie.net/image.php?id=Suddenly___1954",
"title": "Suddenly",
"year": 1954},
{"url": "http://publicdomainmovie.net/movie/the-scar-aka-hollow-triumph-upgrade",
"genre": "Mystery",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Joan_Bennett_in_The_Scar_crop.jpg",
"title": "The Scar AKA Hollow Triumph",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/legion-of-missing-men",
"genre": "Adventure",
"image": "http://publicdomainmovie.net/image.php?id=LegionOfMissingMen",
"title": "Legion of Missing Men",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/dementia-13-720p-1963",
"genre": "Horror",
"image": "http://publicdomainmovie.net/image.php?id=Dementia13720p1963",
"title": "Dementia 13",
"year": 1963},
{"url": "http://publicdomainmovie.net/movie/the-eagle-rudolph-valentino",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=TheEagleRudolphValentinoAppleTV2ndGen..mp4",
"title": "The Eagle",
"year": 1925},
{"url": "http://publicdomainmovie.net/movie/parole-inc-0",
"genre": "Crime",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Parole%252C_Inc._%25281948%2529_poster.jpg",
"title": "Parole Inc.",
"year": 1948},
{"url": "http://publicdomainmovie.net/movie/pecks-bad-boy-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Jackie_Cooper_in_Pecks_Bad_Boy.png",
"title": "Pecks Bad Boy",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/angel-on-my-shoulder-1",
"genre": "Fantasy",
"image": "http://publicdomainmovie.net/image.php?id=AngelOnMyShoulder_43",
"title": "Angel on My Shoulder",
"year": 1946},
{"url": "http://publicdomainmovie.net/movie/road-to-bali",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Bob_Hope%252C_Bing_Crosby_and_Dorothy_Lamour_in_Road_to_Bali.jpg",
"title": "Road to Bali",
"year": 1952},
{"url": "http://publicdomainmovie.net/movie/speedy",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Speedy_poster.jpg",
"title": "Speedy",
"year": 1928},
{"url": "http://publicdomainmovie.net/movie/arizona-days-1937",
"genre": "Western",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Arizona_Days_%25281928%2529_-_McGowan_%2526_Custer.jpg",
"title": "Arizona Days",
"year": 1937},
{"url": "http://publicdomainmovie.net/movie/the-ape-man-originalton-mit-deutschen-untertiteln",
"genre": "Horror",
"image": "http://publicdomainmovie.net/wikimedia.php?id=The-Ape-Man-Poster.jpg",
"title": "The Ape Man Originalton mit deutschen Untertiteln",
"year": 1943},
{"url": "http://publicdomainmovie.net/movie/go-for-broke-1",
"genre": "War",
"image": "http://publicdomainmovie.net/image.php?id=GoForBroke",
"title": "Go for Broke",
"year": 1951},
{"url": "http://publicdomainmovie.net/movie/his-double-life-0",
"genre": "Comedy",
"image": "http://publicdomainmovie.net/image.php?id=HisDoubleLife",
"title": "His Double Life",
"year": 1933},
{"url": "http://publicdomainmovie.net/movie/thecourageousdrchristian",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=TheCourageousDr.Christian",
"title": "The Courageous Dr. Christian",
"year": 1940},
{"url": "http://publicdomainmovie.net/movie/the-light-of-faith-1922",
"genre": "Silent",
"image": "http://publicdomainmovie.net/image.php?id=TheLightOfFaith1922",
"title": "The Light of Faith",
"year": 1922},
{"url": "http://publicdomainmovie.net/movie/combat-america",
"genre": "War",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Combat_America_poster.png",
"title": "Combat America",
"year": 1945},
{"url": "http://publicdomainmovie.net/movie/outside-the-law-0",
"genre": "Silent",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Outside_the_Law_%25281920%2529_poster.jpg",
"title": "Outside the Law",
"year": 1920},
{"url": "http://publicdomainmovie.net/movie/the-road-to-ruin-0",
"genre": "Drama",
"image": "http://publicdomainmovie.net/image.php?id=theroadtoruin",
"title": "The Road to Ruin",
"year": 1934},
{"url": "http://publicdomainmovie.net/movie/millie",
"genre": "Drama",
"image": "http://publicdomainmovie.net/wikimedia.php?id=Millie_1931_poster.jpg",
"title": "Millie",
"year": 1931}]
for data_dict in data_source:
models.movie.create(**data_dict)
| 44.854655
| 327
| 0.691444
| 30,519
| 244,727
| 5.468331
| 0.071202
| 0.274316
| 0.313504
| 0.176346
| 0.825829
| 0.799896
| 0.784089
| 0.693387
| 0.630555
| 0.442416
| 0
| 0.039007
| 0.094485
| 244,727
| 5,456
| 328
| 44.854655
| 0.714085
| 0
| 0
| 0.521636
| 0
| 0.022919
| 0.763456
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.00055
| 0.000367
| 0
| 0.000367
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9be12e87cb611b2d46e8308590703e6e80871c09
| 85,333
|
py
|
Python
|
tests/automation_framework/tests/work_order_tests/test_submit.py
|
shresthichauhan/trusted-compute-framework
|
1ad89fa6fa4492f43bb79e1c9be3536c4f0ff7f7
|
[
"Apache-2.0"
] | null | null | null |
tests/automation_framework/tests/work_order_tests/test_submit.py
|
shresthichauhan/trusted-compute-framework
|
1ad89fa6fa4492f43bb79e1c9be3536c4f0ff7f7
|
[
"Apache-2.0"
] | null | null | null |
tests/automation_framework/tests/work_order_tests/test_submit.py
|
shresthichauhan/trusted-compute-framework
|
1ad89fa6fa4492f43bb79e1c9be3536c4f0ff7f7
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import logging
import os
from src.libs import constants
from src.libs.avalon_test_wrapper \
import read_json, submit_request
from src.libs.test_base import TestBase
from src.utilities.verification_utils \
import verify_test, check_negative_test_responses
from src.utilities.generic_utils import TestStep
logger = logging.getLogger(__name__)
class TestClass():
test_obj = TestBase()
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_success
@pytest.mark.listener
@pytest.mark.sdk
@pytest.mark.p1
def test_work_order_success(self):
request_file = os.path.join(
constants.work_order_input_file,
"work_order_success.json")
err_cd = \
self.test_obj.setup_and_build_request_wo_submit(
read_json(request_file))
submit_response = submit_request(
self.test_obj.uri_client,
self.test_obj.build_request_output['request_obj'],
constants.wo_submit_output_json_file_name,
read_json(request_file))
result_response = self.test_obj.getresult(
self.test_obj.build_request_output['request_obj'])
assert (
verify_test(
result_response, 0,
self.test_obj.build_request_output['pre_test_output'],
self.test_obj.build_request_output['action_obj'])
is TestStep.SUCCESS.value)
logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_inData_DataEncryptionKey_hyphen_echo
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_inData_DataEncryptionKey_hyphen_echo(self):
request_file = os.path.join(
constants.work_order_input_file,
"work_order_inData_DataEncryptionKey_hyphen_echo.json")
err_cd = \
self.test_obj.setup_and_build_request_wo_submit(
read_json(request_file))
submit_response = submit_request(
self.test_obj.uri_client,
self.test_obj.build_request_output['request_obj'],
constants.wo_submit_output_json_file_name,
read_json(request_file))
result_response = self.test_obj.getresult(
self.test_obj.build_request_output['request_obj'])
assert (
verify_test(
result_response, 0,
self.test_obj.build_request_output['pre_test_output'],
self.test_obj.build_request_output['action_obj'])
is TestStep.SUCCESS.value)
logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_data_datahash_null
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_data_datahash_null(self):
request_file = os.path.join(
constants.work_order_input_file,
"work_order_data_datahash_null.json")
err_cd = \
self.test_obj.setup_and_build_request_wo_submit(
read_json(request_file))
submit_response = submit_request(
self.test_obj.uri_client,
self.test_obj.build_request_output['request_obj'],
constants.wo_submit_output_json_file_name,
read_json(request_file))
assert (
check_negative_test_responses(
submit_response,
"Invalid data format for data hash of in data")
is TestStep.SUCCESS.value)
logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_requesterId_null
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_submit_requesterId_null(self):
request_file = os.path.join(
constants.work_order_input_file,
"work_order_requester_id_null.json")
err_cd = \
self.test_obj.setup_and_build_request_wo_submit(
read_json(request_file))
submit_response = submit_request(
self.test_obj.uri_client,
self.test_obj.build_request_output['request_obj'],
constants.wo_submit_output_json_file_name,
read_json(request_file))
result_response = self.test_obj.getresult(
self.test_obj.build_request_output['request_obj'])
assert (
verify_test(
result_response, 0,
self.test_obj.build_request_output['pre_test_output'],
self.test_obj.build_request_output['action_obj'])
is TestStep.SUCCESS.value)
logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_sessionkeyiv_and_iv_indata_hex_string
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_submit_sessionkeyiv_and_iv_indata_hex_string(
self):
request_file = os.path.join(
constants.work_order_input_file,
"work_order_iv_indata_hex_string.json")
err_cd = \
self.test_obj.setup_and_build_request_wo_submit(
read_json(request_file))
submit_response = submit_request(
self.test_obj.uri_client,
self.test_obj.build_request_output['request_obj'],
constants.wo_submit_output_json_file_name,
read_json(request_file))
result_response = self.test_obj.getresult(
self.test_obj.build_request_output['request_obj'])
assert (
verify_test(
result_response, 0,
self.test_obj.build_request_output['pre_test_output'],
self.test_obj.build_request_output['action_obj'])
is TestStep.SUCCESS.value)
logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_get_result
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_get_result(self):
request_file = os.path.join(
constants.work_order_input_file,
"work_order_get_result.json")
err_cd = \
self.test_obj.setup_and_build_request_wo_submit(
read_json(request_file))
submit_response = submit_request(
self.test_obj.uri_client,
self.test_obj.build_request_output['request_obj'],
constants.wo_submit_output_json_file_name,
read_json(request_file))
result_response = self.test_obj.getresult(
self.test_obj.build_request_output['request_obj'])
assert (
verify_test(
result_response, 0,
self.test_obj.build_request_output['pre_test_output'],
self.test_obj.build_request_output['action_obj'])
is TestStep.SUCCESS.value)
logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_verify_signature
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_verify_signature(self):
request_file = os.path.join(
constants.work_order_input_file,
"work_order_verify_signature.json")
err_cd = \
self.test_obj.setup_and_build_request_wo_submit(
read_json(request_file))
submit_response = submit_request(
self.test_obj.uri_client,
self.test_obj.build_request_output['request_obj'],
constants.wo_submit_output_json_file_name,
read_json(request_file))
result_response = self.test_obj.getresult(
self.test_obj.build_request_output['request_obj'])
assert (
verify_test(
result_response, 0,
self.test_obj.build_request_output['pre_test_output'],
self.test_obj.build_request_output['action_obj'])
is TestStep.SUCCESS.value)
logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_requesterNonce_all_special_characters
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_submit_requesterNonce_all_special_characters(
self):
request_file = os.path.join(
constants.work_order_input_file,
"work_order_submit_requesterNonce_all_special_characters.json")
err_cd = \
self.test_obj.setup_and_build_request_wo_submit(
read_json(request_file))
submit_response = submit_request(
self.test_obj.uri_client,
self.test_obj.build_request_output['request_obj'],
constants.wo_submit_output_json_file_name,
read_json(request_file))
result_response = self.test_obj.getresult(
self.test_obj.build_request_output['request_obj'])
assert (
verify_test(
result_response, 0,
self.test_obj.build_request_output['pre_test_output'],
self.test_obj.build_request_output['action_obj'])
is TestStep.SUCCESS.value)
logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_with_alternate_worker_signing_algorithm
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_with_alternate_worker_signing_algorithm(self):
request_file = os.path.join(
constants.work_order_input_file,
"work_order_with_alternate_worker_signing_algorithm.json")
err_cd = \
self.test_obj.setup_and_build_request_wo_submit(
read_json(request_file))
submit_response = submit_request(
self.test_obj.uri_client,
self.test_obj.build_request_output['request_obj'],
constants.wo_submit_output_json_file_name,
read_json(request_file))
result_response = self.test_obj.getresult(
self.test_obj.build_request_output['request_obj'])
assert (
verify_test(
result_response, 0,
self.test_obj.build_request_output['pre_test_output'],
self.test_obj.build_request_output['action_obj'])
is TestStep.SUCCESS.value)
logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_with_alternate_hashing_algorithm
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_with_alternate_hashing_algorithm(self):
request_file = os.path.join(
constants.work_order_input_file,
"work_order_with_alternate_hashing_algorithm.json")
err_cd = \
self.test_obj.setup_and_build_request_wo_submit(
read_json(request_file))
submit_response = submit_request(
self.test_obj.uri_client,
self.test_obj.build_request_output['request_obj'],
constants.wo_submit_output_json_file_name,
read_json(request_file))
result_response = self.test_obj.getresult(
self.test_obj.build_request_output['request_obj'])
assert (
verify_test(
result_response, 0,
self.test_obj.build_request_output['pre_test_output'],
self.test_obj.build_request_output['action_obj'])
is TestStep.SUCCESS.value)
logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_without_requester_private_key
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_without_requester_private_key(self):
request_file = os.path.join(
constants.work_order_input_file,
"work_order_without_requester_private_key.json")
err_cd = \
self.test_obj.setup_and_build_request_wo_submit(
read_json(request_file))
submit_response = submit_request(
self.test_obj.uri_client,
self.test_obj.build_request_output['request_obj'],
constants.wo_submit_output_json_file_name,
read_json(request_file))
result_response = self.test_obj.getresult(
self.test_obj.build_request_output['request_obj'])
assert (
verify_test(
result_response, 0,
self.test_obj.build_request_output['pre_test_output'],
self.test_obj.build_request_output['action_obj'])
is TestStep.SUCCESS.value)
logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_twice_params
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_submit_twice_params(self):
    """Submit the work order built from work_order_submit_twice_params.json
    and expect the result to verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_twice_params.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_Submit_invalid_parameter_Workloadid
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_Submit_invalid_parameter_Workloadid(self):
    """Negative test: submit a request with an invalid workload id and
    expect the result to report 'Invalid workload id'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_Submit_invalid_parameter_Workloadid.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Invalid workload id")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_methodename_list
@pytest.mark.listener
def test_work_order_methodename_list(self):
    """Negative test: submit the raw JSON from
    work_order_methodename_list.json without the build/signing step and
    expect the listener to reject it with 'Invalid Request'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_methodename_list.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    # Deliberately no setup_and_build_request_wo_submit: the unmodified
    # JSON is submitted so the server validates it as-is.
    submit_response = submit_request(
        self.test_obj.uri_client,
        request_json,
        constants.wo_submit_output_json_file_name,
        request_json)
    assert (
        check_negative_test_responses(
            submit_response,
            "Invalid Request")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_signing_wrong
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_signing_wrong(self):
    """Negative test: submit a request with a bad signature and expect the
    submit response to report 'Invalid Request'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_signing_wrong.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    # Poll for the result as the original flow did; only the submit
    # response is asserted on.
    self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            submit_response,
            "Invalid Request")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_workerEncryptionKey_special_character
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_workerEncryptionKey_special_character(self):
    """Negative test: submit a request whose workerEncryptionKey contains
    special characters and expect 'Invalid Request' on submit."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_workerEncryptionKey_special_character.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    # Poll for the result as the original flow did; only the submit
    # response is asserted on.
    self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            submit_response,
            "Invalid Request")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_worker_encryption_key
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_worker_encryption_key(self):
    """Submit the work order built from work_order_worker_encryption_key.json
    and expect the result to verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_worker_encryption_key.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_with_alternate_dataEncryption_algorithm
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_with_alternate_dataEncryption_algorithm(self):
    """Negative test: submit a request using an unsupported data encryption
    algorithm and expect the result to report
    'Unsupported dataEncryptionAlgorithm found in the request'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_with_alternate_dataEncryption_algorithm.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Unsupported dataEncryptionAlgorithm found in the request")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_with_50_index_indata
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_with_50_index_indata(self):
    """Submit the work order built from work_order_with_50_index_indata.json
    and expect the result to verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_with_50_index_indata.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_with_changing_order_index
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_with_changing_order_index(self):
    """Submit the work order built from
    work_order_with_changing_order_index.json and expect the result to
    verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_with_changing_order_index.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_with_index0_indata
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_with_index0_indata(self):
    """Submit the work order built from work_order_with_index0_indata.json
    and expect the result to verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_with_index0_indata.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_with_empty_indata
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_with_empty_indata(self):
    """Negative test: submit a request whose inData is empty and expect
    the result to report 'Indata is empty'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_with_empty_indata.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Indata is empty")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_with_no_indata
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_with_no_indata(self):
    """Negative test: submit a request with no inData field and expect the
    submit response to report 'Missing parameter inData'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_with_no_indata.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    # Poll for the result as the original flow did; only the submit
    # response is asserted on.
    self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            submit_response,
            "Missing parameter inData")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_with_empty_indata_outdata
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_with_empty_indata_outdata(self):
    """Negative test: submit a request with empty inData and outData and
    expect the result to report 'Indata is empty'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_with_empty_indata_outdata.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Indata is empty")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_with_indata_unknown_parameter_value
@pytest.mark.listener
# sdk mark disabled: fails with
# AttributeError: 'dict' object has no attribute 'to_jrpc_string'
def test_work_order_with_indata_unknown_parameter_value(self):
    """Negative test: submit a request with an unknown parameter value in
    inData and expect the submit response to report 'Server error'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_with_indata_unknown_parameter_value.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    assert (
        check_negative_test_responses(
            submit_response,
            "Server error")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_negative_index
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_negative_index(self):
    """Submit the work order built from work_order_negative_index.json and
    expect the result to verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_negative_index.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_with_empty_indata_hash
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_with_empty_indata_hash(self):
    """Submit the work order built from
    work_order_with_empty_indata_hash.json and expect the result to
    verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_with_empty_indata_hash.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_datahash_random_str
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_datahash_random_str(self):
    """Negative test: submit a request whose inData dataHash is a random
    string and expect the submit response to report
    'Invalid data format for data hash of in data'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_datahash_random_str.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    assert (
        check_negative_test_responses(
            submit_response,
            "Invalid data format for data hash of in data")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_multiple_data_echoresult
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_multiple_data_echoresult(self):
    """Submit the work order built from
    work_order_multiple_data_echoresult.json and expect the result to
    verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_multiple_data_echoresult.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_echoclient
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_echoclient(self):
    """Submit the work order built from work_order_echoclient.json and
    expect the result to verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_echoclient.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_diff_text_data_indata_echoClient
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_diff_text_data_indata_echoClient(self):
    """Submit the work order built from
    work_order_diff_text_data_indata_echoClient.json and expect the
    result to verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_diff_text_data_indata_echoClient.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_specialcharacter_data_single_index_indata
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_specialcharacter_data_single_index_indata(self):
    """Submit the work order built from
    work_order_specialcharacter_data_single_index_indata.json and expect
    the result to verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_specialcharacter_data_single_index_indata.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_special_char_iv_echoresult
@pytest.mark.listener
# @pytest.mark.sdk  # sdk mark intentionally disabled
def test_work_order_special_char_iv_echoresult(self):
    """Negative test: submit a request whose inData iv contains special
    characters and expect the submit response to report
    'Invalid data format for initialization vector of in data'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_special_char_iv_echoresult.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    # Poll for the result as the original flow did; only the submit
    # response is asserted on.
    self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            submit_response,
            "Invalid data format for initialization vector of in data")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_requesterId_param_remove
@pytest.mark.listener
def test_work_order_submit_requesterId_param_remove(self):
    """Negative test: submit a request without requesterId and expect the
    submit response to report 'Missing parameter requesterId'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_requesterId_param_remove.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    # Poll for the result as the original flow did; only the submit
    # response is asserted on.
    self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            submit_response,
            "Missing parameter requesterId")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_with_response_timeout_str
@pytest.mark.listener
def test_work_order_with_response_timeout_str(self):
    """Negative test: submit a request with a string responseTimeoutMSecs
    and expect the submit response to report
    'Invalid data format for responseTimeoutMSecs'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_with_response_timeout_str.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    # Poll for the result as the original flow did; only the submit
    # response is asserted on.
    self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            submit_response,
            "Invalid data format for responseTimeoutMSecs")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_dataEncryptionAlgorithm_list
@pytest.mark.listener
@pytest.mark.sdk
@pytest.mark.set1
def test_work_order_submit_dataEncryptionAlgorithm_list(self):
    """Negative test: submit a request listing multiple
    dataEncryptionAlgorithm values and expect verification to report
    FAILURE."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_multiple_dataEncryptionAlgorithm.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    # Expect FAILURE: the worker must not accept a list here.
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.FAILURE.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_two_workload_in_workloadId
@pytest.mark.listener
@pytest.mark.sdk
@pytest.mark.set1
def test_work_order_submit_two_workload_in_workloadId(self):
    """Submit the work order built from work_order_two_workloadid.json and
    expect the result to verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_two_workloadid.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_WorkOrderId_null
@pytest.mark.listener
@pytest.mark.sdk
@pytest.mark.p1
@pytest.mark.set1
def test_work_order_submit_WorkOrderId_null(self):
    """Negative test: submit a request with a null workOrderId and expect
    the result to report 'Invalid work order Id'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_WorkOrderId_null.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Invalid work order Id")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_workerId_null_randomString
@pytest.mark.listener
@pytest.mark.sdk
@pytest.mark.set1
def test_work_order_submit_workerId_null_randomString(self):
    """Negative test: submit a request with a null/number/random-string
    workerId and expect the result to report
    'Work order Id not found in the database. Hence invalid parameter'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "workorder_workerId_null_number_randomString.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Work order Id not found in the database. Hence invalid parameter")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_workloadId_specialcharacters
@pytest.mark.listener
@pytest.mark.sdk
@pytest.mark.set1
def test_work_order_submit_workloadId_specialcharacters(self):
    """Negative test: submit a request whose workloadId contains special
    characters and expect the result to report 'Invalid workload id'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_workloadId_specialcharacters.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Invalid workload id")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_specialcharacter_data_echoClient
@pytest.mark.listener
@pytest.mark.sdk
@pytest.mark.set1
def test_work_order_submit_specialcharacter_data_echoClient(self):
    """Submit the work order built from
    work_order_submit_specialcharacter_data_echoClient.json and expect
    the result to verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_specialcharacter_data_echoClient.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_inData_outData_encryptedDataEncryptionKey_null_echoClient
@pytest.mark.listener
@pytest.mark.sdk
@pytest.mark.set1
def test_work_order_submit_inData_outData_encryptedDataEncryptionKey_null_echoClient(self):
    """Submit the work order built from
    work_order_both_in_out_Data_EncryptionKey_null_echo.json (null
    encryptedDataEncryptionKey in both inData and outData) and expect the
    result to verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_both_in_out_Data_EncryptionKey_null_echo.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_dataEncryptionAlgorithm_list_same_algo_twice
@pytest.mark.listener
@pytest.mark.sdk
@pytest.mark.set1
def test_work_order_submit_dataEncryptionAlgorithm_list_same_algo_twice(self):
    """Negative test: submit a request whose dataEncryptionAlgorithm list
    repeats the same algorithm and expect verification to report
    FAILURE."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_dataEncryptionAlgorithm_list_same_algo_twice.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    # Expect FAILURE: the worker must not accept a duplicated list entry.
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.FAILURE.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_inData_outData_encryptedDataEncryptionKey_hyphen_echoClient
@pytest.mark.listener
@pytest.mark.set1
def test_work_order_submit_inData_outData_encryptedDataEncryptionKey_hyphen_echoClient(self):
    """Submit the work order built from
    work_order_submit_inData_outData_encryptedDataEncryptionKey_hyphen_echoClient.json
    and expect the result to verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_inData_outData_encryptedDataEncryptionKey_hyphen_echoClient.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_encryptedDataEncryptionKey_not_set_echoClient
@pytest.mark.listener
@pytest.mark.sdk
@pytest.mark.set1
def test_work_order_submit_encryptedDataEncryptionKey_not_set_echoClient(self):
    """Negative test: submit a request with encryptedDataEncryptionKey not
    set and expect the result to report
    'Work order Id not found in the database. Hence invalid parameter'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_encryptedDataEncryptionKey_not_set_echoClient.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Work order Id not found in the database. Hence invalid parameter")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_encryptedDataEncryptionKey_empty_echoClient
@pytest.mark.listener
@pytest.mark.sdk
@pytest.mark.set1
def test_work_order_submit_encryptedDataEncryptionKey_empty_echoClient(self):
    """Submit the work order built from
    work_order_submit_encryptedDataEncryptionKey_empty_echoClient.json
    and expect the result to verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_encryptedDataEncryptionKey_empty_echoClient.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_with_outdata
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_submit_with_outdata(self):
    """Submit the work order built from work_order_submit_with_outdata.json
    and expect the result to verify as SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_with_outdata.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_remove_both_data_datahash_in_inData
@pytest.mark.listener
def test_work_order_submit_remove_both_data_datahash_in_inData(self):
    """Negative test: submit a request whose inData entries have neither
    data nor dataHash and expect the submit response to report
    'Missing in data parameter data'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_remove_both_data_datahash_in_inData.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    # Poll for the result as the original flow did; only the submit
    # response is asserted on.
    self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            submit_response,
            "Missing in data parameter data")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_with_one_valid_and_other_empty_data_and_datahash_in_indata
@pytest.mark.listener
def test_work_order_submit_with_one_valid_and_other_empty_data_and_datahash_in_indata(self):
    """Negative test: submit a request where one inData entry is valid and
    another has empty data/dataHash; expect the submit response to report
    'Invalid data format for data hash of in data'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_with_one_valid_and_other_empty_data_and_datahash_in_indata.json")
    # Parse the request JSON once instead of re-reading the file per call.
    request_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(request_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        request_json)
    # Poll for the result as the original flow did; only the submit
    # response is asserted on.
    self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            submit_response,
            "Invalid data format for data hash of in data")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_remove_both_data_datahash_Single_index_in_inData
@pytest.mark.listener
def test_work_order_submit_remove_both_data_datahash_Single_index_in_inData(self):
    """Submit with data and dataHash removed from a single inData index; expect rejection."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_remove_both_data_datahash_Single_index_in_inData.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    # getresult is kept to preserve the original flow; the assertion is on
    # the submit response.
    self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            submit_response,
            "Missing in data parameter data")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_indata_data_index2_random_str
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_submit_indata_data_index2_random_str(self):
    """Submit with a random string at inData index 2; expect 'Invalid Request'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_indata_data_index2_random_str.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Invalid Request")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_indata_data_index1_random_str
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_submit_indata_data_index1_random_str(self):
    """Submit with a random string at inData index 1; expect 'Invalid Request'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_indata_data_index1_random_str.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Invalid Request")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_workload_id_empty_string
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_submit_workload_id_empty_string(self):
    """Submit with an empty workloadId; expect 'Invalid workload id'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_workload_id_empty_string.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Invalid workload id")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_workload_id_hex_string
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_submit_workload_id_hex_string(self):
    """Submit with a hex-string workloadId; expect 'Invalid workload id'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_workload_id_hex_string.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Invalid workload id")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_workLoad_null_string
@pytest.mark.listener
def test_work_order_submit_workLoad_null_string(self):
    """Submit with a null-string workloadId; expect 'Invalid workload id'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_workLoad_null_string.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Invalid workload id")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_WorkOrder_increased_hexlength
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_submit_WorkOrder_increased_hexlength(self):
    """Submit with an over-length hex workOrderId; expect 'Invalid work order Id'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_WorkOrder_increased_hexlength.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Invalid work order Id")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_same_WorkOrderID_WorkloadId
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_submit_same_WorkOrderID_WorkloadId(self):
    """Submit with workOrderId equal to workloadId; expect 'Invalid work order Id'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_same_WorkOrderID_WorkloadId.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Invalid work order Id")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_indata_index1_data_different_hexlength
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_submit_indata_index1_data_different_hexlength(self):
    """Submit with a different hex length at inData index 1; expect SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_indata_index1_data_different_hexlength.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_requesterId_som_special_characters
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_submit_requesterId_som_special_characters(self):
    """Submit with special characters in requesterId; expect rejection at submit."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_requesterId_som_special_characters.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    assert (
        check_negative_test_responses(
            submit_response,
            "Invalid data format for requester id")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_work_order_submit_requesterNonce_param_empty
@pytest.mark.listener
@pytest.mark.sdk
def test_work_order_submit_requesterNonce_param_empty(self):
    """Submit with an empty requesterNonce; expect 'Invalid Request'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_requesterNonce_param_empty.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        check_negative_test_responses(
            result_response,
            "Invalid Request")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_requestersignature_difflength
@pytest.mark.listener
def test_workordersubmit_requestersignature_difflength(self):
    """Submit with a different-length requesterSignature; expect SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_verify_requesterSignature_diff_length.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_verifyingkey_remove
@pytest.mark.listener
def test_workordersubmit_verifyingkey_remove(self):
    """Submit without a verifyingKey; expect SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_without_verifyingkey.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_verifyingkey_nullstr
@pytest.mark.listener
def test_workordersubmit_verifyingkey_nullstr(self):
    """Submit with a null-string verifyingKey; expect SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "work_order_submit_verifyingkey_null_str.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_requestersignature_remove
@pytest.mark.listener
def test_workordersubmit_requestersignature_remove(self):
    """Submit without a requesterSignature; expect SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "workordersubmit_requestersignature_remove.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_indata_outdata
@pytest.mark.listener
@pytest.mark.sdk
def test_workordersubmit_indata_outdata(self):
    """Submit a work order with both inData and outData; expect SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "workordersubmit_indata_outdata.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_workorderId_remove
@pytest.mark.sdk
def test_workordersubmit_workorderId_remove(self):
    """Submit without a workOrderId; expect rejection at submit."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "workordersubmit_workorderId_remove.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    assert (
        check_negative_test_responses(
            submit_response,
            "Invalid data format for work order id")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_sessionkeyiv_allspecialchar
@pytest.mark.listener
def test_workordersubmit_sessionkeyiv_allspecialchar(self):
    """Submit with an all-special-character sessionKeyIv; expect rejection."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "workordersubmit_sessionkeyiv_allspecialchar.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    assert (
        check_negative_test_responses(
            submit_response,
            "Invalid data format for session key iv")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_requesterId_variouslengthhex
@pytest.mark.listener
@pytest.mark.sdk
def test_workordersubmit_requesterId_variouslengthhex(self):
    """Submit with a various-length hex requesterId; expect SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "workordersubmit_requesterId_variouslengthhex.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_workerEncryptionKey_notdefaulthex
@pytest.mark.sdk
@pytest.mark.listener
def test_workordersubmit_workerEncryptionKey_notdefaulthex(self):
    """Submit with a non-default hex workerEncryptionKey; expect SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "workordersubmit_workerEncryptionKey_notdefaulthex.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_requesterNonce_notdefaultlength
@pytest.mark.listener
@pytest.mark.sdk
def test_workordersubmit_requesterNonce_notdefaultlength(self):
    """Submit with a non-default-length requesterNonce; expect SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "workordersubmit_requesterNonce_notdefaultlength.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_encryptedRequestHash_norequesterSignature
@pytest.mark.listener
def test_workordersubmit_encryptedRequestHash_norequesterSignature(self):
    """Submit with encryptedRequestHash but no requesterSignature; expect SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "workordersubmit_encryptedRequestHash_norequesterSignature.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_requesterSignature_noencryptedRequestHash
@pytest.mark.listener
def test_workordersubmit_requesterSignature_noencryptedRequestHash(self):
    """Submit with requesterSignature but no encryptedRequestHash; expect rejection."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "workordersubmit_requesterSignature_noencryptedRequestHash.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    assert (
        check_negative_test_responses(
            submit_response,
            "Missing parameter encryptedRequestHash")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_mandatoryfields_remove
@pytest.mark.sdk
def test_workordersubmit_mandatoryfields_remove(self):
    """Submit with mandatory fields removed; expect 'Invalid params'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "workordersubmit_mandatoryfields_remove.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    assert (
        check_negative_test_responses(
            submit_response,
            "Invalid params")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_id_remove
@pytest.mark.listener
def test_workordersubmit_id_remove(self):
    """Submit with the JSON-RPC id removed; expect 'Server error'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "workordersubmit_id_remove.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    assert (
        check_negative_test_responses(
            submit_response,
            "Server error")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_workeridworkloadid_same
@pytest.mark.listener
@pytest.mark.sdk
def test_workordersubmit_workeridworkloadid_same(self):
    """Submit with workerId equal to workloadId; expect 'Invalid workload id'."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "workordersubmit_workeridworkloadid_same.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_response = submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    assert (
        check_negative_test_responses(
            submit_response,
            "Invalid workload id")
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
@pytest.mark.work_order_submit
@pytest.mark.test_workordersubmit_indata_firstinparams
@pytest.mark.listener
@pytest.mark.sdk
def test_workordersubmit_indata_firstinparams(self):
    """Submit with inData as the first entry in params; expect SUCCESS."""
    request_file = os.path.join(
        constants.work_order_input_file,
        "workordersubmit_indata_firstinparams.json")
    input_json = read_json(request_file)
    self.test_obj.setup_and_build_request_wo_submit(input_json)
    submit_request(
        self.test_obj.uri_client,
        self.test_obj.build_request_output['request_obj'],
        constants.wo_submit_output_json_file_name,
        input_json)
    result_response = self.test_obj.getresult(
        self.test_obj.build_request_output['request_obj'])
    assert (
        verify_test(
            result_response, 0,
            self.test_obj.build_request_output['pre_test_output'],
            self.test_obj.build_request_output['action_obj'])
        is TestStep.SUCCESS.value)
    logger.info('\t\t!!! Test completed !!!\n\n')
| 37.791408
| 99
| 0.644042
| 9,998
| 85,333
| 5.04921
| 0.024805
| 0.061428
| 0.096312
| 0.070362
| 0.960184
| 0.955529
| 0.942336
| 0.920328
| 0.9108
| 0.903292
| 0
| 0.001277
| 0.2749
| 85,333
| 2,257
| 100
| 37.808152
| 0.814594
| 0.013043
| 0
| 0.863563
| 0
| 0
| 0.110937
| 0.041928
| 0
| 0
| 0
| 0
| 0.041148
| 1
| 0.041148
| false
| 0
| 0.004331
| 0
| 0.046562
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
501c6a75b6c2573b9a6c7414c511b88f506bce54
| 148
|
py
|
Python
|
Codewars/8kyu/evil-or-odious/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | 7
|
2017-09-20T16:40:39.000Z
|
2021-08-31T18:15:08.000Z
|
Codewars/8kyu/evil-or-odious/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
Codewars/8kyu/evil-or-odious/Python/test.py
|
RevansChen/online-judge
|
ad1b07fee7bd3c49418becccda904e17505f3018
|
[
"MIT"
] | null | null | null |
# Python - 3.6.0
test.assert_equals(evil(1), "It's Odious!")
test.assert_equals(evil(2), "It's Odious!")
test.assert_equals(evil(3), "It's Evil!")
| 24.666667
| 43
| 0.675676
| 28
| 148
| 3.464286
| 0.464286
| 0.309278
| 0.494845
| 0.618557
| 0.597938
| 0.597938
| 0.597938
| 0
| 0
| 0
| 0
| 0.044776
| 0.094595
| 148
| 5
| 44
| 29.6
| 0.679104
| 0.094595
| 0
| 0
| 0
| 0
| 0.257576
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.