hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
83bdfdd8bc1c2733f2034e79debc02a3facb04ed
| 167,101
|
py
|
Python
|
ubertool/agdrift/tests/test_agdrift_unittest.py
|
qed-uber/ubertool
|
472a143e110f634afdfe03d503e5f442b1e57b86
|
[
"Unlicense"
] | 2
|
2018-01-02T12:58:00.000Z
|
2018-04-03T15:40:59.000Z
|
ubertool/agdrift/tests/test_agdrift_unittest.py
|
qed-uber/ubertool
|
472a143e110f634afdfe03d503e5f442b1e57b86
|
[
"Unlicense"
] | 21
|
2017-08-02T18:00:16.000Z
|
2019-08-20T15:57:09.000Z
|
ubertool/agdrift/tests/test_agdrift_unittest.py
|
quanted/ubertool
|
472a143e110f634afdfe03d503e5f442b1e57b86
|
[
"Unlicense"
] | null | null | null |
from __future__ import division #brings in Python 3.0 mixed type calculation rules
import datetime
import inspect
import numpy as np
import numpy.testing as npt
import os.path
import pandas as pd
import sys
from tabulate import tabulate
import unittest
##find parent directory and import model
#parentddir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))
#sys.path.append(parentddir)
from ..agdrift_exe import Agdrift
# Module-level scratch dict; currently unused by the tests in this module.
test = {}
class TestAgdrift(unittest.TestCase):
"""
IEC unit tests.
"""
def setUp(self):
    """
    Per-test setup hook (intentionally a no-op).

    No shared fixtures are needed: each test constructs its own empty
    Agdrift object via create_agdrift_object(), so nothing is read from
    the qaqc csv here.
    :return:
    """
    pass
def tearDown(self):
    """
    Per-test teardown hook (intentionally a no-op).

    Results are printed by each test's finally block, so there is nothing
    to clean up or persist here.
    :return:
    """
    pass
def create_agdrift_object(self):
    """Build and return an Agdrift model object from empty input/output DataFrames."""
    # an empty frame is sufficient: tests assign the attributes they need directly
    empty_frame = pd.DataFrame()
    return Agdrift(empty_frame, empty_frame)
def test_validate_sim_scenarios(self):
    """
    :description determines if user defined scenarios are valid for processing
    :param application_method: type of Tier I application method employed
    :param aquatic_body_def: type of endpoint of concern (e.g., pond, wetland); implies whether
    :       endpoint of concern parameters (e.g., pond width) are set (i.e., by user or EPA standard)
    :param drop_size_*: qualitative description of spray droplet size for aerial & ground applications
    :param boom_height: qualitative height above ground of spray boom
    :param airblast_type: type of orchard being sprayed
    :NOTE we perform an additional validation check related to distances later in the code just before integration
    :return:
    """
    # create empty pandas dataframes to create empty object for this unittest
    agdrift_empty = self.create_agdrift_object()
    agdrift_empty.out_sim_scenario_chk = pd.Series([], dtype='object')
    # initialize result before the try block so the finally-block reporting cannot
    # raise NameError (which would mask the real failure) if setup fails early
    result = pd.Series([], dtype='object')
    expected_result = pd.Series([
        'Valid Tier I Aquatic Aerial Scenario',
        'Valid Tier I Terrestrial Aerial Scenario',
        'Valid Tier I Aquatic Aerial Scenario',
        'Valid Tier I Terrestrial Aerial Scenario',
        'Valid Tier I Aquatic Aerial Scenario',
        'Valid Tier I Terrestrial Ground Scenario',
        'Valid Tier I Aquatic Ground Scenario',
        'Valid Tier I Terrestrial Ground Scenario',
        'Valid Tier I Aquatic Ground Scenario',
        'Valid Tier I Terrestrial Airblast Scenario',
        'Valid Tier I Aquatic Airblast Scenario',
        'Valid Tier I Terrestrial Airblast Scenario',
        'Valid Tier I Aquatic Airblast Scenario',
        'Valid Tier I Terrestrial Airblast Scenario',
        'Invalid Tier I Aquatic Aerial Scenario',
        'Invalid Tier I Aquatic Ground Scenario',
        'Invalid Tier I Aquatic Airblast Scenario',
        'Invalid Tier I Terrestrial Aerial Scenario',
        'Valid Tier I Terrestrial Ground Scenario',
        'Valid Tier I Terrestrial Airblast Scenario',
        'Invalid scenario ecosystem_type',
        'Invalid Tier I Aquatic Assessment application method',
        'Invalid Tier I Terrestrial Assessment application method'], dtype='object')
    try:
        # set test data (simulations 15-23 deliberately contain invalid combinations)
        agdrift_empty.num_simulations = len(expected_result)
        agdrift_empty.application_method = pd.Series(
            ['tier_1_aerial',
             'tier_1_aerial',
             'tier_1_aerial',
             'tier_1_aerial',
             'tier_1_aerial',
             'tier_1_ground',
             'tier_1_ground',
             'tier_1_ground',
             'tier_1_ground',
             'tier_1_airblast',
             'tier_1_airblast',
             'tier_1_airblast',
             'tier_1_airblast',
             'tier_1_airblast',
             'tier_1_aerial',
             'tier_1_ground',
             'tier_1_airblast',
             'tier_1_aerial',
             'tier_1_ground',
             'tier_1_airblast',
             'tier_1_aerial',
             'Tier II Aerial',
             'Tier III Aerial'], dtype='object')
        agdrift_empty.ecosystem_type = pd.Series(
            ['aquatic_assessment',
             'terrestrial_assessment',
             'aquatic_assessment',
             'terrestrial_assessment',
             'aquatic_assessment',
             'terrestrial_assessment',
             'aquatic_assessment',
             'terrestrial_assessment',
             'aquatic_assessment',
             'terrestrial_assessment',
             'aquatic_assessment',
             'terrestrial_assessment',
             'aquatic_assessment',
             'terrestrial_assessment',
             'aquatic_assessment',
             'aquatic_assessment',
             'aquatic_assessment',
             'terrestrial_assessment',
             'terrestrial_assessment',
             'terrestrial_assessment',
             'Field Assessment',
             'aquatic_assessment',
             'terrestrial_assessment'], dtype='object')
        agdrift_empty.aquatic_body_type = pd.Series(
            ['epa_defined_pond',
             'NaN',
             'epa_defined_wetland',
             'NaN',
             'user_defined_pond',
             'NaN',
             'user_defined_wetland',
             'NaN',
             'epa_defined_wetland',
             'NaN',
             'user_defined_pond',
             'NaN',
             'user_defined_wetland',
             'NaN',
             'Defined Pond',
             'user_defined_pond',
             'epa_defined_pond',
             'NaN',
             'NaN',
             'NaN',
             'epa_defined_pond',
             'user_defined_wetland',
             'user_defined_pond'], dtype='object')
        agdrift_empty.terrestrial_field_type = pd.Series(
            ['NaN',
             'user_defined_terrestrial',
             'NaN',
             'epa_defined_terrestrial',
             'NaN',
             'user_defined_terrestrial',
             'NaN',
             'user_defined_terrestrial',
             'NaN',
             'epa_defined_terrestrial',
             'NaN',
             'user_defined_terrestrial',
             'NaN',
             'user_defined_terrestrial',
             'NaN',
             'NaN',
             'NaN',
             'user_defined_terrestrial',
             'user_defined_terrestrial',
             'user_defined_terrestrial',
             'NaN',
             'NaN',
             'user_defined_terrestrial'], dtype='object')
        agdrift_empty.drop_size_aerial = pd.Series(
            ['very_fine_to_fine',
             'fine_to_medium',
             'medium_to_coarse',
             'coarse_to_very_coarse',
             'fine_to_medium',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'medium_to_coarse',
             'NaN',
             'very_fine_to_medium',
             'NaN',
             'very_fine Indeed',
             'NaN',
             'very_fine_to_medium',
             'medium_to_coarse',
             'NaN'], dtype='object')
        agdrift_empty.drop_size_ground = pd.Series(
            ['NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'very_fine',
             'fine_to_medium-coarse',
             'very_fine',
             'fine_to_medium-coarse',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'very_fine',
             'NaN',
             'fine_to_medium-coarse',
             'very_fine',
             'NaN',
             'very_fine_to_medium',
             'NaN',
             'very_fine'], dtype='object')
        agdrift_empty.boom_height = pd.Series(
            ['NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'high',
             'low',
             'high',
             'low',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'high',
             'NaN',
             'NaN',
             'NaN',
             'NaN'], dtype='object')
        agdrift_empty.airblast_type = pd.Series(
            ['NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'normal',
             'dense',
             'sparse',
             'orchard',
             'vineyard',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'NaN',
             'vineyard',
             'NaN',
             'NaN',
             'NaN'], dtype='object')
        agdrift_empty.validate_sim_scenarios()
        result = agdrift_empty.out_sim_scenario_chk
        npt.assert_array_equal(result, expected_result, err_msg="", verbose=True)
    finally:
        tab = [result, expected_result]
        print("\n")
        print(inspect.currentframe().f_code.co_name)
        print(tabulate(tab, headers='keys', tablefmt='rst'))
    return
def test_set_sim_scenario_id(self):
    """
    :description provides scenario ids per simulation that match scenario names (i.e., column_names) from SQL database
    :param out_sim_scenario_id: scenario name as assigned to individual simulations
    :param num_simulations: number of simulations to assign scenario names
    :param out_sim_scenario_chk: from previous method where scenarios were checked for validity
    :param application_method: application method of scenario
    :param drop_size_*: qualitative description of spray droplet size for aerial and ground applications
    :param boom_height: qualitative height above ground of spray boom
    :param airblast_type: type of airblast application (e.g., vineyard, orchard)
    :return:
    """
    # create empty pandas dataframes to create empty object for this unittest
    agdrift_empty = self.create_agdrift_object()
    # initialize result before the try block so the finally-block reporting cannot
    # raise NameError (which would mask the real failure) if setup fails early
    result = pd.Series([], dtype='object')
    expected_result = pd.Series(['aerial_vf2f',
                                 'aerial_f2m',
                                 'aerial_m2c',
                                 'aerial_c2vc',
                                 'ground_low_vf',
                                 'ground_low_fmc',
                                 'ground_high_vf',
                                 'ground_high_fmc',
                                 'airblast_normal',
                                 'airblast_dense',
                                 'airblast_sparse',
                                 'airblast_vineyard',
                                 'airblast_orchard',
                                 'Invalid'], dtype='object')
    try:
        agdrift_empty.num_simulations = len(expected_result)
        agdrift_empty.out_sim_scenario_chk = pd.Series(['Valid Tier I Aerial',
                                                        'Valid Tier I Aerial',
                                                        'Valid Tier I Aerial',
                                                        'Valid Tier I Aerial',
                                                        'Valid Tier I Ground',
                                                        'Valid Tier I Ground',
                                                        'Valid Tier I Ground',
                                                        'Valid Tier I Ground',
                                                        'Valid Tier I Airblast',
                                                        'Valid Tier I Airblast',
                                                        'Valid Tier I Airblast',
                                                        'Valid Tier I Airblast',
                                                        'Valid Tier I Airblast',
                                                        'Invalid Scenario'], dtype='object')
        agdrift_empty.application_method = pd.Series(['tier_1_aerial',
                                                      'tier_1_aerial',
                                                      'tier_1_aerial',
                                                      'tier_1_aerial',
                                                      'tier_1_ground',
                                                      'tier_1_ground',
                                                      'tier_1_ground',
                                                      'tier_1_ground',
                                                      'tier_1_airblast',
                                                      'tier_1_airblast',
                                                      'tier_1_airblast',
                                                      'tier_1_airblast',
                                                      'tier_1_airblast',
                                                      'tier_1_aerial'], dtype='object')
        agdrift_empty.drop_size_aerial = pd.Series(['very_fine_to_fine',
                                                    'fine_to_medium',
                                                    'medium_to_coarse',
                                                    'coarse_to_very_coarse',
                                                    'NaN',
                                                    'NaN',
                                                    'NaN',
                                                    'NaN',
                                                    'NaN',
                                                    'NaN',
                                                    'NaN',
                                                    'NaN',
                                                    'NaN',
                                                    'NaN'], dtype='object')
        agdrift_empty.drop_size_ground = pd.Series(['NaN',
                                                    'NaN',
                                                    'NaN',
                                                    'NaN',
                                                    'very_fine',
                                                    'fine_to_medium-coarse',
                                                    'very_fine',
                                                    'fine_to_medium-coarse',
                                                    'NaN',
                                                    'NaN',
                                                    'NaN',
                                                    'NaN',
                                                    'NaN',
                                                    'NaN'], dtype='object')
        agdrift_empty.boom_height = pd.Series(['NaN',
                                               'NaN',
                                               'NaN',
                                               'NaN',
                                               'low',
                                               'low',
                                               'high',
                                               'high',
                                               'NaN',
                                               'NaN',
                                               'NaN',
                                               'NaN',
                                               'NaN',
                                               'NaN'], dtype='object')
        agdrift_empty.airblast_type = pd.Series(['NaN',
                                                 'NaN',
                                                 'NaN',
                                                 'NaN',
                                                 'NaN',
                                                 'NaN',
                                                 'NaN',
                                                 'NaN',
                                                 'normal',
                                                 'dense',
                                                 'sparse',
                                                 'vineyard',
                                                 'orchard',
                                                 'NaN'], dtype='object')
        agdrift_empty.set_sim_scenario_id()
        result = agdrift_empty.out_sim_scenario_id
        npt.assert_array_equal(result, expected_result, err_msg="", verbose=True)
    finally:
        tab = [result, expected_result]
        print("\n")
        print(inspect.currentframe().f_code.co_name)
        print(tabulate(tab, headers='keys', tablefmt='rst'))
    return
def test_assign_column_names(self):
    """
    :description assigns column names (except distance column) from sql database to internal scenario names
    :param column_name: short name for pesticide application scenario for which distance vs deposition data is provided
    :param scenario_name: internal variable for holding scenario names
    :param scenario_number: index for scenario_name (this method assumes the distance values could occur in any column)
    :param distance_name: internal name for the column holding distance data
    :NOTE to test both outputs of this method I simply appended them together
    :return:
    """
    # create empty pandas dataframes to create empty object for this unittest
    agdrift_empty = self.create_agdrift_object()
    agdrift_empty.scenario_name = pd.Series([], dtype='object')
    # initialize result before the try block so the finally-block reporting cannot
    # raise NameError (which would mask the real failure) if setup fails early
    result = pd.Series([], dtype='object')
    expected_result = pd.Series(['aerial_vf2f', 'aerial_f2m', 'aerial_m2c', 'aerial_c2vc',
                                 'ground_low_vf', 'ground_low_fmc',
                                 'ground_high_vf', 'ground_high_fmc',
                                 'airblast_normal', 'airblast_dense', 'airblast_sparse',
                                 'airblast_vineyard', 'airblast_orchard'], dtype='object')
    try:
        # 'distance_ft' must be excluded from the scenario names by the method under test
        agdrift_empty.column_names = pd.Series(['aerial_vf2f', 'aerial_f2m', 'aerial_m2c', 'aerial_c2vc',
                                                'ground_low_vf', 'ground_low_fmc',
                                                'ground_high_vf', 'ground_high_fmc',
                                                'airblast_normal', 'airblast_dense', 'airblast_sparse',
                                                'airblast_vineyard', 'airblast_orchard', 'distance_ft'])
        # call method to assign scenario names
        agdrift_empty.assign_column_names()
        result = agdrift_empty.scenario_name
        npt.assert_array_equal(result, expected_result, err_msg="", verbose=True)
    finally:
        tab = [result, expected_result]
        print("\n")
        print(inspect.currentframe().f_code.co_name)
        print(tabulate(tab, headers='keys', tablefmt='rst'))
    return
def test_get_distances(self):
    """
    :description retrieves distance values for deposition scenario datasets
    :             all scenarios use same distances
    :param num_db_values: number of distance values to be retrieved
    :param distance_name: name of column in sql database that contains the distance values
    :NOTE any blank fields are filled with 'nan'
    :return:
    """
    # create empty pandas dataframes to create empty object for this unittest
    agdrift_empty = self.create_agdrift_object()
    location = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
    agdrift_empty.db_name = os.path.join(location, 'sqlite_agdrift_distance.db')
    agdrift_empty.db_table = 'output'
    # initialize result before the try block so the finally-block reporting cannot
    # raise NameError (which would mask the real failure) if the db read fails
    result = pd.Series([], dtype='float')
    expected_result = pd.Series([], dtype='float')
    try:
        expected_result = [0.,0.102525,0.20505,0.4101,0.8202,1.6404,3.2808,4.9212,6.5616,9.8424,13.1232,19.6848,26.2464,
                           32.808,39.3696,45.9312,52.4928,59.0544,65.616,72.1776,78.7392,85.3008,91.8624,98.424,104.9856,
                           111.5472,118.1088,124.6704,131.232,137.7936,144.3552,150.9168,157.4784,164.04,170.6016,177.1632,
                           183.7248,190.2864,196.848,203.4096,209.9712,216.5328,223.0944,229.656,236.2176,242.7792,249.3408,
                           255.9024,262.464,269.0256,275.5872,282.1488,288.7104,295.272,301.8336,308.3952,314.9568,321.5184,
                           328.08,334.6416,341.2032,347.7648,354.3264,360.888,367.4496,374.0112,380.5728,387.1344,393.696,
                           400.2576,406.8192,413.3808,419.9424,426.504,433.0656,439.6272,446.1888,452.7504,459.312,465.8736,
                           472.4352,478.9968,485.5584,492.12,498.6816,505.2432,511.8048,518.3664,524.928,531.4896,538.0512,
                           544.6128,551.1744,557.736,564.2976,570.8592,577.4208,583.9824,590.544,597.1056,603.6672,610.2288,
                           616.7904,623.352,629.9136,636.4752,643.0368,649.5984,656.16,662.7216,669.2832,675.8448,682.4064,
                           688.968,695.5296,702.0912,708.6528,715.2144,721.776,728.3376,734.8992,741.4608,748.0224,754.584,
                           761.1456,767.7072,774.2688,780.8304,787.392,793.9536,800.5152,807.0768,813.6384,820.2,826.7616,
                           833.3232,839.8848,846.4464,853.008,859.5696,866.1312,872.6928,879.2544,885.816,892.3776,898.9392,
                           905.5008,912.0624,918.624,925.1856,931.7472,938.3088,944.8704,951.432,957.9936,964.5552,971.1168,
                           977.6784,984.24,990.8016,997.3632]
        agdrift_empty.distance_name = 'distance_ft'
        agdrift_empty.num_db_values = len(expected_result)
        result = agdrift_empty.get_distances(agdrift_empty.num_db_values)
        npt.assert_allclose(result, expected_result, rtol=1e-5, atol=0, err_msg='', verbose=True)
    finally:
        tab = [result, expected_result]
        print("\n")
        print(inspect.currentframe().f_code.co_name)
        print(tabulate(tab, headers='keys', tablefmt='rst'))
    return
def test_get_scenario_deposition_data(self):
    """
    :description retrieves deposition data for all scenarios from sql database
    :             and checks that for each the first, last, and total number of values
    :             are correct
    :param scenario: name of scenario for which data is to be retrieved
    :param num_values: number of values included in scenario datasets
    :return:
    """
    # create empty pandas dataframes to create empty object for this unittest
    agdrift_empty = self.create_agdrift_object()
    # build result as a plain list from the start: no per-iteration special case needed
    result = []
    # expected values are triples: (first value, 161st value, count) per scenario
    expected_result = [0.50013,0.041273,161.0, #aerial_vf2f
                       0.49997,0.011741,161.0, #aerial_f2m
                       0.4999,0.0053241,161.0, #aerial_m2c
                       0.49988,0.0031189,161.0, #aerial_c2vc
                       1.019339,9.66E-04,161.0, #ground_low_vf
                       1.007885,6.13E-04,161.0, #ground_low_fmc
                       1.055205,1.41E-03,161.0, #ground_high_vf
                       1.012828,7.72E-04,161.0, #ground_high_fmc
                       8.91E-03,3.87E-05,161.0, #airblast_normal
                       0.1155276,4.66E-04,161.0, #airblast_dense
                       0.4762651,5.14E-05,161.0, #airblast_sparse
                       3.76E-02,3.10E-05,161.0, #airblast_vineyard
                       0.2223051,3.58E-04,161.0] #airblast_orchard
    try:
        agdrift_empty.num_db_values = 161  #set number of data values in sql db
        location = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
        agdrift_empty.db_name = os.path.join(location, 'sqlite_agdrift_distance.db')
        agdrift_empty.db_table = 'output'
        # this is the list of scenario names (column names) in sql db (the order here is
        # important because the expected values are ordered in this manner)
        agdrift_empty.scenario_name = ['aerial_vf2f', 'aerial_f2m', 'aerial_m2c', 'aerial_c2vc',
                                       'ground_low_vf', 'ground_low_fmc', 'ground_high_vf', 'ground_high_fmc',
                                       'airblast_normal', 'airblast_dense', 'airblast_sparse', 'airblast_vineyard',
                                       'airblast_orchard']
        # cycle through reading scenarios and building result list
        for i in range(len(agdrift_empty.scenario_name)):
            # get scenario data
            scenario_data = agdrift_empty.get_scenario_deposition_data(agdrift_empty.scenario_name[i],
                                                                       agdrift_empty.num_db_values)
            print(scenario_data)
            # extract 1st and last values of scenario data and append (first, last, count)
            result.extend([scenario_data[0], scenario_data[agdrift_empty.num_db_values - 1],
                           float(len(scenario_data))])
        npt.assert_allclose(result, expected_result, rtol=1e-5, atol=0, err_msg='', verbose=True)
    finally:
        tab = [result, expected_result]
        print("\n")
        print(inspect.currentframe().f_code.co_name)
        print(tabulate(tab, headers='keys', tablefmt='rst'))
    return
def test_get_column_names(self):
    """
    :description retrieves column names from sql database (sqlite_agdrift_distance.db)
    :             (each column name refers to a specific deposition scenario;
    :             the scenario name is used later to retrieve the deposition data)
    :parameter output name of sql database table from which to retrieve requested data
    :return:
    """
    # build an empty agdrift object and point it at the test database
    agdrift_empty = self.create_agdrift_object()
    tests_dir = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
    agdrift_empty.db_name = os.path.join(tests_dir, 'sqlite_agdrift_distance.db')
    agdrift_empty.db_table = 'output'
    # pre-seed result so the finally block can always report it
    result = pd.Series([], dtype='object')
    expected_result = ['distance_ft','aerial_vf2f', 'aerial_f2m', 'aerial_m2c', 'aerial_c2vc',
                       'ground_low_vf', 'ground_low_fmc', 'ground_high_vf', 'ground_high_fmc',
                       'airblast_normal', 'airblast_dense', 'airblast_sparse', 'airblast_vineyard',
                       'airblast_orchard']
    try:
        result = agdrift_empty.get_column_names()
        npt.assert_array_equal(result, expected_result, err_msg="", verbose=True)
    finally:
        report = [result, expected_result]
        print("\n")
        print(inspect.currentframe().f_code.co_name)
        print(tabulate(report, headers='keys', tablefmt='rst'))
    return
def test_filter_arrays(self):
    """
    :description eliminate blank data cells (i.e., distances for which no deposition value is provided)
                 (and thus reduce the number of x,y values to be used)
    :parameter x_in: array of distance values associated with values for a deposition scenario (e.g., Aerial/EPA Defined Pond)
    :parameter y_in: array of deposition values associated with a deposition scenario (e.g., Aerial/EPA Defined Pond)
    :parameter x_out: processed array of x_in values eliminating indices of blank distance/deposition values
    :parameter y_out: processed array of y_in values eliminating indices of blank distance/deposition values
    :NOTE y_in array is assumed to be populated by values >= 0. except for the blanks as 'nan' entries
    :return:
    """
    # create empty pandas dataframes to create empty object for this unittest
    agdrift_empty = self.create_agdrift_object()
    # initialize results before the try block so the finally-block reporting cannot
    # raise NameError (which would mask the real failure) if the call fails
    result_x = pd.Series([], dtype='float')
    result_y = pd.Series([], dtype='float')
    expected_result_x = pd.Series([0.,1.,4.,5.,6.,7.], dtype='float')
    expected_result_y = pd.Series([10.,11.,14.,15.,16.,17.], dtype='float')
    try:
        x_in = pd.Series([0.,1.,2.,3.,4.,5.,6.,7.], dtype='float')
        # 'nan' strings become NaN under dtype='float'; those indices must be dropped
        y_in = pd.Series([10.,11.,'nan','nan',14.,15.,16.,17.], dtype='float')
        x_out, y_out = agdrift_empty.filter_arrays(x_in, y_in)
        result_x = x_out
        result_y = y_out
        npt.assert_allclose(result_x, expected_result_x, rtol=1e-5, atol=0, err_msg='', verbose=True)
        npt.assert_allclose(result_y, expected_result_y, rtol=1e-5, atol=0, err_msg='', verbose=True)
    finally:
        # bug fix: previously tab was assigned twice, discarding the x-array comparison
        tab = [result_x, expected_result_x, result_y, expected_result_y]
        print("\n")
        print(inspect.currentframe().f_code.co_name)
        print(tabulate(tab, headers='keys', tablefmt='rst'))
    return
def test_list_sims_per_scenario(self):
    """
    :description scan simulations and count number and indices of simulations that apply to each scenario
    :parameter num_scenarios number of deposition scenarios included in SQL database
    :parameter num_simulations number of simulations included in this model execution
    :parameter scenario_name name of deposition scenario as recorded in SQL database
    :parameter out_sim_scenario_id identification of deposition scenario specified per model run simulation
    :return:
    """
    # create empty pandas dataframes to create empty object for this unittest
    agdrift_empty = self.create_agdrift_object()
    # initialize results before the try block so the finally-block reporting cannot
    # raise NameError (which would mask the real failure) if setup fails early
    result_num_sims = pd.Series([], dtype='int')
    result_sim_indices = pd.Series([], dtype='int')
    # every scenario appears twice (indices i and i+13); remaining slots are zero padding
    expected_num_sims = pd.Series([2] * 13, dtype='int')
    expected_sim_indices = pd.Series([[i, i + 13] + [0] * 24 for i in range(13)], dtype='int')
    try:
        agdrift_empty.scenario_name = pd.Series(['aerial_vf2f', 'aerial_f2m', 'aerial_m2c', 'aerial_c2vc',
                                                 'ground_low_vf', 'ground_low_fmc', 'ground_high_vf', 'ground_high_fmc',
                                                 'airblast_normal', 'airblast_dense', 'airblast_sparse', 'airblast_vineyard',
                                                 'airblast_orchard'], dtype='object')
        # the 13 scenario ids repeated twice: simulation i and i+13 share a scenario
        agdrift_empty.out_sim_scenario_id = pd.Series(['aerial_vf2f', 'aerial_f2m', 'aerial_m2c', 'aerial_c2vc',
                                                       'ground_low_vf', 'ground_low_fmc', 'ground_high_vf', 'ground_high_fmc',
                                                       'airblast_normal', 'airblast_dense', 'airblast_sparse', 'airblast_vineyard',
                                                       'airblast_orchard','aerial_vf2f', 'aerial_f2m', 'aerial_m2c', 'aerial_c2vc',
                                                       'ground_low_vf', 'ground_low_fmc', 'ground_high_vf', 'ground_high_fmc',
                                                       'airblast_normal', 'airblast_dense', 'airblast_sparse', 'airblast_vineyard',
                                                       'airblast_orchard'], dtype='object')
        agdrift_empty.num_simulations = len(agdrift_empty.out_sim_scenario_id)
        agdrift_empty.num_scenarios = len(agdrift_empty.scenario_name)
        result_num_sims, result_sim_indices = agdrift_empty.list_sims_per_scenario()
        npt.assert_array_equal(result_num_sims, expected_num_sims, err_msg='', verbose=True)
        npt.assert_array_equal(result_sim_indices, expected_sim_indices, err_msg='', verbose=True)
    finally:
        tab = [result_num_sims, expected_num_sims, result_sim_indices, expected_sim_indices]
        print("\n")
        print(inspect.currentframe().f_code.co_name)
        print(tabulate(tab, headers='keys', tablefmt='rst'))
    return
def test_determine_area_dimensions(self):
    """
    :description determine relevant area/length/depth of waterbody or terrestrial area
    :param i: simulation number
    :param ecosystem_type: type of assessment to be conducted
    :param aquatic_body_type: source of dimensional data for area (EPA or User defined)
    :param terrestrial_field_type: source of dimensional data for area (EPA or User defined)
    :param *_width: default or user specified width of waterbody or terrestrial field
    :param *_length: default or user specified length of waterbody or terrestrial field
    :param *_depth: default or user specified depth of waterbody or terrestrial field
    :NOTE all areas, i.e., ponds, wetlands, and terrestrial fields are of 1 hectare size; the user can elect
          to specify a width other than the default width but it won't change the area size; thus for
          user specified areas the length is calculated and not specified by the user)
    :return:
    """
    # create empty pandas dataframes to create empty object for this unittest
    agdrift_empty = self.create_agdrift_object()
    expected_width = pd.Series([208.7, 208.7, 100., 400., 150., 0.], dtype='float')
    expected_length = pd.Series([515.8, 515.8, 1076.39, 269.098, 717.593, 0.], dtype='float')
    expected_depth = pd.Series([6.56, 0.4921, 7., 23., 0., 0.], dtype='float')
    # initialize results before the try block so the finally-block reporting cannot
    # raise NameError (which would mask the real failure) if setup fails early
    width_result = pd.Series([], dtype='float')
    length_result = pd.Series([], dtype='float')
    depth_result = pd.Series([], dtype='float')
    try:
        agdrift_empty.ecosystem_type = pd.Series(['aquatic_assessment',
                                                  'aquatic_assessment',
                                                  'aquatic_assessment',
                                                  'aquatic_assessment',
                                                  'terrestrial_assessment',
                                                  'terrestrial_assessment'], dtype='object')
        agdrift_empty.aquatic_body_type = pd.Series(['epa_defined_pond',
                                                     'epa_defined_wetland',
                                                     'user_defined_pond',
                                                     'user_defined_wetland',
                                                     'NaN',
                                                     'NaN'], dtype='object')
        agdrift_empty.terrestrial_field_type = pd.Series(['NaN',
                                                          'NaN',
                                                          'NaN',
                                                          'NaN',
                                                          'user_defined_terrestrial',
                                                          'epa_defined_terrestrial'], dtype='object')
        num_simulations = len(agdrift_empty.ecosystem_type)
        agdrift_empty.default_width = 208.7
        agdrift_empty.default_length = 515.8
        agdrift_empty.default_pond_depth = 6.56
        agdrift_empty.default_wetland_depth = 0.4921
        # 'NaN' strings convert to NaN under dtype='float'
        agdrift_empty.user_pond_width = pd.Series(['NaN', 'NaN', 100., 'NaN', 'NaN', 'NaN'], dtype='float')
        agdrift_empty.user_pond_depth = pd.Series(['NaN', 'NaN', 7., 'NaN', 'NaN', 'NaN'], dtype='float')
        agdrift_empty.user_wetland_width = pd.Series(['NaN', 'NaN', 'NaN', 400., 'NaN', 'NaN'], dtype='float')
        agdrift_empty.user_wetland_depth = pd.Series(['NaN','NaN', 'NaN', 23., 'NaN', 'NaN'], dtype='float')
        agdrift_empty.user_terrestrial_width = pd.Series(['NaN', 'NaN', 'NaN', 'NaN', 150., 'NaN'], dtype='float')
        # pre-size the per-simulation result holders so they can be filled by index
        width_result = pd.Series(num_simulations * ['NaN'], dtype='float')
        length_result = pd.Series(num_simulations * ['NaN'], dtype='float')
        depth_result = pd.Series(num_simulations * ['NaN'], dtype='float')
        agdrift_empty.out_area_width = pd.Series(num_simulations * ['nan'], dtype='float')
        agdrift_empty.out_area_length = pd.Series(num_simulations * ['nan'], dtype='float')
        agdrift_empty.out_area_depth = pd.Series(num_simulations * ['nan'], dtype='float')
        agdrift_empty.sqft_per_hectare = 107639
        for i in range(num_simulations):
            width_result[i], length_result[i], depth_result[i] = agdrift_empty.determine_area_dimensions(i)
        npt.assert_allclose(width_result, expected_width, rtol=1e-5, atol=0, err_msg='', verbose=True)
        npt.assert_allclose(length_result, expected_length, rtol=1e-5, atol=0, err_msg='', verbose=True)
        npt.assert_allclose(depth_result, expected_depth, rtol=1e-5, atol=0, err_msg='', verbose=True)
    finally:
        tab = [width_result, expected_width, length_result, expected_length, depth_result, expected_depth]
        print("\n")
        print(inspect.currentframe().f_code.co_name)
        print(tabulate(tab, headers='keys', tablefmt='rst'))
    return
def test_calc_avg_dep_foa(self):
"""
:description calculation of average deposition over width of water body
:param integration_result result of integration of deposition curve across the distance
: beginning at the near distance and extending to the far distance of the water body
:param integration_distance effectively the width of the water body
:param avg_dep_foa average deposition rate across the width of the water body
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result = pd.Series([0.1538462, 0.5, 240.])
try:
integration_result = pd.Series([1.,125.,3e5], dtype='float')
integration_distance = pd.Series([6.5,250.,1250.], dtype='float')
result = agdrift_empty.calc_avg_dep_foa(integration_result, integration_distance)
npt.assert_allclose(result, expected_result, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
tab = [result, expected_result]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_calc_avg_dep_lbac(self):
"""
Deposition calculation.
:param avg_dep_foa: average deposition over width of water body as fraction of applied
:param application_rate: actual application rate
:param avg_dep_lbac: average deposition over width of water body in lbs per acre
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result = pd.Series([6.5, 3.125e4, 3.75e8])
try:
avg_dep_foa = pd.Series([1.,125.,3e5], dtype='float')
application_rate = pd.Series([6.5,250.,1250.], dtype='float')
result = agdrift_empty.calc_avg_dep_lbac(avg_dep_foa, application_rate)
npt.assert_allclose(result, expected_result, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
tab = [result, expected_result]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_calc_avg_dep_foa_from_lbac(self):
"""
Deposition calculation.
:param avg_dep_foa: average deposition over width of water body as fraction of applied
:param application_rate: actual application rate
:param avg_dep_lbac: average deposition over width of water body in lbs per acre
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result = pd.Series([1.553846e-01, 8.8e-06, 4.e-08])
try:
avg_dep_lbac = pd.Series([1.01, 0.0022, 0.00005], dtype='float')
application_rate = pd.Series([6.5,250.,1250.], dtype='float')
result = agdrift_empty.calc_avg_dep_foa_from_lbac(avg_dep_lbac, application_rate)
npt.assert_allclose(result, expected_result, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
tab = [result, expected_result]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_calc_avg_dep_lbac_from_gha(self):
"""
Deposition calculation.
:param avg_dep_gha: average deposition over width of water body in units of grams/hectare
:param gms_per_lb: conversion factor to convert lbs to grams
:param acres_per_hectare: conversion factor to convert hectares to acres
:param avg_dep_lbac: average deposition over width of water body in lbs per acre
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result = pd.Series([0.01516739, 0.111524, 0.267659])
try:
avg_dep_gha = pd.Series([17., 125., 3e2], dtype='float')
agdrift_empty.gms_per_lb = 453.592
agdrift_empty.acres_per_hectare = 2.471
result = agdrift_empty.calc_avg_dep_lbac_from_gha(avg_dep_gha)
npt.assert_allclose(result, expected_result, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
tab = [result, expected_result]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_calc_avg_dep_lbac_from_waterconc_ngl(self):
"""
:description calculate the average deposition onto the pond/wetland/field
:param avg_dep_lbac: average deposition over width of water body in lbs per acre
:param area_width: average width of water body
:parem area_length: average length of water body
:param area_depth: average depth of water body
:param gms_per_lb: conversion factor to convert lbs to grams
:param ng_per_gram conversion factor
:param sqft_per_acre conversion factor
:param liters_per_ft3 conversion factor
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result = pd.Series([2.311455e-05, 2.209479e-03, 2.447423e-03])
try:
avg_waterconc_ngl = pd.Series([17., 125., 3e2], dtype='float')
area_width = pd.Series([50., 200., 500.], dtype='float')
area_length = pd.Series([6331., 538., 215.], dtype='float')
area_depth = pd.Series([0.5, 6.5, 3.], dtype='float')
agdrift_empty.liters_per_ft3 = 28.3168
agdrift_empty.sqft_per_acre = 43560.
agdrift_empty.ng_per_gram = 1.e9
agdrift_empty.gms_per_lb = 453.592
agdrift_empty.acres_per_hectare = 2.471
result = agdrift_empty.calc_avg_dep_lbac_from_waterconc_ngl(avg_waterconc_ngl, area_width,
area_length, area_depth)
npt.assert_allclose(result, expected_result, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
tab = [result, expected_result]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_calc_avg_dep_lbac_from_mgcm2(self):
"""
:description calculate the average deposition of pesticide over the terrestrial field in lbs/acre
:param avg_dep_lbac: average deposition over width of water body in lbs per acre
:param area_depth: average depth of water body
:param gms_per_lb: conversion factor to convert lbs to grams
:param mg_per_gram conversion factor
:param sqft_per_acre conversion factor
:param cm2_per_ft2 conversion factor
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result = pd.Series([2.676538e-02, 2.2304486, 44.608973])
try:
avg_fielddep_mgcm2 = pd.Series([3.e-4, 2.5e-2, 5.e-01])
agdrift_empty.sqft_per_acre = 43560.
agdrift_empty.gms_per_lb = 453.592
agdrift_empty.cm2_per_ft2 = 929.03
agdrift_empty.mg_per_gram = 1.e3
result = agdrift_empty.calc_avg_dep_lbac_from_mgcm2(avg_fielddep_mgcm2)
npt.assert_allclose(result, expected_result, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
tab = [result, expected_result]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_calc_avg_dep_gha(self):
"""
:description average deposition over width of water body in grams per acre
:param avg_dep_lbac: average deposition over width of water body in lbs per acre
:param gms_per_lb: conversion factor to convert lbs to grams
:param acres_per_hectare: conversion factor to convert acres to hectares
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result = pd.Series([1.401061, 0.3648362, 0.03362546])
try:
avg_dep_lbac = pd.Series([1.25e-3,3.255e-4,3e-5], dtype='float')
agdrift_empty.gms_per_lb = 453.592
agdrift_empty.acres_per_hectare = 2.47105
result = agdrift_empty.calc_avg_dep_gha(avg_dep_lbac)
npt.assert_allclose(result, expected_result, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
tab = [result, expected_result]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_calc_avg_waterconc_ngl(self):
"""
:description calculate the average concentration of pesticide in the pond/wetland
:param avg_dep_lbac: average deposition over width of water body in lbs per acre
:param area_width: average width of water body
:parem area_length: average length of water body
:param area_depth: average depth of water body
:param gms_per_lb: conversion factor to convert lbs to grams
:param ng_per_gram conversion factor
:param sqft_per_acre conversion factor
:param liters_per_ft3 conversion factor
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result = pd.Series([70.07119, 18.24654, 22.41823])
try:
avg_dep_lbac = pd.Series([1.25e-3,3.255e-4,3e-5], dtype='float')
area_width = pd.Series([6.56, 208.7, 997.], dtype='float')
area_length = pd.Series([1.640838e4, 515.7595, 107.9629], dtype='float')
area_depth = pd.Series([6.56, 6.56, 0.4921], dtype='float')
agdrift_empty.ng_per_gram = 1.e9
agdrift_empty.liters_per_ft3 = 28.3168
agdrift_empty.gms_per_lb = 453.592
agdrift_empty.sqft_per_acre = 43560.
result = agdrift_empty.calc_avg_waterconc_ngl(avg_dep_lbac ,area_width, area_length, area_depth)
npt.assert_allclose(result, expected_result, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
tab = [result, expected_result]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_calc_avg_fielddep_mgcm2(self):
"""
:description calculate the average deposition of pesticide over the terrestrial field
:param avg_dep_lbac: average deposition over width of water body in lbs per acre
:param area_depth: average depth of water body
:param gms_per_lb: conversion factor to convert lbs to grams
:param mg_per_gram conversion factor
:param sqft_per_acre conversion factor
:param cm2_per_ft2 conversion factor
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result = pd.Series([1.401063e-5, 3.648369e-6, 3.362552e-7])
try:
avg_dep_lbac = pd.Series([1.25e-3,3.255e-4,3e-5], dtype='float')
agdrift_empty.gms_per_lb = 453.592
agdrift_empty.sqft_per_acre = 43560.
agdrift_empty.mg_per_gram = 1.e3
agdrift_empty.cm2_per_ft2 = 929.03
result = agdrift_empty.calc_avg_fielddep_mgcm2(avg_dep_lbac)
npt.assert_allclose(result, expected_result, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
tab = [result, expected_result]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_generate_running_avg(self):
"""
:description retrieves values for distance and the first deposition scenario from the sql database
:param num_db_values: number of distance values to be retrieved
:param distance_name: name of column in sql database that contains the distance values
:NOTE any blank fields are filled with 'nan'
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
location = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
agdrift_empty.db_name = os.path.join(location, 'sqlite_agdrift_distance.db')
agdrift_empty.db_table = 'output'
expected_result_x = pd.Series([], dtype='float')
expected_result_y = pd.Series([], dtype='float')
expected_result_npts = pd.Series([], dtype='object')
x_array_in = pd.Series([], dtype='float')
y_array_in = pd.Series([], dtype='float')
x_array_out = pd.Series([], dtype='float')
y_array_out = pd.Series([], dtype='float')
try:
expected_result_x = [0.,0.102525,0.20505,0.4101,0.8202,1.6404,3.2808,4.9212,6.5616,9.8424,13.1232,19.6848,26.2464,
32.808,39.3696,45.9312,52.4928,59.0544,65.616,72.1776,78.7392,85.3008,91.8624,98.424,104.9856,
111.5472,118.1088,124.6704,131.232,137.7936,144.3552,150.9168,157.4784,164.04,170.6016,177.1632,
183.7248,190.2864,196.848,203.4096,209.9712,216.5328,223.0944,229.656,236.2176,242.7792,249.3408,
255.9024,262.464,269.0256,275.5872,282.1488,288.7104,295.272,301.8336,308.3952,314.9568,321.5184,
328.08,334.6416,341.2032,347.7648,354.3264,360.888,367.4496,374.0112,380.5728,387.1344,393.696,
400.2576,406.8192,413.3808,419.9424,426.504,433.0656,439.6272,446.1888,452.7504,459.312,465.8736,
472.4352,478.9968,485.5584,492.12,498.6816,505.2432,511.8048,518.3664,524.928,531.4896,538.0512,
544.6128,551.1744,557.736,564.2976,570.8592,577.4208,583.9824,590.544,597.1056,603.6672,610.2288,
616.7904,623.352,629.9136,636.4752,643.0368,649.5984,656.16,662.7216,669.2832,675.8448,682.4064,
688.968,695.5296,702.0912,708.6528,715.2144,721.776,728.3376,734.8992,741.4608,748.0224,754.584,
761.1456,767.7072,774.2688,780.8304,787.392,793.9536,800.5152,807.0768,813.6384,820.2,826.7616,
833.3232,839.8848,846.4464,853.008,859.5696,866.1312,872.6928,879.2544,885.816,892.3776,898.9392,
905.5008,912.0624,918.624,925.1856,931.7472,938.3088,944.8704,951.432,957.9936,964.5552,971.1168,
977.6784,984.24,990.8016]
expected_result_y = [0.364712246,0.351507467,0.339214283,0.316974687,0.279954504,0.225948786,0.159949625,
0.123048839,0.099781801,0.071666234,0.056352938,0.03860139,0.029600805,0.024150524,
0.020550354,0.01795028,0.015967703,0.014467663,0.013200146,0.01215011,0.011300098,
0.010550085,0.009905072,0.009345065,0.008845057,0.008400051,0.008000046,0.007635043,
0.007300039,0.007000034,0.006725033,0.00646503,0.006230027,0.006010027,0.005805023,
0.005615023,0.005435021,0.00527002,0.00511002,0.004960017,0.004820017,0.004685016,
0.004560015,0.004440015,0.004325013,0.004220012,0.004120012,0.004020012,0.003925011,
0.003835011,0.00375001,0.00367001,0.00359001,0.00351001,0.003435009,0.003365009,
0.003300007,0.003235009,0.003170007,0.003110007,0.003055006,0.003000007,0.002945006,
0.002895006,0.002845006,0.002795006,0.002745006,0.002695006,0.002650005,0.002610005,
0.002570005,0.002525006,0.002485004,0.002450005,0.002410005,0.002370005,0.002335004,
0.002300005,0.002265004,0.002235004,0.002205004,0.002175004,0.002145004,0.002115004,
0.002085004,0.002055004,0.002025004,0.002000002,0.001975004,0.001945004,0.001920002,
0.001900002,0.001875004,0.001850002,0.001830002,0.001805004,0.001780002,0.001760002,
0.001740002,0.001720002,0.001700002,0.001680002,0.001660002,0.001640002,0.001620002,
0.001605001,0.001590002,0.001570002,0.001550002,0.001535001,0.001520002,0.001500002,
0.001485001,0.001470002,0.001455001,0.001440002,0.001425001,0.001410002,0.001395001,
0.001385001,0.001370002,0.001355001,0.001340002,0.001325001,0.001315001,0.001305001,
0.001290002,0.001275001,0.001265001,0.001255001,0.001245001,0.001230002,0.001215001,
0.001205001,0.001195001,0.001185001,0.001175001,0.001165001,0.001155001,0.001145001,
0.001135001,0.001125001,0.001115001,0.001105001,0.001095001,0.001085001,0.001075001,
0.001065001,0.00106,0.001055001,0.001045001,0.001035001,0.001025001,0.001015001,
0.001005001,0.0009985,0.000993001,0.000985001,0.000977001,0.000969501]
expected_result_npts = 160
x_dist = 6.56
agdrift_empty.distance_name = 'distance_ft'
agdrift_empty.scenario_name = 'ground_low_vf'
agdrift_empty.num_db_values = 161
x_array_in = agdrift_empty.get_distances(agdrift_empty.num_db_values)
y_array_in = agdrift_empty.get_scenario_deposition_data(agdrift_empty.scenario_name, agdrift_empty.num_db_values)
x_array_out, y_array_out, npts_out = agdrift_empty.generate_running_avg(agdrift_empty.num_db_values,
x_array_in, y_array_in, x_dist)
# write output arrays to excel file -- just for debugging
agdrift_empty.write_arrays_to_csv(x_array_out, y_array_out, "output_array_generate.csv")
npt.assert_array_equal(expected_result_npts, npts_out, verbose=True)
npt.assert_allclose(x_array_out, expected_result_x, rtol=1e-5, atol=0, err_msg='', verbose=True)
npt.assert_allclose(y_array_out, expected_result_y, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
pass
tab1 = [x_array_out, expected_result_x]
tab2 = [y_array_out, expected_result_y]
print("\n")
print(inspect.currentframe().f_code.co_name)
print('expected {0} number of points and got {1} points'.format(expected_result_npts, npts_out))
print("x_array result/x_array_expected")
print(tabulate(tab1, headers='keys', tablefmt='rst'))
print("y_array result/y_array_expected")
print(tabulate(tab2, headers='keys', tablefmt='rst'))
return
def test_generate_running_avg1(self):
"""
:description creates a running average for a specified x axis width (e.g., 7-day average values of an array)
:param x_array_in: array of x-axis values
:param y_array_in: array of y-axis values
:param num_db_values: number of points in the input arrays
:param x_array_out: array of x-zxis values in output array
:param y_array_out: array of y-axis values in output array
:param npts_out: number of points in the output array
:param x_dist: width in x_axis units of running weighted average
:param num_db_values: number of distance values to be retrieved
:param distance_name: name of column in sql database that contains the distance values
:NOTE This test uses a uniformly spaced x_array and monotonically increasing y_array
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result_x = pd.Series([], dtype='float')
expected_result_y = pd.Series([], dtype='float')
expected_result_npts = pd.Series([], dtype='object')
x_array_in = pd.Series([], dtype='float')
y_array_in = pd.Series([], dtype='float')
x_array_out = pd.Series([], dtype='float')
y_array_out = pd.Series([], dtype='float')
try:
expected_result_x = [0.,1.,2.,3.,4.,5.,6.,7.,8.,9.,10.,
11.,12.,13.,14.,15.,16.,17.,18.,19.,20.,
21.,22.,23.,24.,25.,26.,27.,28.,29.,30.,
31.,32.,33.,34.,35.,36.,37.,38.,39.,40.,
41.,42.,43.,44.]
expected_result_y = [2.5,3.5,4.5,5.5,6.5,7.5,8.5,9.5,10.5,11.5,
12.5,13.5,14.5,15.5,16.5,17.5,18.5,19.5,20.5,21.5,
22.5,23.5,24.5,25.5,26.5,27.5,28.5,29.5,30.5,31.5,
32.5,33.5,34.5,35.5,36.5,37.5,38.5,39.5,40.5,41.5,
42.5,43.5,44.5,45.5, 46.5]
expected_result_npts = 45
x_dist = 5.
num_db_values = 51
x_array_in = [0.,1.,2.,3.,4.,5.,6.,7.,8.,9.,10.,
11.,12.,13.,14.,15.,16.,17.,18.,19.,20.,
21.,22.,23.,24.,25.,26.,27.,28.,29.,30.,
31.,32.,33.,34.,35.,36.,37.,38.,39.,40.,
41.,42.,43.,44.,45.,46.,47.,48.,49.,50.]
y_array_in = [0.,1.,2.,3.,4.,5.,6.,7.,8.,9.,10.,
11.,12.,13.,14.,15.,16.,17.,18.,19.,20.,
21.,22.,23.,24.,25.,26.,27.,28.,29.,30.,
31.,32.,33.,34.,35.,36.,37.,38.,39.,40.,
41.,42.,43.,44.,45.,46.,47.,48.,49.,50.]
x_array_out, y_array_out, npts_out = agdrift_empty.generate_running_avg(num_db_values, x_array_in,
y_array_in, x_dist)
npt.assert_array_equal(expected_result_npts, npts_out, verbose=True)
npt.assert_allclose(x_array_out, expected_result_x, rtol=1e-5, atol=0, err_msg='', verbose=True)
npt.assert_allclose(y_array_out, expected_result_y, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
pass
tab1 = [x_array_out, expected_result_x]
tab2 = [y_array_out, expected_result_y]
print("\n")
print(inspect.currentframe().f_code.co_name)
print('expected {0} number of points and got {1} points'.format(expected_result_npts, npts_out))
print(tabulate(tab1, headers='keys', tablefmt='rst'))
print(tabulate(tab2, headers='keys', tablefmt='rst'))
return
def test_generate_running_avg2(self):
"""
:description creates a running average for a specified x axis width (e.g., 7-day average values of an array)
:param x_array_in: array of x-axis values
:param y_array_in: array of y-axis values
:param num_db_values: number of points in the input arrays
:param x_array_out: array of x-zxis values in output array
:param y_array_out: array of y-axis values in output array
:param npts_out: number of points in the output array
:param x_dist: width in x_axis units of running weighted average
:param num_db_values: number of distance values to be retrieved
:param distance_name: name of column in sql database that contains the distance values
:NOTE This test uses a non-uniformly spaced x_array and monotonically increasing y_array
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result_x = pd.Series([], dtype='float')
expected_result_y = pd.Series([], dtype='float')
expected_result_npts = pd.Series([], dtype='object')
x_array_in = pd.Series([], dtype='float')
y_array_in = pd.Series([], dtype='float')
x_array_out = pd.Series([], dtype='float')
y_array_out = pd.Series([], dtype='float')
try:
expected_result_x = [0.,1.,2.,3.,4.,5.,6.,7.,8.,9.,10.,
11.5,12.,13.,14.,15.,16.,17.,18.,19.,20.,
21.5,22.,23.,24.,25.,26.,27.,28.,29.,30.,
31.5,32.,33.,34.,35.,36.,37.,38.,39.,40.,
41.5,42.,43.,44.]
expected_result_y = [2.5,3.5,4.5,5.5,6.5,7.5,8.4666667,9.4,10.4,11.4,
12.4,13.975,14.5,15.5,16.5,17.5,18.466666667,19.4,20.4,21.4,
22.4,23.975,24.5,25.5,26.5,27.5,28.46666667,29.4,30.4,31.4,
32.4,33.975,34.5,35.5,36.5,37.5,38.466666667,39.4,40.4,41.4,
42.4,43.975,44.5,45.5, 46.5]
expected_result_npts = 45
x_dist = 5.
agdrift_empty.num_db_values = 51
x_array_in = [0.,1.,2.,3.,4.,5.,6.,7.,8.,9.,10.,
11.5,12.,13.,14.,15.,16.,17.,18.,19.,20.,
21.5,22.,23.,24.,25.,26.,27.,28.,29.,30.,
31.5,32.,33.,34.,35.,36.,37.,38.,39.,40.,
41.5,42.,43.,44.,45.,46.,47.,48.,49.,50.]
y_array_in = [0.,1.,2.,3.,4.,5.,6.,7.,8.,9.,10.,
11.,12.,13.,14.,15.,16.,17.,18.,19.,20.,
21.,22.,23.,24.,25.,26.,27.,28.,29.,30.,
31.,32.,33.,34.,35.,36.,37.,38.,39.,40.,
41.,42.,43.,44.,45.,46.,47.,48.,49.,50.]
x_array_out, y_array_out, npts_out = agdrift_empty.generate_running_avg(agdrift_empty.num_db_values,
x_array_in, y_array_in, x_dist)
npt.assert_array_equal(expected_result_npts, npts_out, verbose=True)
npt.assert_allclose(x_array_out, expected_result_x, rtol=1e-5, atol=0, err_msg='', verbose=True)
npt.assert_allclose(y_array_out, expected_result_y, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
pass
tab1 = [x_array_out, expected_result_x]
tab2 = [y_array_out, expected_result_y]
print("\n")
print(inspect.currentframe().f_code.co_name)
print('expected {0} number of points and got {1} points'.format(expected_result_npts, npts_out))
print(tabulate(tab1, headers='keys', tablefmt='rst'))
print(tabulate(tab2, headers='keys', tablefmt='rst'))
return
def test_generate_running_avg3(self):
"""
:description creates a running average for a specified x axis width (e.g., 7-day average values of an array);
averages reflect weighted average assuming linearity between x points;
average is calculated as the area under the y-curve beginning at each x point and extending out x_dist
divided by x_dist (which yields the weighted average y between the relevant x points)
:param x_array_in: array of x-axis values
:param y_array_in: array of y-axis values
:param num_db_values: number of points in the input arrays
:param x_array_out: array of x-zxis values in output array
:param y_array_out: array of y-axis values in output array
:param npts_out: number of points in the output array
:param x_dist: width in x_axis units of running weighted average
:param num_db_values: number of distance values to be retrieved
:param distance_name: name of column in sql database that contains the distance values
:NOTE This test uses a monotonically increasing y_array and inserts a gap in the x values
that is greater than x_dist
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result_x = pd.Series([], dtype='float')
expected_result_y = pd.Series([], dtype='float')
expected_result_npts = pd.Series([], dtype='object')
x_array_in = pd.Series([], dtype='float')
y_array_in = pd.Series([], dtype='float')
x_array_out = pd.Series([], dtype='float')
y_array_out = pd.Series([], dtype='float')
try:
expected_result_x = [0.,1.,2.,3.,4.,5.,6.,7.,16.,17.,18.,19.,20.,
21.,22.,23.,24.,25.,26.,27.,28.,29.,30.,
31.,32.,33.,34.,35.,36.,37.,38.,39.,40.,
41.,42.,43.,44.,45.,46.,47.,48.,49.,50.,51.,52.]
expected_result_y = [2.5,3.5,4.5,5.4111111,6.14444444,6.7,7.07777777,7.277777777,10.5,11.5,
12.5,13.5,14.5,15.5,16.5,17.5,18.5,19.5,20.5,21.5,
22.5,23.5,24.5,25.5,26.5,27.5,28.5,29.5,30.5,31.5,
32.5,33.5,34.5,35.5,36.5,37.5,38.5,39.5,40.5,41.5,
42.5,43.5,44.5,45.5, 46.5]
expected_result_npts = 45
x_dist = 5.
num_db_values = 51
x_array_in = [0.,1.,2.,3.,4.,5.,6.,7.,16.,17.,18.,19.,20.,
21.,22.,23.,24.,25.,26.,27.,28.,29.,30.,
31.,32.,33.,34.,35.,36.,37.,38.,39.,40.,
41.,42.,43.,44.,45.,46.,47.,48.,49.,50.,
51.,52.,53.,54.,55.,56.,57.,58.]
y_array_in = [0.,1.,2.,3.,4.,5.,6.,7.,8.,9.,10.,
11.,12.,13.,14.,15.,16.,17.,18.,19.,20.,
21.,22.,23.,24.,25.,26.,27.,28.,29.,30.,
31.,32.,33.,34.,35.,36.,37.,38.,39.,40.,
41.,42.,43.,44.,45.,46.,47.,48.,49.,50.]
x_array_out, y_array_out, npts_out = agdrift_empty.generate_running_avg(num_db_values, x_array_in,
y_array_in, x_dist)
npt.assert_array_equal(expected_result_npts, npts_out, verbose=True)
npt.assert_allclose(x_array_out, expected_result_x, rtol=1e-5, atol=0, err_msg='', verbose=True)
npt.assert_allclose(y_array_out, expected_result_y, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
pass
tab1 = [x_array_out, expected_result_x]
tab2 = [y_array_out, expected_result_y]
print("\n")
print(inspect.currentframe().f_code.co_name)
print('expected {0} number of points and got {1} points'.format(expected_result_npts, npts_out))
print(tabulate(tab1, headers='keys', tablefmt='rst'))
print(tabulate(tab2, headers='keys', tablefmt='rst'))
return
def test_locate_integrated_avg(self):
"""
:description retrieves values for distance and the first deposition scenario from the sql database
and generates running weighted averages from the first x,y value until it locates the user
specified integrated average of interest
:param num_db_values: number of distance values to be retrieved
:param distance_name: name of column in sql database that contains the distance values
:NOTE any blank fields are filled with 'nan'
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
location = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
agdrift_empty.db_name = os.path.join(location, 'sqlite_agdrift_distance.db')
agdrift_empty.db_table = 'output'
expected_result_x = pd.Series([], dtype='float')
expected_result_y = pd.Series([], dtype='float')
expected_result_npts = pd.Series([], dtype='object')
x_array_in = pd.Series([], dtype='float')
y_array_in = pd.Series([], dtype='float')
x_array_out = pd.Series([], dtype='float')
y_array_out = pd.Series([], dtype='float')
try:
expected_result_x = [0.,0.102525,0.20505,0.4101,0.8202,1.6404,3.2808,4.9212,6.5616,9.8424,13.1232,19.6848,26.2464,
32.808,39.3696,45.9312,52.4928,59.0544,65.616,72.1776,78.7392,85.3008,91.8624,98.424,104.9856,
111.5472,118.1088,124.6704,131.232,137.7936,144.3552,150.9168,157.4784,164.04,170.6016,177.1632,
183.7248,190.2864,196.848,203.4096,209.9712,216.5328,223.0944,229.656,236.2176,242.7792,249.3408,
255.9024,262.464,269.0256,275.5872,282.1488,288.7104,295.272,301.8336,308.3952,314.9568,321.5184,
328.08,334.6416,341.2032,347.7648,354.3264,360.888,367.4496,374.0112,380.5728,387.1344,393.696,
400.2576,406.8192,413.3808,419.9424,426.504,433.0656,439.6272,446.1888,452.7504,459.312,465.8736,
472.4352,478.9968,485.5584,492.12,498.6816,505.2432,511.8048,518.3664,524.928,531.4896,538.0512,
544.6128,551.1744,557.736,564.2976,570.8592,577.4208,583.9824,590.544,597.1056,603.6672,610.2288,
616.7904,623.352,629.9136,636.4752,643.0368,649.5984,656.16,662.7216,669.2832,675.8448,682.4064,
688.968,695.5296,702.0912,708.6528,715.2144,721.776,728.3376,734.8992,741.4608,748.0224,754.584,
761.1456,767.7072,774.2688,780.8304,787.392,793.9536,800.5152,807.0768,813.6384,820.2,826.7616,
833.3232,839.8848,846.4464,853.008,859.5696,866.1312,872.6928,879.2544,885.816,892.3776,898.9392,
905.5008,912.0624,918.624,925.1856,931.7472,938.3088,944.8704,951.432,957.9936,964.5552,971.1168,
977.6784,984.24,990.8016]
expected_result_y = [0.364712246,0.351507467,0.339214283,0.316974687,0.279954504,0.225948786,0.159949625,
0.123048839,0.099781801,0.071666234,0.056352938,0.03860139,0.029600805,0.024150524,
0.020550354,0.01795028,0.015967703,0.014467663,0.013200146,0.01215011,0.011300098,
0.010550085,0.009905072,0.009345065,0.008845057,0.008400051,0.008000046,0.007635043,
0.007300039,0.007000034,0.006725033,0.00646503,0.006230027,0.006010027,0.005805023,
0.005615023,0.005435021,0.00527002,0.00511002,0.004960017,0.004820017,0.004685016,
0.004560015,0.004440015,0.004325013,0.004220012,0.004120012,0.004020012,0.003925011,
0.003835011,0.00375001,0.00367001,0.00359001,0.00351001,0.003435009,0.003365009,
0.003300007,0.003235009,0.003170007,0.003110007,0.003055006,0.003000007,0.002945006,
0.002895006,0.002845006,0.002795006,0.002745006,0.002695006,0.002650005,0.002610005,
0.002570005,0.002525006,0.002485004,0.002450005,0.002410005,0.002370005,0.002335004,
0.002300005,0.002265004,0.002235004,0.002205004,0.002175004,0.002145004,0.002115004,
0.002085004,0.002055004,0.002025004,0.002000002,0.001975004,0.001945004,0.001920002,
0.001900002,0.001875004,0.001850002,0.001830002,0.001805004,0.001780002,0.001760002,
0.001740002,0.001720002,0.001700002,0.001680002,0.001660002,0.001640002,0.001620002,
0.001605001,0.001590002,0.001570002,0.001550002,0.001535001,0.001520002,0.001500002,
0.001485001,0.001470002,0.001455001,0.001440002,0.001425001,0.001410002,0.001395001,
0.001385001,0.001370002,0.001355001,0.001340002,0.001325001,0.001315001,0.001305001,
0.001290002,0.001275001,0.001265001,0.001255001,0.001245001,0.001230002,0.001215001,
0.001205001,0.001195001,0.001185001,0.001175001,0.001165001,0.001155001,0.001145001,
0.001135001,0.001125001,0.001115001,0.001105001,0.001095001,0.001085001,0.001075001,
0.001065001,0.00106,0.001055001,0.001045001,0.001035001,0.001025001,0.001015001,
0.001005001,0.0009985,0.000993001,0.000985001,0.000977001,0.000969501]
expected_result_npts = 160
expected_x_dist_of_interest = 990.8016
x_dist = 6.56
weighted_avg = 0.0009697 #this is the running average value we're looking for
agdrift_empty.distance_name = 'distance_ft'
agdrift_empty.scenario_name = 'ground_low_vf'
agdrift_empty.num_db_values = 161
agdrift_empty.find_nearest_x = True
x_array_in = agdrift_empty.get_distances(agdrift_empty.num_db_values)
y_array_in = agdrift_empty.get_scenario_deposition_data(agdrift_empty.scenario_name, agdrift_empty.num_db_values)
x_array_out, y_array_out, npts_out, x_dist_of_interest, range_chk = \
agdrift_empty.locate_integrated_avg(agdrift_empty.num_db_values, x_array_in, y_array_in, x_dist, weighted_avg)
npt.assert_array_equal(expected_x_dist_of_interest, x_dist_of_interest, verbose=True)
npt.assert_array_equal(expected_result_npts, npts_out, verbose=True)
npt.assert_allclose(x_array_out, expected_result_x, rtol=1e-5, atol=0, err_msg='', verbose=True)
npt.assert_allclose(y_array_out, expected_result_y, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
pass
tab1 = [x_array_out, expected_result_x]
tab2 = [y_array_out, expected_result_y]
print("\n")
print(inspect.currentframe().f_code.co_name)
print('expected {0} x-units to area and got {1} '.format(expected_x_dist_of_interest, x_dist_of_interest))
print('expected {0} number of points and got {1} points'.format(expected_result_npts, npts_out))
print("x_array result/x_array_expected")
print(tabulate(tab1, headers='keys', tablefmt='rst'))
print("y_array result/y_array_expected")
print(tabulate(tab2, headers='keys', tablefmt='rst'))
return
def test_locate_integrated_avg1(self):
"""
:description retrieves values for distance and the first deposition scenario from the sql database
:param num_db_values: number of distance values to be retrieved
:param distance_name: name of column in sql database that contains the distance values
:NOTE this test is for a monotonically increasing function with some irregularity in x-axis points
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result_x = pd.Series([], dtype='float')
expected_result_y = pd.Series([], dtype='float')
x_array_in = pd.Series([], dtype='float')
y_array_in = pd.Series([], dtype='float')
x_array_out = pd.Series([], dtype='float')
y_array_out = pd.Series([], dtype='float')
try:
expected_result_x = [0.,7.0,16.0,17.0,18.0,19.0,20.0,28.0,29.0,30.0,31.]
expected_result_y = [0.357143,1.27778,4.4125,5.15,5.7125,6.1,6.3125,9.5,10.5,11.5,12.5]
expected_result_npts = 11
expected_x_dist_of_interest = 30.5
x_dist = 5.
weighted_avg = 12.
num_db_values = 51
x_array_in = [0.,7.,16.,17.,18.,19.,20.,28.,29.,30.,
31.,32.,33.,34.,35.,36.,37.,38.,39.,40.,
41.,42.,43.,44.,45.,46.,47.,48.,49.,50.,
51.,52.,53.,54.,55.,56.,57.,58.,59.,60.,
61.,62.,63.,64.,65.,66.,67.,68.,69.,70.,
71.]
y_array_in = [0.,1.,2.,3.,4.,5.,6.,7.,8.,9.,10.,
11.,12.,13.,14.,15.,16.,17.,18.,19.,20.,
21.,22.,23.,24.,25.,26.,27.,28.,29.,30.,
31.,32.,33.,34.,35.,36.,37.,38.,39.,40.,
41.,42.,43.,44.,45.,46.,47.,48.,49.,50.]
agdrift_empty.find_nearest_x = True
x_array_out, y_array_out, npts_out, x_dist_of_interest, range_chk = \
agdrift_empty.locate_integrated_avg(num_db_values, x_array_in, y_array_in, x_dist, weighted_avg)
npt.assert_array_equal(expected_x_dist_of_interest, x_dist_of_interest, verbose=True)
npt.assert_array_equal(expected_result_npts, npts_out, verbose=True)
npt.assert_allclose(x_array_out, expected_result_x, rtol=1e-5, atol=0, err_msg='', verbose=True)
npt.assert_allclose(y_array_out, expected_result_y, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
pass
tab1 = [x_array_out, expected_result_x]
tab2 = [y_array_out, expected_result_y]
print("\n")
print(inspect.currentframe().f_code.co_name)
print('expected {0} x-units to area and got {1} '.format(expected_x_dist_of_interest, x_dist_of_interest))
print('expected {0} number of points and got {1} points'.format(expected_result_npts, npts_out))
print("x_array result/x_array_expected")
print(tabulate(tab1, headers='keys', tablefmt='rst'))
print("y_array result/y_array_expected")
print(tabulate(tab2, headers='keys', tablefmt='rst'))
return
def test_locate_integrated_avg2(self):
"""
:description retrieves values for distance and the first deposition scenario from the sql database
:param num_db_values: number of distance values to be retrieved
:param distance_name: name of column in sql database that contains the distance values
:NOTE This test is for a monotonically decreasing function with irregular x-axis spacing
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result_x = pd.Series([], dtype='float')
expected_result_y = pd.Series([], dtype='float')
x_array_in = pd.Series([], dtype='float')
y_array_in = pd.Series([], dtype='float')
x_array_out = pd.Series([], dtype='float')
y_array_out = pd.Series([], dtype='float')
try:
expected_result_x = [0.,7.,16.,17.,18.,19.,20.,28.,29.,30.,
34.,35.,36.,37.,38.,39.,40.,
41.,42.,43.,44.,45.,46.,47.,48.,49.,50.,
51.,52.,53.,54.,55.,56.,57.,58.,59.,60.]
expected_result_y = [49.6429,48.7222,45.5875,44.85,44.2875,43.9,43.6875,41.175,40.7,40.3,
37.5,36.5,35.5,34.5,33.5,32.5,31.5,30.5,29.5,28.5,
27.5,26.5,25.5,24.5,23.5,22.5,21.5,20.5,19.5,18.5,
17.5,16.5,15.5,14.5,13.5,12.5,11.5]
expected_result_npts = 37
expected_x_dist_of_interest = 60.
x_dist = 5.
weighted_avg = 12.
num_db_values = 51
agdrift_empty.find_nearest_x = True
x_array_in = [0.,7.,16.,17.,18.,19.,20.,28.,29.,30.,
34.,35.,36.,37.,38.,39.,40.,
41.,42.,43.,44.,45.,46.,47.,48.,49.,50.,
51.,52.,53.,54.,55.,56.,57.,58.,59.,60.,
61.,62.,63.,64.,65.,66.,67.,68.,69.,70.,
71.,72.,73.,74. ]
y_array_in = [50.,49.,48.,47.,46.,45.,44.,43.,42.,41.,
40.,39.,38.,37.,36.,35.,34.,33.,32.,31.,
30.,29.,28.,27.,26.,25.,24.,23.,22.,21.,
20.,19.,18.,17.,16.,15.,14.,13.,12.,11.,
10.,9.,8.,7.,6.,5.,4.,3.,2.,1.,0.]
x_array_out, y_array_out, npts_out, x_dist_of_interest, range_chk = \
agdrift_empty.locate_integrated_avg(num_db_values, x_array_in, y_array_in, x_dist, weighted_avg)
npt.assert_array_equal(expected_x_dist_of_interest, x_dist_of_interest, verbose=True)
npt.assert_array_equal(expected_result_npts, npts_out, verbose=True)
npt.assert_allclose(x_array_out, expected_result_x, rtol=1e-5, atol=0, err_msg='', verbose=True)
npt.assert_allclose(y_array_out, expected_result_y, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
pass
tab1 = [x_array_out, expected_result_x]
tab2 = [y_array_out, expected_result_y]
print("\n")
print(inspect.currentframe().f_code.co_name)
print('expected {0} x-units to area and got {1} '.format(expected_x_dist_of_interest, x_dist_of_interest))
print('expected {0} number of points and got {1} points'.format(expected_result_npts, npts_out))
print("x_array result/x_array_expected")
print(tabulate(tab1, headers='keys', tablefmt='rst'))
print("y_array result/y_array_expected")
print(tabulate(tab2, headers='keys', tablefmt='rst'))
return
def test_locate_integrated_avg3(self):
"""
:description retrieves values for distance and the first deposition scenario from the sql database
:param num_db_values: number of distance values to be retrieved
:param distance_name: name of column in sql database that contains the distance values
:NOTE this test is for a monotonically decreasing function with regular x-axis spacing
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result_x = pd.Series([], dtype='float')
expected_result_y = pd.Series([], dtype='float')
x_array_in = pd.Series([], dtype='float')
y_array_in = pd.Series([], dtype='float')
x_array_out = pd.Series([], dtype='float')
y_array_out = pd.Series([], dtype='float')
expected_result_x_dist = pd.Series([], dtype='float')
try:
expected_result_x = [0.,1.,2.,3.,4.,5.,6.,7.,8.,9.,
10.,11.,12.,13.,14.,15.,16.,17.,18.,19.,
20.,21.,22.,23.,24.,25.,26.,27.,28.,29.,
30.,31.,32.,33.,34.,35.,36.]
expected_result_y = [47.5,46.5,45.5,44.5,43.5,42.5,41.5,40.5,39.5,38.5,
37.5,36.5,35.5,34.5,33.5,32.5,31.5,30.5,29.5,28.5,
27.5,26.5,25.5,24.5,23.5,22.5,21.5,20.5,19.5,18.5,
17.5,16.5,15.5,14.5,13.5,12.5,11.5]
expected_result_npts = 37
expected_x_dist_of_interest = 36.
x_dist = 5.
weighted_avg = 12.
num_db_values = 51
agdrift_empty.find_nearest_x = True
x_array_in = [0.,1.,2.,3.,4.,5.,6.,7.,8.,9.,
10.,11.,12.,13.,14.,15.,16.,17.,18.,19.,
20.,21.,22.,23.,24.,25.,26.,27.,28.,29.,
30.,31.,32.,33.,34.,35.,36.,37.,38.,39.,
40.,41.,42.,43.,44.,45.,46.,47.,48.,49.,
50.]
y_array_in = [50.,49.,48.,47.,46.,45.,44.,43.,42.,41.,
40.,39.,38.,37.,36.,35.,34.,33.,32.,31.,
30.,29.,28.,27.,26.,25.,24.,23.,22.,21.,
20.,19.,18.,17.,16.,15.,14.,13.,12.,11.,
10.,9.,8.,7.,6.,5.,4.,3.,2.,1.,0.]
x_array_out, y_array_out, npts_out, x_dist_of_interest, range_chk = \
agdrift_empty.locate_integrated_avg(num_db_values, x_array_in, y_array_in, x_dist, weighted_avg)
npt.assert_array_equal(expected_x_dist_of_interest, x_dist_of_interest, verbose=True )
npt.assert_array_equal(expected_result_npts, npts_out, verbose=True )
npt.assert_allclose(x_array_out, expected_result_x, rtol=1e-5, atol=0, err_msg='', verbose=True)
npt.assert_allclose(y_array_out, expected_result_y, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
pass
tab1 = [x_array_out, expected_result_x]
tab2 = [y_array_out, expected_result_y]
print("\n")
print(inspect.currentframe().f_code.co_name)
print('expected {0} x-units to area and got {1} '.format(expected_x_dist_of_interest, x_dist_of_interest))
print('expected {0} number of points and got {1} points'.format(expected_result_npts, npts_out))
print("x_array result/x_array_expected")
print(tabulate(tab1, headers='keys', tablefmt='rst'))
print("y_array result/y_array_expected")
print(tabulate(tab2, headers='keys', tablefmt='rst'))
return
def test_round_model_outputs(self):
"""
:description round output variable values (and place in output variable series) so that they can be directly
compared to expected results (which were limited in terms of their output format from the OPP AGDRIFT
model (V2.1.1) interface (we don't have the AGDRIFT code so we cannot change the output format to
agree with this model
:param avg_dep_foa:
:param avg_dep_lbac:
:param avg_dep_gha:
:param avg_waterconc_ngl:
:param avg_field_dep_mgcm2:
:param num_sims: number of simulations
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
num_sims = 3
num_args = 5
agdrift_empty.out_avg_dep_foa = pd.Series(num_sims * [np.nan], dtype='float')
agdrift_empty.out_avg_dep_lbac = pd.Series(num_sims * [np.nan], dtype='float')
agdrift_empty.out_avg_dep_gha = pd.Series(num_sims * [np.nan], dtype='float')
agdrift_empty.out_avg_waterconc_ngl = pd.Series(num_sims * [np.nan], dtype='float')
agdrift_empty.out_avg_field_dep_mgcm2 = pd.Series(num_sims * [np.nan], dtype='float')
result = pd.Series(num_sims * [num_args*[np.nan]], dtype='float')
expected_result = pd.Series(num_sims * [num_args*[np.nan]], dtype='float')
expected_result[0] = [1.26,1.26,1.26,1.26,1.26]
expected_result[1] = [0.0004,0.0004,0.0004,0.0004,0.0004]
expected_result[2] = [3.45e-05,3.45e-05,3.45e-05,3.45e-05,3.45e-05]
try:
#setting each variable to same values, each value tests a separate pathway through rounding method
avg_dep_lbac = pd.Series([1.2567,3.55e-4,3.454e-5], dtype='float')
avg_dep_foa = pd.Series([1.2567,3.55e-4,3.454e-5], dtype='float')
avg_dep_gha = pd.Series([1.2567,3.55e-4,3.454e-5], dtype='float')
avg_waterconc_ngl = pd.Series([1.2567,3.55e-4,3.454e-5], dtype='float')
avg_field_dep_mgcm2 = pd.Series([1.2567,3.55e-4,3.454e-5], dtype='float')
for i in range(num_sims):
lbac = avg_dep_lbac[i]
foa = avg_dep_foa[i]
gha = avg_dep_gha[i]
ngl = avg_waterconc_ngl[i]
mgcm2 = avg_field_dep_mgcm2[i]
agdrift_empty.round_model_outputs(foa, lbac, gha, ngl, mgcm2, i)
result[i] = [agdrift_empty.out_avg_dep_foa[i], agdrift_empty.out_avg_dep_lbac[i],
agdrift_empty.out_avg_dep_gha[i], agdrift_empty.out_avg_waterconc_ngl[i],
agdrift_empty.out_avg_field_dep_mgcm2[i]]
npt.assert_allclose(result[0], expected_result[0], rtol=1e-5, atol=0, err_msg='', verbose=True)
npt.assert_allclose(result[1], expected_result[1], rtol=1e-5, atol=0, err_msg='', verbose=True)
npt.assert_allclose(result[2], expected_result[2], rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
tab = [result, expected_result]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_find_dep_pt_location(self):
"""
:description this method locates the downwind distance associated with a specific deposition rate
:param x_array: array of distance values
:param y_array: array of deposition values
:param npts: number of values in x/y arrays
:param foa: value of deposition (y value) of interest
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
result = [[],[],[],[]]
expected_result = [(0.0, 'in range'), (259.1832, 'in range'), (997.3632, 'in range'), (np.nan, 'out of range')]
try:
x_array = [0.,0.102525,0.20505,0.4101,0.8202,1.6404,3.2808,4.9212,6.5616,9.8424,13.1232,19.6848,26.2464,
32.808,39.3696,45.9312,52.4928,59.0544,65.616,72.1776,78.7392,85.3008,91.8624,98.424,104.9856,
111.5472,118.1088,124.6704,131.232,137.7936,144.3552,150.9168,157.4784,164.04,170.6016,177.1632,
183.7248,190.2864,196.848,203.4096,209.9712,216.5328,223.0944,229.656,236.2176,242.7792,249.3408,
255.9024,262.464,269.0256,275.5872,282.1488,288.7104,295.272,301.8336,308.3952,314.9568,321.5184,
328.08,334.6416,341.2032,347.7648,354.3264,360.888,367.4496,374.0112,380.5728,387.1344,393.696,
400.2576,406.8192,413.3808,419.9424,426.504,433.0656,439.6272,446.1888,452.7504,459.312,465.8736,
472.4352,478.9968,485.5584,492.12,498.6816,505.2432,511.8048,518.3664,524.928,531.4896,538.0512,
544.6128,551.1744,557.736,564.2976,570.8592,577.4208,583.9824,590.544,597.1056,603.6672,610.2288,
616.7904,623.352,629.9136,636.4752,643.0368,649.5984,656.16,662.7216,669.2832,675.8448,682.4064,
688.968,695.5296,702.0912,708.6528,715.2144,721.776,728.3376,734.8992,741.4608,748.0224,754.584,
761.1456,767.7072,774.2688,780.8304,787.392,793.9536,800.5152,807.0768,813.6384,820.2,826.7616,
833.3232,839.8848,846.4464,853.008,859.5696,866.1312,872.6928,879.2544,885.816,892.3776,898.9392,
905.5008,912.0624,918.624,925.1856,931.7472,938.3088,944.8704,951.432,957.9936,964.5552,971.1168,
977.6784,984.24,990.8016, 997.3632]
y_array = [0.364706389,0.351133211,0.338484161,0.315606383,0.277604029,0.222810736,0.159943507,
0.121479708,0.099778741,0.068653,0.05635,0.0386,0.0296,0.02415,0.02055,0.01795,
0.0159675,0.0144675,0.0132,0.01215,0.0113,0.01055,0.009905,0.009345,0.008845,0.0084,
0.008,0.007635,0.0073,0.007,0.006725,0.006465,0.00623,0.00601,0.005805,0.005615,
0.005435,0.00527,0.00511,0.00496,0.00482,0.004685,0.00456,0.00444,0.004325,0.00422,
0.00412,0.00402,0.003925,0.003835,0.00375,0.00367,0.00359,0.00351,0.003435,0.003365,
0.0033,0.003235,0.00317,0.00311,0.003055,0.003,0.002945,0.002895,0.002845,0.002795,
0.002745,0.002695,0.00265,0.00261,0.00257,0.002525,0.002485,0.00245,0.00241,0.00237,
0.002335,0.0023,0.002265,0.002235,0.002205,0.002175,0.002145,0.002115,0.002085,
0.002055,0.002025,0.002,0.001975,0.001945,0.00192,0.0019,0.001875,0.00185,0.00183,
0.001805,0.00178,0.00176,0.00174,0.00172,0.0017,0.00168,0.00166,0.00164,0.00162,
0.001605,0.00159,0.00157,0.00155,0.001535,0.00152,0.0015,0.001485,0.00147,0.001455,
0.00144,0.001425,0.00141,0.001395,0.001385,0.00137,0.001355,0.00134,0.001325,0.001315,
0.001305,0.00129,0.001275,0.001265,0.001255,0.001245,0.00123,0.001215,0.001205,
0.001195,0.001185,0.001175,0.001165,0.001155,0.001145,0.001135,0.001125,0.001115,
0.001105,0.001095,0.001085,0.001075,0.001065,0.00106,0.001055,0.001045,0.001035,
0.001025,0.001015,0.001005,0.0009985,0.000993,0.000985,0.000977,0.0009695,0.0009612]
npts = len(x_array)
num_sims = 4
foa = [0.37, 0.004, 0.0009613, 0.0008]
for i in range(num_sims):
result[i] = agdrift_empty.find_dep_pt_location(x_array, y_array, npts, foa[i])
npt.assert_equal(expected_result, result, verbose=True)
finally:
tab = [result, expected_result]
print("\n")
print(inspect.currentframe().f_code.co_name)
print(tabulate(tab, headers='keys', tablefmt='rst'))
return
def test_extend_curve_opp(self):
"""
:description extends/extrapolates an x,y array of data points that reflect a ln ln relationship by selecting
a number of points near the end of the x,y arrays and fitting a line to the points
ln ln transforms (two ln ln transforms can by applied; on using the straight natural log of
each selected x,y point and one using a 'relative' value of each of the selected points --
the relative values are calculated by establishing a zero point closest to the selected
points
For AGDRIFT: extends distance vs deposition (fraction of applied) curve to enable model calculations
when area of interest (pond, wetland, terrestrial field) lie partially outside the original
curve (whose extent is 997 feet). The extension is achieved by fitting a line of best fit
to the last 16 points of the original curve. The x,y values representing the last 16 points
are natural log transforms of the distance and deposition values at the 16 points. Two long
transforms are coded here, reflecting the fact that the AGDRIFT model (v2.1.1) uses each of them
under different circumstandes (which I believe is not the intention but is the way the model
functions -- my guess is that one of the transforms was used and then a second one was coded
to increase the degree of conservativeness -- but the code was changed in only one of the two
places where the transformation occurs.
Finally, the AGDRIFT model extends the curve only when necessary (i.e., when it determines that
the area of intereest lies partially beyond the last point of the origanal curve (997 ft). In
this code all the curves are extended out to 1994 ft, which represents the furthest distance that
the downwind edge of an area of concern can be specified. All scenario curves are extended here
because we are running multiple simulations (e.g., monte carlo) and instead of extending the
curves each time a simulation requires it (which may be multiple time for the same scenario
curve) we just do it for all curves up front. There is a case to be made that the
curves should be extended external to this code and simply provide the full curve in the SQLite
database containing the original curve.
:param x_array: array of x values to be extended (must be at least 17 data points in original array)
:param y_array: array of y values to be extended
:param max_dist: maximum distance (ft) associated with unextended x values
:param dist_inc: increment (ft) for each extended data point
:param num_pts_ext: number of points at end of original x,y arrays to be used for extending the curve
:param ln_ln_trans: form of transformation to perform (True: straight ln ln, False: relative ln ln)
:return:
"""
# create empty pandas dataframes to create empty object for this unittest
agdrift_empty = self.create_agdrift_object()
expected_result_x = pd.Series([], dtype='float')
expected_result_y = pd.Series([], dtype='float')
# x_array_in = pd.Series([], dtype='float')
# y_array_in = pd.Series([], dtype='float')
x_array_out = pd.Series([], dtype='float')
y_array_out = pd.Series([], dtype='float')
try:
expected_result_x = [0.,6.5616,13.1232,19.6848,26.2464,
32.808,39.3696,45.9312,52.4928,59.0544,65.616,72.1776,78.7392,85.3008,91.8624,98.424,104.9856,
111.5472,118.1088,124.6704,131.232,137.7936,144.3552,150.9168,157.4784,164.04,170.6016,177.1632,
183.7248,190.2864,196.848,203.4096,209.9712,216.5328,223.0944,229.656,236.2176,242.7792,249.3408,
255.9024,262.464,269.0256,275.5872,282.1488,288.7104,295.272,301.8336,308.3952,314.9568,321.5184,
328.08,334.6416,341.2032,347.7648,354.3264,360.888,367.4496,374.0112,380.5728,387.1344,393.696,
400.2576,406.8192,413.3808,419.9424,426.504,433.0656,439.6272,446.1888,452.7504,459.312,465.8736,
472.4352,478.9968,485.5584,492.12,498.6816,505.2432,511.8048,518.3664,524.928,531.4896,538.0512,
544.6128,551.1744,557.736,564.2976,570.8592,577.4208,583.9824,590.544,597.1056,603.6672,610.2288,
616.7904,623.352,629.9136,636.4752,643.0368,649.5984,656.16,662.7216,669.2832,675.8448,682.4064,
688.968,695.5296,702.0912,708.6528,715.2144,721.776,728.3376,734.8992,741.4608,748.0224,754.584,
761.1456,767.7072,774.2688,780.8304,787.392,793.9536,800.5152,807.0768,813.6384,820.2,826.7616,
833.3232,839.8848,846.4464,853.008,859.5696,866.1312,872.6928,879.2544,885.816,892.3776,898.9392,
905.5008,912.0624,918.624,925.1856,931.7472,938.3088,944.8704,951.432,957.9936,964.5552,971.1168,
977.6784,984.24,990.8016,997.3632,
1003.9232,1010.4832,1017.0432,1023.6032,1030.1632,1036.7232,1043.2832,1049.8432,1056.4032,
1062.9632,1069.5232,1076.0832,1082.6432,1089.2032,1095.7632,1102.3232,1108.8832,1115.4432,
1122.0032,1128.5632,1135.1232,1141.6832,1148.2432,1154.8032,1161.3632,1167.9232,1174.4832,
1181.0432,1187.6032,1194.1632,1200.7232,1207.2832,1213.8432,1220.4032,1226.9632,1233.5232,
1240.0832,1246.6432,1253.2032,1259.7632,1266.3232,1272.8832,1279.4432,1286.0032,1292.5632,
1299.1232,1305.6832,1312.2432,1318.8032,1325.3632,1331.9232,1338.4832,1345.0432,1351.6032,
1358.1632,1364.7232,1371.2832,1377.8432,1384.4032,1390.9632,1397.5232,1404.0832,1410.6432,
1417.2032,1423.7632,1430.3232,1436.8832,1443.4432,1450.0032,1456.5632,1463.1232,1469.6832,
1476.2432,1482.8032,1489.3632,1495.9232,1502.4832,1509.0432,1515.6032,1522.1632,1528.7232,
1535.2832,1541.8432,1548.4032,1554.9632,1561.5232,1568.0832,1574.6432,1581.2032,1587.7632,
1594.3232,1600.8832,1607.4432,1614.0032,1620.5632,1627.1232,1633.6832,1640.2432,1646.8032,
1653.3632,1659.9232,1666.4832,1673.0432,1679.6032,1686.1632,1692.7232,1699.2832,1705.8432,
1712.4032,1718.9632,1725.5232,1732.0832,1738.6432,1745.2032,1751.7632,1758.3232,1764.8832,
1771.4432,1778.0032,1784.5632,1791.1232,1797.6832,1804.2432,1810.8032,1817.3632,1823.9232,
1830.4832,1837.0432,1843.6032,1850.1632,1856.7232,1863.2832,1869.8432,1876.4032,1882.9632,
1889.5232,1896.0832,1902.6432,1909.2032,1915.7632,1922.3232,1928.8832,1935.4432,1942.0032,
1948.5632,1955.1232,1961.6832,1968.2432,1974.8032,1981.3632,1987.9232,1994.4832]
expected_result_y = [0.49997,0.37451,0.29849,0.25004,0.2138,0.19455,0.18448,0.17591,0.1678,0.15421,0.1401,
0.12693,0.11785,0.11144,0.10675,0.099496,0.092323,0.085695,0.079234,0.074253,0.070316,
0.067191,0.064594,0.062337,0.060348,0.058192,0.055224,0.051972,0.049283,0.04757,
0.046226,0.044969,0.043922,0.043027,0.041934,0.040528,0.039018,0.037744,0.036762,
0.035923,0.035071,0.034267,0.033456,0.032629,0.03184,0.031078,0.030363,0.02968,0.029028,
0.028399,0.027788,0.027199,0.026642,0.026124,0.025635,0.02517,0.024719,0.024287,0.023867,
0.023457,0.023061,0.022685,0.022334,0.021998,0.021675,0.02136,0.021055,0.020758,0.020467,
0.020186,0.019919,0.019665,0.019421,0.019184,0.018951,0.018727,0.018514,0.018311,
0.018118,0.017929,0.017745,0.017564,0.017387,0.017214,0.017046,0.016886,0.016732,
0.016587,0.016446,0.016309,0.016174,0.016039,0.015906,0.015777,0.015653,0.015532,
0.015418,0.015308,0.015202,0.015097,0.014991,0.014885,0.014782,0.014683,0.014588,0.0145,
0.014415,0.014334,0.014254,0.014172,0.01409,0.014007,0.013926,0.013846,0.01377,0.013697,
0.013628,0.013559,0.013491,0.013423,0.013354,0.013288,0.013223,0.01316,0.013099,0.01304,
0.012983,0.012926,0.01287,0.012814,0.012758,0.012703,0.012649,0.012597,0.012547,0.012499,
0.01245,0.012402,0.012352,0.012302,0.012254,0.012205,0.012158,0.012113,0.012068,0.012025,
0.011982,0.01194,0.011899,0.011859,0.011819,0.01178,0.011741,1.1826345E-02,1.1812256E-02,
1.1798945E-02,1.1786331E-02,1.1774344E-02,1.1762927E-02,1.1752028E-02,1.1741602E-02,
1.1731610E-02,1.1722019E-02,1.1712796E-02,1.1703917E-02,1.1695355E-02,1.1687089E-02,
1.1679100E-02,1.1671370E-02,1.1663883E-02,1.1656623E-02,1.1649579E-02,1.1642737E-02,
1.1636087E-02,1.1629617E-02,1.1623319E-02,1.1617184E-02,1.1611203E-02,1.1605369E-02,
1.1599676E-02,1.1594116E-02,1.1588684E-02,1.1583373E-02,1.1578179E-02,1.1573097E-02,
1.1568122E-02,1.1563249E-02,1.1558475E-02,1.1553795E-02,1.1549206E-02,1.1544705E-02,
1.1540288E-02,1.1535953E-02,1.1531695E-02,1.1527514E-02,1.1523405E-02,1.1519367E-02,
1.1515397E-02,1.1511493E-02,1.1507652E-02,1.1503873E-02,1.1500154E-02,1.1496493E-02,
1.1492889E-02,1.1489338E-02,1.1485841E-02,1.1482395E-02,1.1478999E-02,1.1475651E-02,
1.1472351E-02,1.1469096E-02,1.1465886E-02,1.1462720E-02,1.1459595E-02,1.1456512E-02,
1.1453469E-02,1.1450465E-02,1.1447499E-02,1.1444570E-02,1.1441677E-02,1.1438820E-02,
1.1435997E-02,1.1433208E-02,1.1430452E-02,1.1427728E-02,1.1425036E-02,1.1422374E-02,
1.1419742E-02,1.1417139E-02,1.1414566E-02,1.1412020E-02,1.1409502E-02,1.1407011E-02,
1.1404546E-02,1.1402107E-02,1.1399693E-02,1.1397304E-02,1.1394939E-02,1.1392598E-02,
1.1390281E-02,1.1387986E-02,1.1385713E-02,1.1383463E-02,1.1381234E-02,1.1379026E-02,
1.1376840E-02,1.1374673E-02,1.1372527E-02,1.1370400E-02,1.1368292E-02,1.1366204E-02,
1.1364134E-02,1.1362082E-02,1.1360048E-02,1.1358032E-02,1.1356033E-02,1.1354052E-02,
1.1352087E-02,1.1350139E-02,1.1348207E-02,1.1346291E-02,1.1344390E-02,1.1342505E-02,
1.1340635E-02,1.1338781E-02,1.1336941E-02,1.1335115E-02,1.1333304E-02,1.1331507E-02,
1.1329723E-02,1.1327954E-02,1.1326197E-02,1.1324454E-02,1.1322724E-02,1.1321007E-02,
1.1319303E-02,1.1317611E-02,1.1315931E-02,1.1314263E-02,1.1312608E-02,1.1310964E-02,
1.1309332E-02,1.1307711E-02,1.1306101E-02,1.1304503E-02,1.1302915E-02,1.1301339E-02,
1.1299773E-02,1.1298218E-02,1.1296673E-02,1.1295138E-02,1.1293614E-02,1.1292099E-02,
1.1290594E-02,1.1289100E-02,1.1287614E-02,1.1286139E-02,1.1284672E-02,1.1283215E-02,
1.1281767E-02,1.1280328E-02,1.1278898E-02,1.1277477E-02,1.1276065E-02,1.1274661E-02]
expected_result_npts = [305]
max_dist = 997.3632
dist_inc = 6.56
num_pts_ext = 16
ln_ln_trans = False #using the relative ln ln transformation in this test
agdrift_empty.meters_per_ft = 0.3048
x_array_in = pd.Series([0.,6.5616,13.1232,19.6848,26.2464,
32.808,39.3696,45.9312,52.4928,59.0544,65.616,72.1776,78.7392,85.3008,91.8624,98.424,104.9856,
111.5472,118.1088,124.6704,131.232,137.7936,144.3552,150.9168,157.4784,164.04,170.6016,177.1632,
183.7248,190.2864,196.848,203.4096,209.9712,216.5328,223.0944,229.656,236.2176,242.7792,249.3408,
255.9024,262.464,269.0256,275.5872,282.1488,288.7104,295.272,301.8336,308.3952,314.9568,321.5184,
328.08,334.6416,341.2032,347.7648,354.3264,360.888,367.4496,374.0112,380.5728,387.1344,393.696,
400.2576,406.8192,413.3808,419.9424,426.504,433.0656,439.6272,446.1888,452.7504,459.312,465.8736,
472.4352,478.9968,485.5584,492.12,498.6816,505.2432,511.8048,518.3664,524.928,531.4896,538.0512,
544.6128,551.1744,557.736,564.2976,570.8592,577.4208,583.9824,590.544,597.1056,603.6672,610.2288,
616.7904,623.352,629.9136,636.4752,643.0368,649.5984,656.16,662.7216,669.2832,675.8448,682.4064,
688.968,695.5296,702.0912,708.6528,715.2144,721.776,728.3376,734.8992,741.4608,748.0224,754.584,
761.1456,767.7072,774.2688,780.8304,787.392,793.9536,800.5152,807.0768,813.6384,820.2,826.7616,
833.3232,839.8848,846.4464,853.008,859.5696,866.1312,872.6928,879.2544,885.816,892.3776,898.9392,
905.5008,912.0624,918.624,925.1856,931.7472,938.3088,944.8704,951.432,957.9936,964.5552,971.1168,
977.6784,984.24,990.8016,997.3632])
y_array_in = pd.Series([0.49997,0.37451,0.29849,0.25004,0.2138,0.19455,0.18448,0.17591,0.1678,0.15421,0.1401,
0.12693,0.11785,0.11144,0.10675,0.099496,0.092323,0.085695,0.079234,0.074253,0.070316,
0.067191,0.064594,0.062337,0.060348,0.058192,0.055224,0.051972,0.049283,0.04757,
0.046226,0.044969,0.043922,0.043027,0.041934,0.040528,0.039018,0.037744,0.036762,
0.035923,0.035071,0.034267,0.033456,0.032629,0.03184,0.031078,0.030363,0.02968,0.029028,
0.028399,0.027788,0.027199,0.026642,0.026124,0.025635,0.02517,0.024719,0.024287,0.023867,
0.023457 ,0.023061,0.022685,0.022334,0.021998,0.021675,0.02136,0.021055,0.020758,0.020467,
0.020186,0.019919,0.019665,0.019421,0.019184,0.018951,0.018727,0.018514,0.018311,
0.018118,0.017929,0.017745,0.017564,0.017387,0.017214,0.017046,0.016886,0.016732,
0.016587,0.016446,0.016309,0.016174,0.016039,0.015906,0.015777,0.015653,0.015532,
0.015418,0.015308,0.015202,0.015097,0.014991,0.014885,0.014782,0.014683,0.014588,0.0145,
0.014415,0.014334,0.014254,0.014172,0.01409,0.014007,0.013926,0.013846,0.01377,0.013697,
0.013628,0.013559,0.013491,0.013423,0.013354,0.013288,0.013223,0.01316,0.013099,0.01304,
0.012983,0.012926,0.01287,0.012814,0.012758,0.012703,0.012649,0.012597,0.012547,0.012499,
0.01245,0.012402,0.012352,0.012302,0.012254,0.012205,0.012158,0.012113,0.012068,0.012025,
0.011982,0.01194,0.011899,0.011859,0.011819,0.01178,0.011741])
x_array_out, y_array_out = agdrift_empty.extend_curve_opp(x_array_in, y_array_in, max_dist, dist_inc, num_pts_ext,
ln_ln_trans)
npts_out = [len(y_array_out)]
#
#agdrift_empty.write_arrays_to_csv(x_array_out, y_array_out, "extend_data.csv")
npt.assert_array_equal(expected_result_npts, npts_out, verbose=True)
npt.assert_allclose(x_array_out, expected_result_x, rtol=1e-5, atol=0, err_msg='', verbose=True)
npt.assert_allclose(y_array_out, expected_result_y, rtol=1e-5, atol=0, err_msg='', verbose=True)
finally:
pass
tab1 = [x_array_out, expected_result_x]
tab2 = [y_array_out, expected_result_y]
print("\n")
print(inspect.currentframe().f_code.co_name)
print('expected {0} number of points and got {1} points'.format(expected_result_npts[0], npts_out[0]))
print("x_array result/x_array_expected")
print(tabulate(tab1, headers='keys', tablefmt='rst'))
print("y_array result/y_array_expected")
print(tabulate(tab2, headers='keys', tablefmt='rst'))
return
def test_extend_curve_opp1(self):
    """
    :description Unit test of extend_curve_opp using the straight (absolute) ln ln
                 transformation (ln_ln_trans = True).

                 extends/extrapolates an x,y array of data points that reflect a ln ln relationship by selecting
                 a number of points near the end of the x,y arrays and fitting a line to the points
                 ln ln transforms (two ln ln transforms can be applied; one using the straight natural log of
                 each selected x,y point and one using a 'relative' value of each of the selected points --
                 the relative values are calculated by establishing a zero point closest to the selected
                 points
                 For AGDRIFT: extends distance vs deposition (fraction of applied) curve to enable model calculations
                 when area of interest (pond, wetland, terrestrial field) lie partially outside the original
                 curve (whose extent is 997 feet). The extension is achieved by fitting a line of best fit
                 to the last 16 points of the original curve. The x,y values representing the last 16 points
                 are natural log transforms of the distance and deposition values at the 16 points. Two log
                 transforms are coded here, reflecting the fact that the AGDRIFT model (v2.1.1) uses each of them
                 under different circumstances (which I believe is not the intention but is the way the model
                 functions -- my guess is that one of the transforms was used and then a second one was coded
                 to increase the degree of conservativeness -- but the code was changed in only one of the two
                 places where the transformation occurs.
                 Finally, the AGDRIFT model extends the curve only when necessary (i.e., when it determines that
                 the area of interest lies partially beyond the last point of the original curve (997 ft). In
                 this code all the curves are extended out to 1994 ft, which represents the furthest distance that
                 the downwind edge of an area of concern can be specified. All scenario curves are extended here
                 because we are running multiple simulations (e.g., monte carlo) and instead of extending the
                 curves each time a simulation requires it (which may be multiple times for the same scenario
                 curve) we just do it for all curves up front. There is a case to be made that the
                 curves should be extended external to this code and simply provide the full curve in the SQLite
                 database containing the original curve.
    :param x_array: array of x values to be extended (must be at least 17 data points in original array)
    :param y_array: array of y values to be extended
    :param max_dist: maximum distance (ft) associated with unextended x values
    :param dist_inc: increment (ft) for each extended data point
    :param num_pts_ext: number of points at end of original x,y arrays to be used for extending the curve
    :param ln_ln_trans: form of transformation to perform (True: straight ln ln, False: relative ln ln)
    :return:
    """
    # create empty pandas dataframes to create empty object for this unittest
    agdrift_empty = self.create_agdrift_object()
    # pre-declare result holders so the diagnostic/print section at the end of the
    # method can reference them even if an exception occurs before they are assigned
    expected_result_x = pd.Series([], dtype='float')
    expected_result_y = pd.Series([], dtype='float')
    # x_array_in = pd.Series([], dtype='float')
    # y_array_in = pd.Series([], dtype='float')
    x_array_out = pd.Series([], dtype='float')
    y_array_out = pd.Series([], dtype='float')
    try:
        # expected distances (ft): the original curve (0 to 997.3632 ft) followed by the
        # extrapolated points out to ~1994 ft in dist_inc increments
        expected_result_x = [0.,6.5616,13.1232,19.6848,26.2464,
            32.808,39.3696,45.9312,52.4928,59.0544,65.616,72.1776,78.7392,85.3008,91.8624,98.424,104.9856,
            111.5472,118.1088,124.6704,131.232,137.7936,144.3552,150.9168,157.4784,164.04,170.6016,177.1632,
            183.7248,190.2864,196.848,203.4096,209.9712,216.5328,223.0944,229.656,236.2176,242.7792,249.3408,
            255.9024,262.464,269.0256,275.5872,282.1488,288.7104,295.272,301.8336,308.3952,314.9568,321.5184,
            328.08,334.6416,341.2032,347.7648,354.3264,360.888,367.4496,374.0112,380.5728,387.1344,393.696,
            400.2576,406.8192,413.3808,419.9424,426.504,433.0656,439.6272,446.1888,452.7504,459.312,465.8736,
            472.4352,478.9968,485.5584,492.12,498.6816,505.2432,511.8048,518.3664,524.928,531.4896,538.0512,
            544.6128,551.1744,557.736,564.2976,570.8592,577.4208,583.9824,590.544,597.1056,603.6672,610.2288,
            616.7904,623.352,629.9136,636.4752,643.0368,649.5984,656.16,662.7216,669.2832,675.8448,682.4064,
            688.968,695.5296,702.0912,708.6528,715.2144,721.776,728.3376,734.8992,741.4608,748.0224,754.584,
            761.1456,767.7072,774.2688,780.8304,787.392,793.9536,800.5152,807.0768,813.6384,820.2,826.7616,
            833.3232,839.8848,846.4464,853.008,859.5696,866.1312,872.6928,879.2544,885.816,892.3776,898.9392,
            905.5008,912.0624,918.624,925.1856,931.7472,938.3088,944.8704,951.432,957.9936,964.5552,971.1168,
            977.6784,984.24,990.8016,997.3632,
            1003.9232,1010.4832,1017.0432,1023.6032,1030.1632,1036.7232,1043.2832,1049.8432,1056.4032,
            1062.9632,1069.5232,1076.0832,1082.6432,1089.2032,1095.7632,1102.3232,1108.8832,1115.4432,
            1122.0032,1128.5632,1135.1232,1141.6832,1148.2432,1154.8032,1161.3632,1167.9232,1174.4832,
            1181.0432,1187.6032,1194.1632,1200.7232,1207.2832,1213.8432,1220.4032,1226.9632,1233.5232,
            1240.0832,1246.6432,1253.2032,1259.7632,1266.3232,1272.8832,1279.4432,1286.0032,1292.5632,
            1299.1232,1305.6832,1312.2432,1318.8032,1325.3632,1331.9232,1338.4832,1345.0432,1351.6032,
            1358.1632,1364.7232,1371.2832,1377.8432,1384.4032,1390.9632,1397.5232,1404.0832,1410.6432,
            1417.2032,1423.7632,1430.3232,1436.8832,1443.4432,1450.0032,1456.5632,1463.1232,1469.6832,
            1476.2432,1482.8032,1489.3632,1495.9232,1502.4832,1509.0432,1515.6032,1522.1632,1528.7232,
            1535.2832,1541.8432,1548.4032,1554.9632,1561.5232,1568.0832,1574.6432,1581.2032,1587.7632,
            1594.3232,1600.8832,1607.4432,1614.0032,1620.5632,1627.1232,1633.6832,1640.2432,1646.8032,
            1653.3632,1659.9232,1666.4832,1673.0432,1679.6032,1686.1632,1692.7232,1699.2832,1705.8432,
            1712.4032,1718.9632,1725.5232,1732.0832,1738.6432,1745.2032,1751.7632,1758.3232,1764.8832,
            1771.4432,1778.0032,1784.5632,1791.1232,1797.6832,1804.2432,1810.8032,1817.3632,1823.9232,
            1830.4832,1837.0432,1843.6032,1850.1632,1856.7232,1863.2832,1869.8432,1876.4032,1882.9632,
            1889.5232,1896.0832,1902.6432,1909.2032,1915.7632,1922.3232,1928.8832,1935.4432,1942.0032,
            1948.5632,1955.1232,1961.6832,1968.2432,1974.8032,1981.3632,1987.9232,1994.4832]
        # expected depositions (fraction of applied): original curve values followed by the
        # extrapolated tail (scientific-notation values, given to ~6 significant figures)
        expected_result_y = [0.49997,0.37451,0.29849,0.25004,0.2138,0.19455,0.18448,0.17591,0.1678,0.15421,0.1401,
            0.12693,0.11785,0.11144,0.10675,0.099496,0.092323,0.085695,0.079234,0.074253,0.070316,
            0.067191,0.064594,0.062337,0.060348,0.058192,0.055224,0.051972,0.049283,0.04757,
            0.046226,0.044969,0.043922,0.043027,0.041934,0.040528,0.039018,0.037744,0.036762,
            0.035923,0.035071,0.034267,0.033456,0.032629,0.03184,0.031078,0.030363,0.02968,0.029028,
            0.028399,0.027788,0.027199,0.026642,0.026124,0.025635,0.02517,0.024719,0.024287,0.023867,
            0.023457,0.023061,0.022685,0.022334,0.021998,0.021675,0.02136,0.021055,0.020758,0.020467,
            0.020186,0.019919,0.019665,0.019421,0.019184,0.018951,0.018727,0.018514,0.018311,
            0.018118,0.017929,0.017745,0.017564,0.017387,0.017214,0.017046,0.016886,0.016732,
            0.016587,0.016446,0.016309,0.016174,0.016039,0.015906,0.015777,0.015653,0.015532,
            0.015418,0.015308,0.015202,0.015097,0.014991,0.014885,0.014782,0.014683,0.014588,0.0145,
            0.014415,0.014334,0.014254,0.014172,0.01409,0.014007,0.013926,0.013846,0.01377,0.013697,
            0.013628,0.013559,0.013491,0.013423,0.013354,0.013288,0.013223,0.01316,0.013099,0.01304,
            0.012983,0.012926,0.01287,0.012814,0.012758,0.012703,0.012649,0.012597,0.012547,0.012499,
            0.01245,0.012402,0.012352,0.012302,0.012254,0.012205,0.012158,0.012113,0.012068,0.012025,
            0.011982,0.01194,0.011899,0.011859,0.011819,0.01178,0.011741,1.16941E-02,1.16540E-02,
            1.16144E-02,1.15752E-02,1.15363E-02,1.14978E-02,1.14597E-02,1.14219E-02,1.13845E-02,
            1.13475E-02,1.13108E-02,1.12744E-02,1.12384E-02,1.12027E-02,1.11674E-02,1.11323E-02,
            1.10976E-02,1.10632E-02,1.10291E-02,1.09953E-02,1.09618E-02,1.09286E-02,1.08957E-02,
            1.08630E-02,1.08307E-02,1.07986E-02,1.07668E-02,1.07353E-02,1.07040E-02,1.06730E-02,
            1.06423E-02,1.06118E-02,1.05816E-02,1.05516E-02,1.05218E-02,1.04923E-02,1.04631E-02,
            1.04341E-02,1.04053E-02,1.03767E-02,1.03484E-02,1.03203E-02,1.02924E-02,1.02647E-02,
            1.02372E-02,1.02100E-02,1.01829E-02,1.01561E-02,1.01295E-02,1.01031E-02,1.00768E-02,
            1.00508E-02,1.00250E-02,9.99932E-03,9.97386E-03,9.94860E-03,9.92351E-03,9.89861E-03,
            9.87389E-03,9.84934E-03,9.82498E-03,9.80078E-03,9.77676E-03,9.75291E-03,9.72923E-03,
            9.70571E-03,9.68236E-03,9.65916E-03,9.63613E-03,9.61326E-03,9.59055E-03,9.56799E-03,
            9.54558E-03,9.52332E-03,9.50122E-03,9.47926E-03,9.45745E-03,9.43578E-03,9.41426E-03,
            9.39287E-03,9.37163E-03,9.35053E-03,9.32957E-03,9.30874E-03,9.28804E-03,9.26748E-03,
            9.24705E-03,9.22675E-03,9.20657E-03,9.18653E-03,9.16661E-03,9.14682E-03,9.12714E-03,
            9.10760E-03,9.08817E-03,9.06886E-03,9.04967E-03,9.03060E-03,9.01164E-03,8.99280E-03,
            8.97407E-03,8.95546E-03,8.93696E-03,8.91856E-03,8.90028E-03,8.88210E-03,8.86404E-03,
            8.84608E-03,8.82822E-03,8.81047E-03,8.79282E-03,8.77527E-03,8.75782E-03,8.74048E-03,
            8.72323E-03,8.70608E-03,8.68903E-03,8.67208E-03,8.65522E-03,8.63845E-03,8.62178E-03,
            8.60521E-03,8.58872E-03,8.57233E-03,8.55602E-03,8.53981E-03,8.52368E-03,8.50765E-03,
            8.49170E-03,8.47583E-03,8.46005E-03,8.44436E-03,8.42875E-03,8.41323E-03,8.39778E-03,
            8.38242E-03,8.36714E-03,8.35194E-03,8.33682E-03,8.32178E-03,8.30682E-03,8.29193E-03,
            8.27713E-03,8.26240E-03,8.24774E-03,8.23316E-03,8.21866E-03,8.20422E-03,8.18987E-03,
            8.17558E-03,8.16137E-03,8.14722E-03]
        # total number of points expected in the extended curve (original + extension)
        expected_result_npts = [305]
        # last distance (ft) of the unextended input curve
        max_dist = 997.3632
        # spacing (ft) between extrapolated points
        dist_inc = 6.56
        # number of trailing points used to fit the extrapolation line
        num_pts_ext = 16
        ln_ln_trans = True #using the absolute ln ln transformation in this test
        agdrift_empty.meters_per_ft = 0.3048
        # input curve: distances (ft) from 0 to 997.3632
        x_array_in = pd.Series([0.,6.5616,13.1232,19.6848,26.2464,
            32.808,39.3696,45.9312,52.4928,59.0544,65.616,72.1776,78.7392,85.3008,91.8624,98.424,104.9856,
            111.5472,118.1088,124.6704,131.232,137.7936,144.3552,150.9168,157.4784,164.04,170.6016,177.1632,
            183.7248,190.2864,196.848,203.4096,209.9712,216.5328,223.0944,229.656,236.2176,242.7792,249.3408,
            255.9024,262.464,269.0256,275.5872,282.1488,288.7104,295.272,301.8336,308.3952,314.9568,321.5184,
            328.08,334.6416,341.2032,347.7648,354.3264,360.888,367.4496,374.0112,380.5728,387.1344,393.696,
            400.2576,406.8192,413.3808,419.9424,426.504,433.0656,439.6272,446.1888,452.7504,459.312,465.8736,
            472.4352,478.9968,485.5584,492.12,498.6816,505.2432,511.8048,518.3664,524.928,531.4896,538.0512,
            544.6128,551.1744,557.736,564.2976,570.8592,577.4208,583.9824,590.544,597.1056,603.6672,610.2288,
            616.7904,623.352,629.9136,636.4752,643.0368,649.5984,656.16,662.7216,669.2832,675.8448,682.4064,
            688.968,695.5296,702.0912,708.6528,715.2144,721.776,728.3376,734.8992,741.4608,748.0224,754.584,
            761.1456,767.7072,774.2688,780.8304,787.392,793.9536,800.5152,807.0768,813.6384,820.2,826.7616,
            833.3232,839.8848,846.4464,853.008,859.5696,866.1312,872.6928,879.2544,885.816,892.3776,898.9392,
            905.5008,912.0624,918.624,925.1856,931.7472,938.3088,944.8704,951.432,957.9936,964.5552,971.1168,
            977.6784,984.24,990.8016,997.3632])
        # input curve: deposition (fraction of applied) at each distance
        y_array_in = pd.Series([0.49997,0.37451,0.29849,0.25004,0.2138,0.19455,0.18448,0.17591,0.1678,0.15421,0.1401,
            0.12693,0.11785,0.11144,0.10675,0.099496,0.092323,0.085695,0.079234,0.074253,0.070316,
            0.067191,0.064594,0.062337,0.060348,0.058192,0.055224,0.051972,0.049283,0.04757,
            0.046226,0.044969,0.043922,0.043027,0.041934,0.040528,0.039018,0.037744,0.036762,
            0.035923,0.035071,0.034267,0.033456,0.032629,0.03184,0.031078,0.030363,0.02968,0.029028,
            0.028399,0.027788,0.027199,0.026642,0.026124,0.025635,0.02517,0.024719,0.024287,0.023867,
            0.023457,0.023061,0.022685,0.022334,0.021998,0.021675,0.02136,0.021055,0.020758,0.020467,
            0.020186,0.019919,0.019665,0.019421,0.019184,0.018951,0.018727,0.018514,0.018311,
            0.018118,0.017929,0.017745,0.017564,0.017387,0.017214,0.017046,0.016886,0.016732,
            0.016587,0.016446,0.016309,0.016174,0.016039,0.015906,0.015777,0.015653,0.015532,
            0.015418,0.015308,0.015202,0.015097,0.014991,0.014885,0.014782,0.014683,0.014588,0.0145,
            0.014415,0.014334,0.014254,0.014172,0.01409,0.014007,0.013926,0.013846,0.01377,0.013697,
            0.013628,0.013559,0.013491,0.013423,0.013354,0.013288,0.013223,0.01316,0.013099,0.01304,
            0.012983,0.012926,0.01287,0.012814,0.012758,0.012703,0.012649,0.012597,0.012547,0.012499,
            0.01245,0.012402,0.012352,0.012302,0.012254,0.012205,0.012158,0.012113,0.012068,0.012025,
            0.011982,0.01194,0.011899,0.011859,0.011819,0.01178,0.011741])
        x_array_out, y_array_out = agdrift_empty.extend_curve_opp(x_array_in, y_array_in, max_dist, dist_inc, num_pts_ext,
                                                                  ln_ln_trans)
        npts_out = [len(y_array_out)]
        #
        #agdrift_empty.write_arrays_to_csv(x_array_out, y_array_out, "extend_data.csv")
        npt.assert_array_equal(expected_result_npts, npts_out, verbose=True)
        npt.assert_allclose(x_array_out, expected_result_x, rtol=1e-5, atol=0, err_msg='', verbose=True)
        # NOTE(review): looser rtol (1e-4 vs 1e-5 elsewhere) — presumably because the
        # expected tail values are only given to ~6 significant figures; confirm
        npt.assert_allclose(y_array_out, expected_result_y, rtol=1e-4, atol=0, err_msg='', verbose=True)
    finally:
        pass
    # diagnostic output (reached only when the assertions above pass)
    tab1 = [x_array_out, expected_result_x]
    tab2 = [y_array_out, expected_result_y]
    print("\n")
    print(inspect.currentframe().f_code.co_name)
    print('expected {0} number of points and got {1} points'.format(expected_result_npts[0], npts_out[0]))
    print("x_array result/x_array_expected")
    print(tabulate(tab1, headers='keys', tablefmt='rst'))
    print("y_array result/y_array_expected")
    print(tabulate(tab2, headers='keys', tablefmt='rst'))
    return
def test_extend_curve(self):
    """
    :description Unit test of extend_curve using the straight ln ln transformation
                 (ln_ln_trans = True) with 15 fitting points.

                 extends/extrapolates an x,y array of data points that reflect a ln ln relationship by selecting
                 a number of points near the end of the x,y arrays and fitting a line to the points
                 ln ln transforms (two ln ln transforms can be applied; one using the straight natural log of
                 each selected x,y point and one using a 'relative' value of each of the selected points --
                 the relative values are calculated by establishing a zero point closest to the selected
                 points
                 For AGDRIFT: extends distance vs deposition (fraction of applied) curve to enable model calculations
                 when area of interest (pond, wetland, terrestrial field) lie partially outside the original
                 curve (whose extent is 997 feet). The extension is achieved by fitting a line of best fit
                 to the last 16 points of the original curve. The x,y values representing the last 16 points
                 are natural log transforms of the distance and deposition values at the 16 points. Two log
                 transforms are coded here, reflecting the fact that the AGDRIFT model (v2.1.1) uses each of them
                 under different circumstances (which I believe is not the intention but is the way the model
                 functions -- my guess is that one of the transforms was used and then a second one was coded
                 to increase the degree of conservativeness -- but the code was changed in only one of the two
                 places where the transformation occurs.
                 Finally, the AGDRIFT model extends the curve only when necessary (i.e., when it determines that
                 the area of interest lies partially beyond the last point of the original curve (997 ft). In
                 this code all the curves are extended out to 1994 ft, which represents the furthest distance that
                 the downwind edge of an area of concern can be specified. All scenario curves are extended here
                 because we are running multiple simulations (e.g., monte carlo) and instead of extending the
                 curves each time a simulation requires it (which may be multiple times for the same scenario
                 curve) we just do it for all curves up front. There is a case to be made that the
                 curves should be extended external to this code and simply provide the full curve in the SQLite
                 database containing the original curve.
    :param x_array: array of x values to be extended (must be at least 17 data points in original array)
    :param y_array: array of y values to be extended
    :param max_dist: maximum distance (ft) associated with unextended x values
    :param dist_inc: increment (ft) for each extended data point
    :param num_pts_ext: number of points at end of original x,y arrays to be used for extending the curve
    :param ln_ln_trans: form of transformation to perform (True: straight ln ln, False: relative ln ln)
    :return:
    """
    # create empty pandas dataframes to create empty object for this unittest
    agdrift_empty = self.create_agdrift_object()
    # pre-declare result holders so the diagnostic/print section at the end of the
    # method can reference them even if an exception occurs before they are assigned
    expected_result_x = pd.Series([], dtype='float')
    expected_result_y = pd.Series([], dtype='float')
    # x_array_in = pd.Series([], dtype='float')
    # y_array_in = pd.Series([], dtype='float')
    x_array_out = pd.Series([], dtype='float')
    y_array_out = pd.Series([], dtype='float')
    try:
        # expected distances (ft): the original curve (0 to 997.3632 ft) followed by the
        # extrapolated points out to ~1994 ft in dist_inc increments
        expected_result_x = [0.,6.5616,13.1232,19.6848,26.2464,
            32.808,39.3696,45.9312,52.4928,59.0544,65.616,72.1776,78.7392,85.3008,91.8624,98.424,104.9856,
            111.5472,118.1088,124.6704,131.232,137.7936,144.3552,150.9168,157.4784,164.04,170.6016,177.1632,
            183.7248,190.2864,196.848,203.4096,209.9712,216.5328,223.0944,229.656,236.2176,242.7792,249.3408,
            255.9024,262.464,269.0256,275.5872,282.1488,288.7104,295.272,301.8336,308.3952,314.9568,321.5184,
            328.08,334.6416,341.2032,347.7648,354.3264,360.888,367.4496,374.0112,380.5728,387.1344,393.696,
            400.2576,406.8192,413.3808,419.9424,426.504,433.0656,439.6272,446.1888,452.7504,459.312,465.8736,
            472.4352,478.9968,485.5584,492.12,498.6816,505.2432,511.8048,518.3664,524.928,531.4896,538.0512,
            544.6128,551.1744,557.736,564.2976,570.8592,577.4208,583.9824,590.544,597.1056,603.6672,610.2288,
            616.7904,623.352,629.9136,636.4752,643.0368,649.5984,656.16,662.7216,669.2832,675.8448,682.4064,
            688.968,695.5296,702.0912,708.6528,715.2144,721.776,728.3376,734.8992,741.4608,748.0224,754.584,
            761.1456,767.7072,774.2688,780.8304,787.392,793.9536,800.5152,807.0768,813.6384,820.2,826.7616,
            833.3232,839.8848,846.4464,853.008,859.5696,866.1312,872.6928,879.2544,885.816,892.3776,898.9392,
            905.5008,912.0624,918.624,925.1856,931.7472,938.3088,944.8704,951.432,957.9936,964.5552,971.1168,
            977.6784,984.24,990.8016,997.3632,
            1003.9232,1010.4832,1017.0432,1023.6032,1030.1632,1036.7232,1043.2832,1049.8432,1056.4032,
            1062.9632,1069.5232,1076.0832,1082.6432,1089.2032,1095.7632,1102.3232,1108.8832,1115.4432,
            1122.0032,1128.5632,1135.1232,1141.6832,1148.2432,1154.8032,1161.3632,1167.9232,1174.4832,
            1181.0432,1187.6032,1194.1632,1200.7232,1207.2832,1213.8432,1220.4032,1226.9632,1233.5232,
            1240.0832,1246.6432,1253.2032,1259.7632,1266.3232,1272.8832,1279.4432,1286.0032,1292.5632,
            1299.1232,1305.6832,1312.2432,1318.8032,1325.3632,1331.9232,1338.4832,1345.0432,1351.6032,
            1358.1632,1364.7232,1371.2832,1377.8432,1384.4032,1390.9632,1397.5232,1404.0832,1410.6432,
            1417.2032,1423.7632,1430.3232,1436.8832,1443.4432,1450.0032,1456.5632,1463.1232,1469.6832,
            1476.2432,1482.8032,1489.3632,1495.9232,1502.4832,1509.0432,1515.6032,1522.1632,1528.7232,
            1535.2832,1541.8432,1548.4032,1554.9632,1561.5232,1568.0832,1574.6432,1581.2032,1587.7632,
            1594.3232,1600.8832,1607.4432,1614.0032,1620.5632,1627.1232,1633.6832,1640.2432,1646.8032,
            1653.3632,1659.9232,1666.4832,1673.0432,1679.6032,1686.1632,1692.7232,1699.2832,1705.8432,
            1712.4032,1718.9632,1725.5232,1732.0832,1738.6432,1745.2032,1751.7632,1758.3232,1764.8832,
            1771.4432,1778.0032,1784.5632,1791.1232,1797.6832,1804.2432,1810.8032,1817.3632,1823.9232,
            1830.4832,1837.0432,1843.6032,1850.1632,1856.7232,1863.2832,1869.8432,1876.4032,1882.9632,
            1889.5232,1896.0832,1902.6432,1909.2032,1915.7632,1922.3232,1928.8832,1935.4432,1942.0032,
            1948.5632,1955.1232,1961.6832,1968.2432,1974.8032,1981.3632,1987.9232,1994.4832]
        # expected depositions (fraction of applied): original curve values followed by the
        # extrapolated tail
        expected_result_y = [0.49997,0.37451,0.29849,0.25004,0.2138,0.19455,0.18448,0.17591,0.1678,0.15421,0.1401,
            0.12693,0.11785,0.11144,0.10675,0.099496,0.092323,0.085695,0.079234,0.074253,0.070316,
            0.067191,0.064594,0.062337,0.060348,0.058192,0.055224,0.051972,0.049283,0.04757,
            0.046226,0.044969,0.043922,0.043027,0.041934,0.040528,0.039018,0.037744,0.036762,
            0.035923,0.035071,0.034267,0.033456,0.032629,0.03184,0.031078,0.030363,0.02968,0.029028,
            0.028399,0.027788,0.027199,0.026642,0.026124,0.025635,0.02517,0.024719,0.024287,0.023867,
            0.023457,0.023061,0.022685,0.022334,0.021998,0.021675,0.02136,0.021055,0.020758,0.020467,
            0.020186,0.019919,0.019665,0.019421,0.019184,0.018951,0.018727,0.018514,0.018311,
            0.018118,0.017929,0.017745,0.017564,0.017387,0.017214,0.017046,0.016886,0.016732,
            0.016587,0.016446,0.016309,0.016174,0.016039,0.015906,0.015777,0.015653,0.015532,
            0.015418,0.015308,0.015202,0.015097,0.014991,0.014885,0.014782,0.014683,0.014588,0.0145,
            0.014415,0.014334,0.014254,0.014172,0.01409,0.014007,0.013926,0.013846,0.01377,0.013697,
            0.013628,0.013559,0.013491,0.013423,0.013354,0.013288,0.013223,0.01316,0.013099,0.01304,
            0.012983,0.012926,0.01287,0.012814,0.012758,0.012703,0.012649,0.012597,0.012547,0.012499,
            0.01245,0.012402,0.012352,0.012302,0.012254,0.012205,0.012158,0.012113,0.012068,0.012025,
            0.011982,0.01194,0.011899,0.011859,0.011819,0.01178,0.011741,0.011695283,0.01165546,
            0.011616029,0.011576983,0.011538317,0.011500024,0.011462099,0.011424535,0.011387327,
            0.01135047,0.011313958,0.011277785,0.011241946,0.011206437,0.011171253,0.011136388,
            0.011101837,0.011067597,0.011033662,0.011000028,0.010966691,0.010933646,0.010900889,
            0.010868416,0.010836222,0.010804305,0.01077266,0.010741283,0.01071017,0.010679318,
            0.010648723,0.010618382,0.010588291,0.010558447,0.010528846,0.010499485,0.010470361,
            0.010441471,0.010412812,0.010384381,0.010356174,0.010328189,0.010300423,0.010272873,
            0.010245536,0.01021841,0.010191491,0.010164778,0.010138268,0.010111958,0.010085846,
            0.010059928,0.010034204,0.01000867,0.009983324,0.009958164,0.009933188,0.009908393,
            0.009883777,0.009859339,0.009835075,0.009810984,0.009787064,0.009763313,0.009739729,
            0.00971631,0.009693054,0.00966996,0.009647024,0.009624247,0.009601625,0.009579157,
            0.009556841,0.009534676,0.009512659,0.009490791,0.009469067,0.009447488,0.009426051,
            0.009404755,0.009383599,0.00936258,0.009341698,0.00932095,0.009300337,0.009279855,
            0.009259504,0.009239282,0.009219188,0.009199221,0.009179379,0.009159662,0.009140066,
            0.009120593,0.009101239,0.009082005,0.009062888,0.009043888,0.009025004,0.009006234,
            0.008987576,0.008969031,0.008950597,0.008932272,0.008914057,0.008895949,0.008877947,
            0.008860051,0.00884226,0.008824572,0.008806987,0.008789503,0.00877212,0.008754837,
            0.008737652,0.008720565,0.008703575,0.008686681,0.008669882,0.008653177,0.008636566,
            0.008620047,0.008603619,0.008587282,0.008571035,0.008554878,0.008538808,0.008522826,
            0.008506931,0.008491122,0.008475398,0.008459758,0.008444202,0.008428729,0.008413338,
            0.008398029,0.0083828,0.008367652,0.008352583,0.008337592,0.00832268,0.008307845,
            0.008293086,0.008278404,0.008263797,0.008249265,0.008234806,0.008220422,0.00820611,
            0.00819187,0.008177702,0.008163606]
        # total number of points expected in the extended curve (original + extension)
        expected_result_npts = [305]
        # last distance (ft) of the unextended input curve
        max_dist = 997.3632
        # spacing (ft) between extrapolated points
        dist_inc = 6.56
        # NOTE(review): 15 fitting points here, although the docstring describes 16 — confirm intended
        num_pts_ext = 15
        ln_ln_trans = True
        # input curve: distances (ft) from 0 to 997.3632
        x_array_in = pd.Series([0.,6.5616,13.1232,19.6848,26.2464,
            32.808,39.3696,45.9312,52.4928,59.0544,65.616,72.1776,78.7392,85.3008,91.8624,98.424,104.9856,
            111.5472,118.1088,124.6704,131.232,137.7936,144.3552,150.9168,157.4784,164.04,170.6016,177.1632,
            183.7248,190.2864,196.848,203.4096,209.9712,216.5328,223.0944,229.656,236.2176,242.7792,249.3408,
            255.9024,262.464,269.0256,275.5872,282.1488,288.7104,295.272,301.8336,308.3952,314.9568,321.5184,
            328.08,334.6416,341.2032,347.7648,354.3264,360.888,367.4496,374.0112,380.5728,387.1344,393.696,
            400.2576,406.8192,413.3808,419.9424,426.504,433.0656,439.6272,446.1888,452.7504,459.312,465.8736,
            472.4352,478.9968,485.5584,492.12,498.6816,505.2432,511.8048,518.3664,524.928,531.4896,538.0512,
            544.6128,551.1744,557.736,564.2976,570.8592,577.4208,583.9824,590.544,597.1056,603.6672,610.2288,
            616.7904,623.352,629.9136,636.4752,643.0368,649.5984,656.16,662.7216,669.2832,675.8448,682.4064,
            688.968,695.5296,702.0912,708.6528,715.2144,721.776,728.3376,734.8992,741.4608,748.0224,754.584,
            761.1456,767.7072,774.2688,780.8304,787.392,793.9536,800.5152,807.0768,813.6384,820.2,826.7616,
            833.3232,839.8848,846.4464,853.008,859.5696,866.1312,872.6928,879.2544,885.816,892.3776,898.9392,
            905.5008,912.0624,918.624,925.1856,931.7472,938.3088,944.8704,951.432,957.9936,964.5552,971.1168,
            977.6784,984.24,990.8016,997.3632])
        # input curve: deposition (fraction of applied) at each distance
        y_array_in = pd.Series([0.49997,0.37451,0.29849,0.25004,0.2138,0.19455,0.18448,0.17591,0.1678,0.15421,0.1401,
            0.12693,0.11785,0.11144,0.10675,0.099496,0.092323,0.085695,0.079234,0.074253,0.070316,
            0.067191,0.064594,0.062337,0.060348,0.058192,0.055224,0.051972,0.049283,0.04757,
            0.046226,0.044969,0.043922,0.043027,0.041934,0.040528,0.039018,0.037744,0.036762,
            0.035923,0.035071,0.034267,0.033456,0.032629,0.03184,0.031078,0.030363,0.02968,0.029028,
            0.028399,0.027788,0.027199,0.026642,0.026124,0.025635,0.02517,0.024719,0.024287,0.023867,
            0.023457,0.023061,0.022685,0.022334,0.021998,0.021675,0.02136,0.021055,0.020758,0.020467,
            0.020186,0.019919,0.019665,0.019421,0.019184,0.018951,0.018727,0.018514,0.018311,
            0.018118,0.017929,0.017745,0.017564,0.017387,0.017214,0.017046,0.016886,0.016732,
            0.016587,0.016446,0.016309,0.016174,0.016039,0.015906,0.015777,0.015653,0.015532,
            0.015418,0.015308,0.015202,0.015097,0.014991,0.014885,0.014782,0.014683,0.014588,0.0145,
            0.014415,0.014334,0.014254,0.014172,0.01409,0.014007,0.013926,0.013846,0.01377,0.013697,
            0.013628,0.013559,0.013491,0.013423,0.013354,0.013288,0.013223,0.01316,0.013099,0.01304,
            0.012983,0.012926,0.01287,0.012814,0.012758,0.012703,0.012649,0.012597,0.012547,0.012499,
            0.01245,0.012402,0.012352,0.012302,0.012254,0.012205,0.012158,0.012113,0.012068,0.012025,
            0.011982,0.01194,0.011899,0.011859,0.011819,0.01178,0.011741])
        x_array_out, y_array_out = agdrift_empty.extend_curve(x_array_in, y_array_in, max_dist, dist_inc, num_pts_ext,
                                                              ln_ln_trans)
        npts_out = [len(y_array_out)]
        #
        #agdrift_empty.write_arrays_to_csv(x_array_out, y_array_out, "extend_data.csv")
        npt.assert_array_equal(expected_result_npts, npts_out, verbose=True)
        npt.assert_allclose(x_array_out, expected_result_x, rtol=1e-5, atol=0, err_msg='', verbose=True)
        npt.assert_allclose(y_array_out, expected_result_y, rtol=1e-5, atol=0, err_msg='', verbose=True)
    finally:
        pass
    # diagnostic output (reached only when the assertions above pass)
    tab1 = [x_array_out, expected_result_x]
    tab2 = [y_array_out, expected_result_y]
    print("\n")
    print(inspect.currentframe().f_code.co_name)
    print('expected {0} number of points and got {1} points'.format(expected_result_npts[0], npts_out[0]))
    print("x_array result/x_array_expected")
    print(tabulate(tab1, headers='keys', tablefmt='rst'))
    print("y_array result/y_array_expected")
    print(tabulate(tab2, headers='keys', tablefmt='rst'))
    return
def test_extend_curve1(self):
    """
    :description extends/extrapolates an x,y array of data points that reflect a ln ln relationship by selecting
                 a number of points near the end of the x,y arrays and fitting a line to the points
                 ln ln transforms (two ln ln transforms can be applied; one using the straight natural log of
                 each selected x,y point and one using a 'relative' value of each of the selected points --
                 the relative values are calculated by establishing a zero point closest to the selected
                 points

                 For AGDRIFT: extends distance vs deposition (fraction of applied) curve to enable model calculations
                 when area of interest (pond, wetland, terrestrial field) lie partially outside the original
                 curve (whose extent is 997 feet). The extension is achieved by fitting a line of best fit
                 to the last 16 points of the original curve. The x,y values representing the last 16 points
                 are natural log transforms of the distance and deposition values at the 16 points. Two ln ln
                 transforms are coded here, reflecting the fact that the AGDRIFT model (v2.1.1) uses each of them
                 under different circumstances (which I believe is not the intention but is the way the model
                 functions -- my guess is that one of the transforms was used and then a second one was coded
                 to increase the degree of conservativeness -- but the code was changed in only one of the two
                 places where the transformation occurs.

                 Finally, the AGDRIFT model extends the curve only when necessary (i.e., when it determines that
                 the area of interest lies partially beyond the last point of the original curve (997 ft). In
                 this code all the curves are extended out to 1994 ft, which represents the furthest distance that
                 the downwind edge of an area of concern can be specified. All scenario curves are extended here
                 because we are running multiple simulations (e.g., monte carlo) and instead of extending the
                 curves each time a simulation requires it (which may be multiple times for the same scenario
                 curve) we just do it for all curves up front. There is a case to be made that the
                 curves should be extended external to this code and simply provide the full curve in the SQLite
                 database containing the original curve.

    :param x_array: array of x values to be extended (must be at least 17 data points in original array)
    :param y_array: array of y values to be extended
    :param max_dist: maximum distance (ft) associated with unextended x values
    :param dist_inc: increment (ft) for each extended data point
    :param num_pts_ext: number of points at end of original x,y arrays to be used for extending the curve
    :param ln_ln_trans: form of transformation to perform (True: straight ln ln, False: relative ln ln)
    :return:
    """

    # create empty pandas dataframes to create empty object for this unittest
    agdrift_empty = self.create_agdrift_object()

    # pre-declare outputs so the finally-block prints below do not raise
    # NameError when an exception occurs before extend_curve() is called
    expected_result_x = pd.Series([], dtype='float')
    expected_result_y = pd.Series([], dtype='float')
    # x_array_in = pd.Series([], dtype='float')
    # y_array_in = pd.Series([], dtype='float')
    x_array_out = pd.Series([], dtype='float')
    y_array_out = pd.Series([], dtype='float')

    try:
        # expected x values: original 152 distances (0 .. 997.3632 ft) followed by
        # the extension out to ~1994 ft in dist_inc (6.56 ft) steps
        expected_result_x = [0.,6.5616,13.1232,19.6848,26.2464,
            32.808,39.3696,45.9312,52.4928,59.0544,65.616,72.1776,78.7392,85.3008,91.8624,98.424,104.9856,
            111.5472,118.1088,124.6704,131.232,137.7936,144.3552,150.9168,157.4784,164.04,170.6016,177.1632,
            183.7248,190.2864,196.848,203.4096,209.9712,216.5328,223.0944,229.656,236.2176,242.7792,249.3408,
            255.9024,262.464,269.0256,275.5872,282.1488,288.7104,295.272,301.8336,308.3952,314.9568,321.5184,
            328.08,334.6416,341.2032,347.7648,354.3264,360.888,367.4496,374.0112,380.5728,387.1344,393.696,
            400.2576,406.8192,413.3808,419.9424,426.504,433.0656,439.6272,446.1888,452.7504,459.312,465.8736,
            472.4352,478.9968,485.5584,492.12,498.6816,505.2432,511.8048,518.3664,524.928,531.4896,538.0512,
            544.6128,551.1744,557.736,564.2976,570.8592,577.4208,583.9824,590.544,597.1056,603.6672,610.2288,
            616.7904,623.352,629.9136,636.4752,643.0368,649.5984,656.16,662.7216,669.2832,675.8448,682.4064,
            688.968,695.5296,702.0912,708.6528,715.2144,721.776,728.3376,734.8992,741.4608,748.0224,754.584,
            761.1456,767.7072,774.2688,780.8304,787.392,793.9536,800.5152,807.0768,813.6384,820.2,826.7616,
            833.3232,839.8848,846.4464,853.008,859.5696,866.1312,872.6928,879.2544,885.816,892.3776,898.9392,
            905.5008,912.0624,918.624,925.1856,931.7472,938.3088,944.8704,951.432,957.9936,964.5552,971.1168,
            977.6784,984.24,990.8016,997.3632,
            1003.9232,1010.4832,1017.0432,1023.6032,1030.1632,1036.7232,1043.2832,1049.8432,1056.4032,
            1062.9632,1069.5232,1076.0832,1082.6432,1089.2032,1095.7632,1102.3232,1108.8832,1115.4432,
            1122.0032,1128.5632,1135.1232,1141.6832,1148.2432,1154.8032,1161.3632,1167.9232,1174.4832,
            1181.0432,1187.6032,1194.1632,1200.7232,1207.2832,1213.8432,1220.4032,1226.9632,1233.5232,
            1240.0832,1246.6432,1253.2032,1259.7632,1266.3232,1272.8832,1279.4432,1286.0032,1292.5632,
            1299.1232,1305.6832,1312.2432,1318.8032,1325.3632,1331.9232,1338.4832,1345.0432,1351.6032,
            1358.1632,1364.7232,1371.2832,1377.8432,1384.4032,1390.9632,1397.5232,1404.0832,1410.6432,
            1417.2032,1423.7632,1430.3232,1436.8832,1443.4432,1450.0032,1456.5632,1463.1232,1469.6832,
            1476.2432,1482.8032,1489.3632,1495.9232,1502.4832,1509.0432,1515.6032,1522.1632,1528.7232,
            1535.2832,1541.8432,1548.4032,1554.9632,1561.5232,1568.0832,1574.6432,1581.2032,1587.7632,
            1594.3232,1600.8832,1607.4432,1614.0032,1620.5632,1627.1232,1633.6832,1640.2432,1646.8032,
            1653.3632,1659.9232,1666.4832,1673.0432,1679.6032,1686.1632,1692.7232,1699.2832,1705.8432,
            1712.4032,1718.9632,1725.5232,1732.0832,1738.6432,1745.2032,1751.7632,1758.3232,1764.8832,
            1771.4432,1778.0032,1784.5632,1791.1232,1797.6832,1804.2432,1810.8032,1817.3632,1823.9232,
            1830.4832,1837.0432,1843.6032,1850.1632,1856.7232,1863.2832,1869.8432,1876.4032,1882.9632,
            1889.5232,1896.0832,1902.6432,1909.2032,1915.7632,1922.3232,1928.8832,1935.4432,1942.0032,
            1948.5632,1955.1232,1961.6832,1968.2432,1974.8032,1981.3632,1987.9232,1994.4832]
        # expected y values: original deposition fractions followed by the
        # values produced by the 'relative' ln ln extrapolation (ln_ln_trans=False)
        expected_result_y = [0.49997,0.37451,0.29849,0.25004,0.2138,0.19455,0.18448,0.17591,0.1678,0.15421,0.1401,
            0.12693,0.11785,0.11144,0.10675,0.099496,0.092323,0.085695,0.079234,0.074253,0.070316,
            0.067191,0.064594,0.062337,0.060348,0.058192,0.055224,0.051972,0.049283,0.04757,
            0.046226,0.044969,0.043922,0.043027,0.041934,0.040528,0.039018,0.037744,0.036762,
            0.035923,0.035071,0.034267,0.033456,0.032629,0.03184,0.031078,0.030363,0.02968,0.029028,
            0.028399,0.027788,0.027199,0.026642,0.026124,0.025635,0.02517,0.024719,0.024287,0.023867,
            0.023457,0.023061,0.022685,0.022334,0.021998,0.021675,0.02136,0.021055,0.020758,0.020467,
            0.020186,0.019919,0.019665,0.019421,0.019184,0.018951,0.018727,0.018514,0.018311,
            0.018118,0.017929,0.017745,0.017564,0.017387,0.017214,0.017046,0.016886,0.016732,
            0.016587,0.016446,0.016309,0.016174,0.016039,0.015906,0.015777,0.015653,0.015532,
            0.015418,0.015308,0.015202,0.015097,0.014991,0.014885,0.014782,0.014683,0.014588,0.0145,
            0.014415,0.014334,0.014254,0.014172,0.01409,0.014007,0.013926,0.013846,0.01377,0.013697,
            0.013628,0.013559,0.013491,0.013423,0.013354,0.013288,0.013223,0.01316,0.013099,0.01304,
            0.012983,0.012926,0.01287,0.012814,0.012758,0.012703,0.012649,0.012597,0.012547,0.012499,
            0.01245,0.012402,0.012352,0.012302,0.012254,0.012205,0.012158,0.012113,0.012068,0.012025,
            0.011982,0.01194,0.011899,0.011859,0.011819,0.01178,0.011741,0.011826349,0.011812263,
            0.011798955,0.011786343,0.011774359,0.011762944,0.011752047,0.011741623,0.011731633,
            0.011722043,0.011712822,0.011703943,0.011695383,0.011687118,0.01167913,0.011671401,
            0.011663915,0.011656656,0.011649613,0.011642772,0.011636122,0.011629653,0.011623356,
            0.011617221,0.011611241,0.011605408,0.011599715,0.011594155,0.011588724,0.011583413,
            0.01157822,0.011573138,0.011568163,0.011563291,0.011558517,0.011553838,0.011549249,
            0.011544748,0.011540332,0.011535997,0.01153174,0.011527558,0.01152345,0.011519412,
            0.011515442,0.011511538,0.011507698,0.011503919,0.011500201,0.01149654,0.011492935,
            0.011489385,0.011485888,0.011482442,0.011479046,0.011475699,0.011472399,0.011469144,
            0.011465934,0.011462768,0.011459644,0.011456561,0.011453518,0.011450514,0.011447548,
            0.011444619,0.011441727,0.011438869,0.011436047,0.011433258,0.011430502,0.011427778,
            0.011425086,0.011422424,0.011419792,0.01141719,0.011414616,0.011412071,0.011409553,
            0.011407062,0.011404597,0.011402158,0.011399744,0.011397355,0.01139499,0.01139265,
            0.011390332,0.011388037,0.011385765,0.011383515,0.011381286,0.011379078,0.011376891,
            0.011374725,0.011372579,0.011370452,0.011368344,0.011366256,0.011364186,0.011362134,
            0.011360101,0.011358085,0.011356086,0.011354104,0.01135214,0.011350191,0.011348259,
            0.011346343,0.011344443,0.011342558,0.011340688,0.011338834,0.011336994,0.011335168,
            0.011333357,0.01133156,0.011329777,0.011328007,0.011326251,0.011324508,0.011322778,
            0.011321061,0.011319356,0.011317664,0.011315985,0.011314317,0.011312661,0.011311018,
            0.011309385,0.011307764,0.011306155,0.011304557,0.011302969,0.011301393,0.011299827,
            0.011298272,0.011296727,0.011295192,0.011293668,0.011292153,0.011290649,0.011289154,
            0.011287669,0.011286193,0.011284727,0.011283269,0.011281822,0.011280383,0.011278953,
            0.011277532,0.011276119,0.011274716]
        expected_result_npts = [305]

        # curve-extension controls: extend from 997.3632 ft in 6.56 ft steps using
        # the last 16 points; ln_ln_trans False selects the 'relative' ln ln transform
        max_dist = 997.3632
        dist_inc = 6.56
        num_pts_ext = 16
        ln_ln_trans = False

        x_array_in = pd.Series([0.,6.5616,13.1232,19.6848,26.2464,
            32.808,39.3696,45.9312,52.4928,59.0544,65.616,72.1776,78.7392,85.3008,91.8624,98.424,104.9856,
            111.5472,118.1088,124.6704,131.232,137.7936,144.3552,150.9168,157.4784,164.04,170.6016,177.1632,
            183.7248,190.2864,196.848,203.4096,209.9712,216.5328,223.0944,229.656,236.2176,242.7792,249.3408,
            255.9024,262.464,269.0256,275.5872,282.1488,288.7104,295.272,301.8336,308.3952,314.9568,321.5184,
            328.08,334.6416,341.2032,347.7648,354.3264,360.888,367.4496,374.0112,380.5728,387.1344,393.696,
            400.2576,406.8192,413.3808,419.9424,426.504,433.0656,439.6272,446.1888,452.7504,459.312,465.8736,
            472.4352,478.9968,485.5584,492.12,498.6816,505.2432,511.8048,518.3664,524.928,531.4896,538.0512,
            544.6128,551.1744,557.736,564.2976,570.8592,577.4208,583.9824,590.544,597.1056,603.6672,610.2288,
            616.7904,623.352,629.9136,636.4752,643.0368,649.5984,656.16,662.7216,669.2832,675.8448,682.4064,
            688.968,695.5296,702.0912,708.6528,715.2144,721.776,728.3376,734.8992,741.4608,748.0224,754.584,
            761.1456,767.7072,774.2688,780.8304,787.392,793.9536,800.5152,807.0768,813.6384,820.2,826.7616,
            833.3232,839.8848,846.4464,853.008,859.5696,866.1312,872.6928,879.2544,885.816,892.3776,898.9392,
            905.5008,912.0624,918.624,925.1856,931.7472,938.3088,944.8704,951.432,957.9936,964.5552,971.1168,
            977.6784,984.24,990.8016,997.3632])
        y_array_in = pd.Series([0.49997,0.37451,0.29849,0.25004,0.2138,0.19455,0.18448,0.17591,0.1678,0.15421,0.1401,
            0.12693,0.11785,0.11144,0.10675,0.099496,0.092323,0.085695,0.079234,0.074253,0.070316,
            0.067191,0.064594,0.062337,0.060348,0.058192,0.055224,0.051972,0.049283,0.04757,
            0.046226,0.044969,0.043922,0.043027,0.041934,0.040528,0.039018,0.037744,0.036762,
            0.035923,0.035071,0.034267,0.033456,0.032629,0.03184,0.031078,0.030363,0.02968,0.029028,
            0.028399,0.027788,0.027199,0.026642,0.026124,0.025635,0.02517,0.024719,0.024287,0.023867,
            0.023457,0.023061,0.022685,0.022334,0.021998,0.021675,0.02136,0.021055,0.020758,0.020467,
            0.020186,0.019919,0.019665,0.019421,0.019184,0.018951,0.018727,0.018514,0.018311,
            0.018118,0.017929,0.017745,0.017564,0.017387,0.017214,0.017046,0.016886,0.016732,
            0.016587,0.016446,0.016309,0.016174,0.016039,0.015906,0.015777,0.015653,0.015532,
            0.015418,0.015308,0.015202,0.015097,0.014991,0.014885,0.014782,0.014683,0.014588,0.0145,
            0.014415,0.014334,0.014254,0.014172,0.01409,0.014007,0.013926,0.013846,0.01377,0.013697,
            0.013628,0.013559,0.013491,0.013423,0.013354,0.013288,0.013223,0.01316,0.013099,0.01304,
            0.012983,0.012926,0.01287,0.012814,0.012758,0.012703,0.012649,0.012597,0.012547,0.012499,
            0.01245,0.012402,0.012352,0.012302,0.012254,0.012205,0.012158,0.012113,0.012068,0.012025,
            0.011982,0.01194,0.011899,0.011859,0.011819,0.01178,0.011741])

        x_array_out, y_array_out = agdrift_empty.extend_curve(x_array_in, y_array_in, max_dist, dist_inc, num_pts_ext,
                                                              ln_ln_trans)
        npts_out = [len(y_array_out)]
        #
        #agdrift_empty.write_arrays_to_csv(x_array_out, y_array_out, "extend_data.csv")

        # check point count exactly; compare values with relative tolerance
        npt.assert_array_equal(expected_result_npts, npts_out, verbose=True)
        npt.assert_allclose(x_array_out, expected_result_x, rtol=1e-5, atol=0, err_msg='', verbose=True)
        npt.assert_allclose(y_array_out, expected_result_y, rtol=1e-5, atol=0, err_msg='', verbose=True)
    finally:
        pass
        # NOTE(review): these prints reference npts_out/expected_result_npts, which are
        # only bound inside the try body -- a failure before their assignment would
        # raise NameError here (pre-existing behavior, left unchanged)
        tab1 = [x_array_out, expected_result_x]
        tab2 = [y_array_out, expected_result_y]
        print("\n")
        print(inspect.currentframe().f_code.co_name)
        print('expected {0} number of points and got {1} points'.format(expected_result_npts[0], npts_out[0]))
        print("x_array result/x_array_expected")
        print(tabulate(tab1, headers='keys', tablefmt='rst'))
        print("y_array result/y_array_expected")
        print(tabulate(tab2, headers='keys', tablefmt='rst'))
    return
# unittest discovery/execution order for this TestCase:
# 1) call the setup method
# 2) then call every method starting with "test",
# 3) then the teardown method
if __name__ == '__main__':
    unittest.main()
| 65.427173
| 130
| 0.570404
| 23,341
| 167,101
| 3.966968
| 0.099739
| 0.006588
| 0.009266
| 0.011794
| 0.82246
| 0.810158
| 0.797177
| 0.790816
| 0.781906
| 0.774616
| 0
| 0.342492
| 0.307138
| 167,101
| 2,553
| 131
| 65.452801
| 0.457254
| 0.179556
| 0
| 0.760509
| 0
| 0
| 0.060121
| 0.009807
| 0
| 0
| 0
| 0
| 0.036504
| 1
| 0.019358
| false
| 0.007743
| 0.006084
| 0
| 0.044248
| 0.079093
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
f7e33255cecf34be2a64f39cc577f4baac49dfff
| 66,762
|
py
|
Python
|
lookerapi/apis/group_api.py
|
llooker/python_sdk
|
8364839b1de0519771f2f749e45b4e6cb1c75577
|
[
"MIT"
] | 12
|
2019-05-17T11:50:45.000Z
|
2021-11-11T21:37:05.000Z
|
lookerapi/apis/group_api.py
|
meetup/lookerapi
|
4e4160dbe007eb65ac8f449bead0cdc06090b07b
|
[
"MIT"
] | 4
|
2019-06-19T20:13:14.000Z
|
2020-10-13T21:13:47.000Z
|
lookerapi/apis/group_api.py
|
meetup/lookerapi
|
4e4160dbe007eb65ac8f449bead0cdc06090b07b
|
[
"MIT"
] | 10
|
2018-10-10T20:45:00.000Z
|
2022-02-21T03:12:58.000Z
|
# coding: utf-8
"""
Looker API 3.0 Reference
### Authorization The Looker API uses Looker **API3** credentials for authorization and access control. Looker admins can create API3 credentials on Looker's **Admin/Users** page. Pass API3 credentials to the **/login** endpoint to obtain a temporary access_token. Include that access_token in the Authorization header of Looker API requests. For details, see [Looker API Authorization](https://looker.com/docs/r/api/authorization) ### Client SDKs The Looker API is a RESTful system that should be usable by any programming language capable of making HTTPS requests. Client SDKs for a variety of programming languages can be generated from the Looker API's Swagger JSON metadata to streamline use of the Looker API in your applications. A client SDK for Ruby is available as an example. For more information, see [Looker API Client SDKs](https://looker.com/docs/r/api/client_sdks) ### Try It Out! The 'api-docs' page served by the Looker instance includes 'Try It Out!' buttons for each API method. After logging in with API3 credentials, you can use the \"Try It Out!\" buttons to call the API directly from the documentation page to interactively explore API features and responses. ### Versioning Future releases of Looker will expand this API release-by-release to securely expose more and more of the core power of Looker to API client applications. API endpoints marked as \"beta\" may receive breaking changes without warning. Stable (non-beta) API endpoints should not receive breaking changes in future releases. For more information, see [Looker API Versioning](https://looker.com/docs/r/api/versioning)
OpenAPI spec version: 3.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class GroupApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Bind this API wrapper to an ApiClient.

    If no client is supplied, reuse the shared client held by the global
    Configuration, creating it on first use.
    """
    config = Configuration()
    if not api_client:
        if not config.api_client:
            config.api_client = ApiClient()
        api_client = config.api_client
    self.api_client = api_client
def add_group_group(self, group_id, **kwargs):
    """
    Add a Group to Group
    ### Adds a new group to a group.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_group_group(group_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param GroupIdForGroupInclusion body: Group id to add
    :return: Group
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the async path (callback set -> request thread) and the sync path
    # (-> deserialized data) simply delegate to the *_with_http_info variant.
    kwargs['_return_http_data_only'] = True
    return self.add_group_group_with_http_info(group_id, **kwargs)
def add_group_group_with_http_info(self, group_id, **kwargs):
    """
    Add a Group to Group
    ### Adds a new group to a group.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_group_group_with_http_info(group_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param GroupIdForGroupInclusion body: Group id to add
    :return: Group
             If the method is called asynchronously,
             returns the request thread.
    """
    # accepted keyword arguments for this endpoint plus client-control kwargs
    all_params = ['group_id', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshots self/group_id/kwargs/all_params; kwargs are then
    # validated against all_params and merged in -- only specific string keys
    # ('group_id', 'body', 'callback', ...) are ever read back from params
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_group_group" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group_id' is set
    if ('group_id' not in params) or (params['group_id'] is None):
        raise ValueError("Missing the required parameter `group_id` when calling `add_group_group`")

    collection_formats = {}

    # POST /groups/{group_id}/groups
    resource_path = '/groups/{group_id}/groups'.replace('{format}', 'json')
    path_params = {}
    if 'group_id' in params:
        path_params['group_id'] = params['group_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # request body: the GroupIdForGroupInclusion payload, if supplied
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Group',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def add_group_user(self, group_id, **kwargs):
    """
    Add a User to Group
    ### Adds a new user to a group.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_group_user(group_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param GroupIdForGroupUserInclusion body: User id to add
    :return: User
             If the method is called asynchronously,
             returns the request thread.
    """
    # Async (callback set) and sync paths both reduce to the same delegate
    # call: the async branch returns the request thread, sync the User data.
    kwargs['_return_http_data_only'] = True
    return self.add_group_user_with_http_info(group_id, **kwargs)
def add_group_user_with_http_info(self, group_id, **kwargs):
    """
    Add a User to Group
    ### Adds a new user to a group.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.add_group_user_with_http_info(group_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param GroupIdForGroupUserInclusion body: User id to add
    :return: User
             If the method is called asynchronously,
             returns the request thread.
    """
    # accepted keyword arguments for this endpoint plus client-control kwargs
    all_params = ['group_id', 'body']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshot; kwargs are validated and merged in, and only
    # specific string keys are ever read back from params
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_group_user" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group_id' is set
    if ('group_id' not in params) or (params['group_id'] is None):
        raise ValueError("Missing the required parameter `group_id` when calling `add_group_user`")

    collection_formats = {}

    # POST /groups/{group_id}/users
    resource_path = '/groups/{group_id}/users'.replace('{format}', 'json')
    path_params = {}
    if 'group_id' in params:
        path_params['group_id'] = params['group_id']

    query_params = {}

    header_params = {}

    form_params = []
    local_var_files = {}

    # request body: the GroupIdForGroupUserInclusion payload, if supplied
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='User',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def all_group_groups(self, group_id, **kwargs):
    """
    Get All Groups in Group
    ### Get information about all the groups in a group
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.all_group_groups(group_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param str fields: Requested fields.
    :return: list[Group]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Async (callback set) and sync paths both reduce to the same delegate
    # call: the async branch returns the request thread, sync the list[Group].
    kwargs['_return_http_data_only'] = True
    return self.all_group_groups_with_http_info(group_id, **kwargs)
def all_group_groups_with_http_info(self, group_id, **kwargs):
    """
    Get All Groups in Group
    ### Get information about all the groups in a group
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.all_group_groups_with_http_info(group_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param str fields: Requested fields.
    :return: list[Group]
             If the method is called asynchronously,
             returns the request thread.
    """
    # accepted keyword arguments for this endpoint plus client-control kwargs
    all_params = ['group_id', 'fields']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshot; kwargs are validated and merged in, and only
    # specific string keys are ever read back from params
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method all_group_groups" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group_id' is set
    if ('group_id' not in params) or (params['group_id'] is None):
        raise ValueError("Missing the required parameter `group_id` when calling `all_group_groups`")

    collection_formats = {}

    # GET /groups/{group_id}/groups
    resource_path = '/groups/{group_id}/groups'.replace('{format}', 'json')
    path_params = {}
    if 'group_id' in params:
        path_params['group_id'] = params['group_id']

    # optional response-field filter
    query_params = {}
    if 'fields' in params:
        query_params['fields'] = params['fields']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[Group]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def all_group_users(self, group_id, **kwargs):
    """
    Get All Users in Group
    ### Get information about all the users directly included in a group.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.all_group_users(group_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param str fields: Requested fields.
    :param int page: Requested page.
    :param int per_page: Results per page.
    :param str sorts: Fields to sort by.
    :return: list[User]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Async (callback set) and sync paths both reduce to the same delegate
    # call: the async branch returns the request thread, sync the list[User].
    kwargs['_return_http_data_only'] = True
    return self.all_group_users_with_http_info(group_id, **kwargs)
def all_group_users_with_http_info(self, group_id, **kwargs):
    """
    Get All Users in Group
    ### Get information about all the users directly included in a group.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.all_group_users_with_http_info(group_id, callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param str fields: Requested fields.
    :param int page: Requested page.
    :param int per_page: Results per page.
    :param str sorts: Fields to sort by.
    :return: list[User]
             If the method is called asynchronously,
             returns the request thread.
    """
    # accepted keyword arguments for this endpoint plus client-control kwargs
    all_params = ['group_id', 'fields', 'page', 'per_page', 'sorts']
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() snapshot; kwargs are validated and merged in, and only
    # specific string keys are ever read back from params
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method all_group_users" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group_id' is set
    if ('group_id' not in params) or (params['group_id'] is None):
        raise ValueError("Missing the required parameter `group_id` when calling `all_group_users`")

    collection_formats = {}

    # GET /groups/{group_id}/users
    resource_path = '/groups/{group_id}/users'.replace('{format}', 'json')
    path_params = {}
    if 'group_id' in params:
        path_params['group_id'] = params['group_id']

    # optional field filter and pagination/sort controls
    query_params = {}
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'page' in params:
        query_params['page'] = params['page']
    if 'per_page' in params:
        query_params['per_page'] = params['per_page']
    if 'sorts' in params:
        query_params['sorts'] = params['sorts']

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])

    # Authentication setting
    auth_settings = []

    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[User]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def all_groups(self, **kwargs):
    """
    Get All Groups
    ### Get information about all groups.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.all_groups(callback=callback_function)

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str fields: Requested fields.
    :param int page: Requested page.
    :param int per_page: Results per page.
    :param str sorts: Fields to sort by.
    :param list[int] ids: Optional list of ids to get specific groups.
    :param int content_metadata_id: Id of content metadata to which groups must have access.
    :param bool can_add_to_content_metadata: Select only groups that either can/cannot be given access to content.
    :return: list[Group]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Async (callback set) and sync paths both reduce to the same delegate
    # call: the async branch returns the request thread, sync the list[Group].
    kwargs['_return_http_data_only'] = True
    return self.all_groups_with_http_info(**kwargs)
def all_groups_with_http_info(self, **kwargs):
    """
    Get All Groups
    ### Get information about all groups.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.all_groups_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param str fields: Requested fields.
    :param int page: Requested page.
    :param int per_page: Results per page.
    :param str sorts: Fields to sort by.
    :param list[int] ids: Optional of ids to get specific groups.
    :param int content_metadata_id: Id of content metadata to which groups must have access.
    :param bool can_add_to_content_metadata: Select only groups that either can/cannot be given access to content.
    :return: list[Group]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of keyword arguments this endpoint accepts; anything
    # else is a caller bug and raises TypeError below.
    all_params = ['fields', 'page', 'per_page', 'sorts', 'ids', 'content_metadata_id', 'can_add_to_content_metadata']
    # Transport-level options accepted by every generated endpoint.
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() is snapshotted here on purpose (generated-code
    # idiom); validated kwargs are merged into it and looked up by name.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method all_groups" % key
            )
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    # '{format}' replacement is legacy swagger-codegen boilerplate.
    resource_path = '/groups'.replace('{format}', 'json')
    path_params = {}
    # Only forward query parameters the caller actually supplied.
    query_params = {}
    if 'fields' in params:
        query_params['fields'] = params['fields']
    if 'page' in params:
        query_params['page'] = params['page']
    if 'per_page' in params:
        query_params['per_page'] = params['per_page']
    if 'sorts' in params:
        query_params['sorts'] = params['sorts']
    if 'ids' in params:
        query_params['ids'] = params['ids']
        # list[int] ids are serialized as a comma-separated string.
        collection_formats['ids'] = 'csv'
    if 'content_metadata_id' in params:
        query_params['content_metadata_id'] = params['content_metadata_id']
    if 'can_add_to_content_metadata' in params:
        query_params['can_add_to_content_metadata'] = params['can_add_to_content_metadata']
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='list[Group]',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def create_group(self, **kwargs):
    """
    Create Group
    ### Creates a new group (admin only).
    Synchronous by default; pass a `callback` callable to issue the
    request asynchronously, in which case the request thread is
    returned and the callback is invoked with the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_group(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param Group body: Group
    :param str fields: Requested fields.
    :return: Group
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this wrapper want only the payload, not the full
    # (data, status_code, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The helper returns the request thread (async) or the
    # deserialized data (sync); forward its result either way.
    return self.create_group_with_http_info(**kwargs)
def create_group_with_http_info(self, **kwargs):
    """
    Create Group
    ### Creates a new group (admin only).
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.create_group_with_http_info(callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param Group body: Group
    :param str fields: Requested fields.
    :return: Group
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of accepted keyword arguments; unknown ones raise
    # TypeError below.
    all_params = ['body', 'fields']
    # Transport-level options accepted by every generated endpoint.
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() snapshot is the generated-code idiom; validated
    # kwargs are merged into it and looked up by name afterwards.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_group" % key
            )
        params[key] = val
    del params['kwargs']
    collection_formats = {}
    # '{format}' replacement is legacy swagger-codegen boilerplate.
    resource_path = '/groups'.replace('{format}', 'json')
    path_params = {}
    query_params = {}
    if 'fields' in params:
        query_params['fields'] = params['fields']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # The optional Group payload becomes the POST request body.
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'POST',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Group',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_group(self, group_id, **kwargs):
    """
    Delete Group
    ### Deletes a group (admin only).
    Synchronous by default; pass a `callback` callable to issue the
    request asynchronously, in which case the request thread is
    returned and the callback is invoked with the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_group(group_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    # Strip the status/header tuple; callers want the payload only.
    kwargs['_return_http_data_only'] = True
    # The helper returns the request thread (async) or the
    # deserialized data (sync); forward its result either way.
    return self.delete_group_with_http_info(group_id, **kwargs)
def delete_group_with_http_info(self, group_id, **kwargs):
    """
    Delete Group
    ### Deletes a group (admin only).
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_group_with_http_info(group_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :return: str
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of accepted keyword arguments; unknown ones raise
    # TypeError below.
    all_params = ['group_id']
    # Transport-level options accepted by every generated endpoint.
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() snapshot captures group_id here (generated-code
    # idiom); validated kwargs are merged in and looked up by name.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_group" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group_id' is set
    if ('group_id' not in params) or (params['group_id'] is None):
        raise ValueError("Missing the required parameter `group_id` when calling `delete_group`")
    collection_formats = {}
    # '{format}' replacement is legacy swagger-codegen boilerplate.
    resource_path = '/groups/{group_id}'.replace('{format}', 'json')
    # group_id is substituted into the URL path by the api_client.
    path_params = {}
    if 'group_id' in params:
        path_params['group_id'] = params['group_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # DELETE request: no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='str',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_group_from_group(self, group_id, deleting_group_id, **kwargs):
    """
    Deletes a Group from Group
    ### Removes a group from a group.
    Synchronous by default; pass a `callback` callable to issue the
    request asynchronously, in which case the request thread is
    returned and the callback is invoked with the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_group_from_group(group_id, deleting_group_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param int deleting_group_id: Id of group to delete (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Strip the status/header tuple; callers want the payload only.
    kwargs['_return_http_data_only'] = True
    # The helper returns the request thread (async) or the
    # deserialized data (sync); forward its result either way.
    return self.delete_group_from_group_with_http_info(group_id, deleting_group_id, **kwargs)
def delete_group_from_group_with_http_info(self, group_id, deleting_group_id, **kwargs):
    """
    Deletes a Group from Group
    ### Removes a group from a group.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_group_from_group_with_http_info(group_id, deleting_group_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param int deleting_group_id: Id of group to delete (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of accepted keyword arguments; unknown ones raise
    # TypeError below.
    all_params = ['group_id', 'deleting_group_id']
    # Transport-level options accepted by every generated endpoint.
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() snapshot captures both required ids here
    # (generated-code idiom); validated kwargs are merged in below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_group_from_group" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group_id' is set
    if ('group_id' not in params) or (params['group_id'] is None):
        raise ValueError("Missing the required parameter `group_id` when calling `delete_group_from_group`")
    # verify the required parameter 'deleting_group_id' is set
    if ('deleting_group_id' not in params) or (params['deleting_group_id'] is None):
        raise ValueError("Missing the required parameter `deleting_group_id` when calling `delete_group_from_group`")
    collection_formats = {}
    # '{format}' replacement is legacy swagger-codegen boilerplate.
    resource_path = '/groups/{group_id}/groups/{deleting_group_id}'.replace('{format}', 'json')
    # Both ids are substituted into the URL path by the api_client.
    path_params = {}
    if 'group_id' in params:
        path_params['group_id'] = params['group_id']
    if 'deleting_group_id' in params:
        path_params['deleting_group_id'] = params['deleting_group_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # DELETE request: no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_group_user(self, group_id, user_id, **kwargs):
    """
    Remove a User from Group
    ### Removes a user from a group.
    Synchronous by default; pass a `callback` callable to issue the
    request asynchronously, in which case the request thread is
    returned and the callback is invoked with the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_group_user(group_id, user_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param int user_id: Id of user to remove from group (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Strip the status/header tuple; callers want the payload only.
    kwargs['_return_http_data_only'] = True
    # The helper returns the request thread (async) or the
    # deserialized data (sync); forward its result either way.
    return self.delete_group_user_with_http_info(group_id, user_id, **kwargs)
def delete_group_user_with_http_info(self, group_id, user_id, **kwargs):
    """
    Remove a User from Group
    ### Removes a user from a group.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_group_user_with_http_info(group_id, user_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param int user_id: Id of user to remove from group (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of accepted keyword arguments; unknown ones raise
    # TypeError below.
    all_params = ['group_id', 'user_id']
    # Transport-level options accepted by every generated endpoint.
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() snapshot captures both required ids here
    # (generated-code idiom); validated kwargs are merged in below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_group_user" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group_id' is set
    if ('group_id' not in params) or (params['group_id'] is None):
        raise ValueError("Missing the required parameter `group_id` when calling `delete_group_user`")
    # verify the required parameter 'user_id' is set
    if ('user_id' not in params) or (params['user_id'] is None):
        raise ValueError("Missing the required parameter `user_id` when calling `delete_group_user`")
    collection_formats = {}
    # '{format}' replacement is legacy swagger-codegen boilerplate.
    resource_path = '/groups/{group_id}/users/{user_id}'.replace('{format}', 'json')
    # Both ids are substituted into the URL path by the api_client.
    path_params = {}
    if 'group_id' in params:
        path_params['group_id'] = params['group_id']
    if 'user_id' in params:
        path_params['user_id'] = params['user_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # DELETE request: no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def delete_user_attribute_group_value(self, group_id, user_attribute_id, **kwargs):
    """
    Delete User Attribute Group Value
    ### Remove a user attribute value from a group.
    Synchronous by default; pass a `callback` callable to issue the
    request asynchronously, in which case the request thread is
    returned and the callback is invoked with the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_user_attribute_group_value(group_id, user_attribute_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param int user_attribute_id: Id of user attribute (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Strip the status/header tuple; callers want the payload only.
    kwargs['_return_http_data_only'] = True
    # The helper returns the request thread (async) or the
    # deserialized data (sync); forward its result either way.
    return self.delete_user_attribute_group_value_with_http_info(group_id, user_attribute_id, **kwargs)
def delete_user_attribute_group_value_with_http_info(self, group_id, user_attribute_id, **kwargs):
    """
    Delete User Attribute Group Value
    ### Remove a user attribute value from a group.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.delete_user_attribute_group_value_with_http_info(group_id, user_attribute_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param int user_attribute_id: Id of user attribute (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of accepted keyword arguments; unknown ones raise
    # TypeError below.
    all_params = ['group_id', 'user_attribute_id']
    # Transport-level options accepted by every generated endpoint.
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() snapshot captures both required ids here
    # (generated-code idiom); validated kwargs are merged in below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_user_attribute_group_value" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group_id' is set
    if ('group_id' not in params) or (params['group_id'] is None):
        raise ValueError("Missing the required parameter `group_id` when calling `delete_user_attribute_group_value`")
    # verify the required parameter 'user_attribute_id' is set
    if ('user_attribute_id' not in params) or (params['user_attribute_id'] is None):
        raise ValueError("Missing the required parameter `user_attribute_id` when calling `delete_user_attribute_group_value`")
    collection_formats = {}
    # '{format}' replacement is legacy swagger-codegen boilerplate.
    resource_path = '/groups/{group_id}/attribute_values/{user_attribute_id}'.replace('{format}', 'json')
    # Both ids are substituted into the URL path by the api_client.
    path_params = {}
    if 'group_id' in params:
        path_params['group_id'] = params['group_id']
    if 'user_attribute_id' in params:
        path_params['user_attribute_id'] = params['user_attribute_id']
    query_params = {}
    header_params = {}
    form_params = []
    local_var_files = {}
    # DELETE request: no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'DELETE',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type=None,
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def group(self, group_id, **kwargs):
    """
    Get Group
    ### Get information about a group.
    Synchronous by default; pass a `callback` callable to issue the
    request asynchronously, in which case the request thread is
    returned and the callback is invoked with the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.group(group_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param str fields: Requested fields.
    :return: Group
        If the method is called asynchronously,
        returns the request thread.
    """
    # Strip the status/header tuple; callers want the payload only.
    kwargs['_return_http_data_only'] = True
    # The helper returns the request thread (async) or the
    # deserialized data (sync); forward its result either way.
    return self.group_with_http_info(group_id, **kwargs)
def group_with_http_info(self, group_id, **kwargs):
    """
    Get Group
    ### Get information about a group.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.group_with_http_info(group_id, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param str fields: Requested fields.
    :return: Group
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of accepted keyword arguments; unknown ones raise
    # TypeError below.
    all_params = ['group_id', 'fields']
    # Transport-level options accepted by every generated endpoint.
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() snapshot captures group_id here (generated-code
    # idiom); validated kwargs are merged in and looked up by name.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method group" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group_id' is set
    if ('group_id' not in params) or (params['group_id'] is None):
        raise ValueError("Missing the required parameter `group_id` when calling `group`")
    collection_formats = {}
    # '{format}' replacement is legacy swagger-codegen boilerplate.
    resource_path = '/groups/{group_id}'.replace('{format}', 'json')
    # group_id is substituted into the URL path by the api_client.
    path_params = {}
    if 'group_id' in params:
        path_params['group_id'] = params['group_id']
    query_params = {}
    if 'fields' in params:
        query_params['fields'] = params['fields']
    header_params = {}
    form_params = []
    local_var_files = {}
    # GET request: no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'GET',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Group',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def update_group(self, group_id, body, **kwargs):
    """
    Update Group
    ### Updates the a group (admin only).
    Synchronous by default; pass a `callback` callable to issue the
    request asynchronously, in which case the request thread is
    returned and the callback is invoked with the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_group(group_id, body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param Group body: Group (required)
    :param str fields: Requested fields.
    :return: Group
        If the method is called asynchronously,
        returns the request thread.
    """
    # Strip the status/header tuple; callers want the payload only.
    kwargs['_return_http_data_only'] = True
    # The helper returns the request thread (async) or the
    # deserialized data (sync); forward its result either way.
    return self.update_group_with_http_info(group_id, body, **kwargs)
def update_group_with_http_info(self, group_id, body, **kwargs):
    """
    Update Group
    ### Updates the a group (admin only).
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please define a `callback` function
    to be invoked when receiving the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_group_with_http_info(group_id, body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param Group body: Group (required)
    :param str fields: Requested fields.
    :return: Group
        If the method is called asynchronously,
        returns the request thread.
    """
    # Whitelist of accepted keyword arguments; unknown ones raise
    # TypeError below.
    all_params = ['group_id', 'body', 'fields']
    # Transport-level options accepted by every generated endpoint.
    all_params.append('callback')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # NOTE: locals() snapshot captures group_id and body here
    # (generated-code idiom); validated kwargs are merged in below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_group" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'group_id' is set
    if ('group_id' not in params) or (params['group_id'] is None):
        raise ValueError("Missing the required parameter `group_id` when calling `update_group`")
    # verify the required parameter 'body' is set
    if ('body' not in params) or (params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `update_group`")
    collection_formats = {}
    # '{format}' replacement is legacy swagger-codegen boilerplate.
    resource_path = '/groups/{group_id}'.replace('{format}', 'json')
    # group_id is substituted into the URL path by the api_client.
    path_params = {}
    if 'group_id' in params:
        path_params['group_id'] = params['group_id']
    query_params = {}
    if 'fields' in params:
        query_params['fields'] = params['fields']
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # The Group payload becomes the PATCH request body.
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.\
        select_header_accept(['application/json'])
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.\
        select_header_content_type(['application/json'])
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='Group',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
def update_user_attribute_group_value(self, group_id, user_attribute_id, body, **kwargs):
    """
    Set User Attribute Group Value
    ### Set the value of a user attribute for a group. For information about how user attribute values are calculated, see [Set User Attribute Group Values](#!/UserAttribute/set_user_attribute_group_values).
    Synchronous by default; pass a `callback` callable to issue the
    request asynchronously, in which case the request thread is
    returned and the callback is invoked with the response.
    >>> def callback_function(response):
    >>>     pprint(response)
    >>>
    >>> thread = api.update_user_attribute_group_value(group_id, user_attribute_id, body, callback=callback_function)
    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param int user_attribute_id: Id of user attribute (required)
    :param UserAttributeGroupValue body: New value for group. (required)
    :return: UserAttributeGroupValue
        If the method is called asynchronously,
        returns the request thread.
    """
    # Strip the status/header tuple; callers want the payload only.
    kwargs['_return_http_data_only'] = True
    # The helper returns the request thread (async) or the
    # deserialized data (sync); forward its result either way.
    return self.update_user_attribute_group_value_with_http_info(group_id, user_attribute_id, body, **kwargs)
def update_user_attribute_group_value_with_http_info(self, group_id, user_attribute_id, body, **kwargs):
    """
    Set User Attribute Group Value

    ### Set the value of a user attribute for a group. For information about how user attribute values are calculated, see [Set User Attribute Group Values](#!/UserAttribute/set_user_attribute_group_values).

    Synchronous by default; supply a `callback` keyword to make the request
    asynchronously.

    :param callback function: The callback function
        for asynchronous request. (optional)
    :param int group_id: Id of group (required)
    :param int user_attribute_id: Id of user attribute (required)
    :param UserAttributeGroupValue body: New value for group. (required)
    :return: UserAttributeGroupValue
        If the method is called asynchronously,
        returns the request thread.
    """
    all_params = ['group_id', 'user_attribute_id', 'body',
                  'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    # Snapshot the locals (group_id / user_attribute_id / body / kwargs)
    # before any new locals are introduced below.
    params = locals()
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_user_attribute_group_value" % key
            )
        params[key] = val
    del params['kwargs']
    # Fail fast when any required argument is missing or None.
    for required in ('group_id', 'user_attribute_id', 'body'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `update_user_attribute_group_value`" % required)
    collection_formats = {}
    resource_path = '/groups/{group_id}/attribute_values/{user_attribute_id}'.replace('{format}', 'json')
    path_params = {name: params[name]
                   for name in ('group_id', 'user_attribute_id')
                   if name in params}
    query_params = {}
    # Both Accept and Content-Type are negotiated as JSON.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }
    form_params = []
    local_var_files = {}
    body_params = params.get('body')
    # Authentication setting
    auth_settings = []
    return self.api_client.call_api(resource_path, 'PATCH',
                                    path_params,
                                    query_params,
                                    header_params,
                                    body=body_params,
                                    post_params=form_params,
                                    files=local_var_files,
                                    response_type='UserAttributeGroupValue',
                                    auth_settings=auth_settings,
                                    callback=params.get('callback'),
                                    _return_http_data_only=params.get('_return_http_data_only'),
                                    _preload_content=params.get('_preload_content', True),
                                    _request_timeout=params.get('_request_timeout'),
                                    collection_formats=collection_formats)
| 43.521512
| 1,625
| 0.574908
| 7,131
| 66,762
| 5.138129
| 0.039265
| 0.039356
| 0.019869
| 0.025546
| 0.946616
| 0.939056
| 0.930022
| 0.919132
| 0.913919
| 0.910862
| 0
| 0.000273
| 0.34265
| 66,762
| 1,533
| 1,626
| 43.549902
| 0.834617
| 0.336614
| 0
| 0.817942
| 0
| 0
| 0.175939
| 0.039439
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03562
| false
| 0
| 0.009235
| 0
| 0.097625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f7e8ac0e02c337e7aebae741951149fb43373b1a
| 130
|
py
|
Python
|
icevision/models/ross/efficientdet/fastai/__init__.py
|
ai-fast-track/mantisshrimp
|
cc6d6a4a048f6ddda2782b6593dcd6b083a673e4
|
[
"Apache-2.0"
] | 580
|
2020-09-10T06:29:57.000Z
|
2022-03-29T19:34:54.000Z
|
icevision/models/ross/efficientdet/fastai/__init__.py
|
ai-fast-track/mantisshrimp
|
cc6d6a4a048f6ddda2782b6593dcd6b083a673e4
|
[
"Apache-2.0"
] | 691
|
2020-09-05T03:08:34.000Z
|
2022-03-31T23:47:06.000Z
|
icevision/models/ross/efficientdet/fastai/__init__.py
|
lgvaz/mantisshrimp2
|
743cb7df0dae7eb1331fc2bb66fc9ca09db496cd
|
[
"Apache-2.0"
] | 105
|
2020-09-09T10:41:35.000Z
|
2022-03-25T17:16:49.000Z
|
from icevision.models.ross.efficientdet.fastai.callbacks import *
from icevision.models.ross.efficientdet.fastai.learner import *
| 43.333333
| 65
| 0.846154
| 16
| 130
| 6.875
| 0.5625
| 0.236364
| 0.345455
| 0.418182
| 0.745455
| 0.745455
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061538
| 130
| 2
| 66
| 65
| 0.901639
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
f7fd7c10f311b68e9ad647397f97c4d32e016396
| 2,329
|
py
|
Python
|
tests/localization/color/utils/test_color_converter.py
|
Lukasz1928/mobile-robots-control
|
81820b35dab10b14f58d66079b0a8f82ef819bee
|
[
"MIT"
] | 2
|
2018-06-28T08:07:06.000Z
|
2018-07-14T10:00:31.000Z
|
tests/localization/color/utils/test_color_converter.py
|
Lukasz1928/mobile-robots-control
|
81820b35dab10b14f58d66079b0a8f82ef819bee
|
[
"MIT"
] | 6
|
2018-10-15T11:00:13.000Z
|
2018-12-19T18:06:49.000Z
|
tests/localization/color/utils/test_color_converter.py
|
Lukasz1928/mobile-robots-control
|
81820b35dab10b14f58d66079b0a8f82ef819bee
|
[
"MIT"
] | null | null | null |
import cv2
from unittest import TestCase
import numpy as np
from parameterized import parameterized
from mrc.localization.color.utils.color_converter import ColorConverter
from tests.test_utils.read_image import read_image
class TestColorConverterGrayscale(TestCase):
    """Grayscale conversion tests against a pre-rendered fixture image.

    Both BGR and RGB inputs must converge to the same single-channel
    grayscale result loaded from gray.png.
    """

    def setUp(self):
        self.converter = ColorConverter()
        self.imageBGR = read_image('localization/color/utils/color_conversion/gray/source.png')
        self.imageRGB = cv2.cvtColor(self.imageBGR, cv2.COLOR_BGR2RGB)
        # Fixture is stored as 3-channel; keep only one channel as expected output.
        self.expected_grayscale = read_image('localization/color/utils/color_conversion/gray/gray.png')[:, :, 0]

    def test_BGR_to_Grayscale(self):
        grayscale = self.converter.convert_to_grayscale(self.imageBGR, 'BGR')
        np.testing.assert_array_equal(grayscale, self.expected_grayscale)

    def test_RGB_to_Grayscale(self):
        grayscale = self.converter.convert_to_grayscale(self.imageRGB, 'RGB')
        np.testing.assert_array_equal(grayscale, self.expected_grayscale)

    def test_BGR_to_Grayscale_special(self):
        # NOTE(review): identical to test_BGR_to_Grayscale — presumably meant
        # to cover a "special" fixture or code path; confirm original intent.
        grayscale = self.converter.convert_to_grayscale(self.imageBGR, 'BGR')
        np.testing.assert_array_equal(grayscale, self.expected_grayscale)

    def test_RGB_to_Grayscale_special(self):
        # Fixed: originally converted self.imageBGR with 'BGR', which merely
        # duplicated the BGR test instead of exercising the RGB input path.
        grayscale = self.converter.convert_to_grayscale(self.imageRGB, 'RGB')
        np.testing.assert_array_equal(grayscale, self.expected_grayscale)
class TestColorConverterBinary(TestCase):
    """Binary-threshold conversion tests against pre-rendered fixture images."""

    def setUp(self):
        self.converter = ColorConverter()
        self.imageBGR = read_image('localization/color/utils/color_conversion/binary/source.png')
        self.imageRGB = cv2.cvtColor(self.imageBGR, cv2.COLOR_BGR2RGB)
        # One expected single-channel image per threshold step 0..8.
        fixture = 'localization/color/utils/color_conversion/binary/{}.png'
        self.expected_images = [read_image(fixture.format(step))[:, :, 0]
                                for step in range(9)]

    @parameterized.expand([[i] for i in range(9)])
    def test_BGR_to_binary(self, i):
        threshold = i / 8 * 255
        converted = self.converter.convert_to_binary(self.imageBGR, threshold, 'BGR')
        np.testing.assert_array_equal(converted, self.expected_images[i])

    @parameterized.expand([[i] for i in range(9)])
    def test_RGB_to_binary(self, i):
        threshold = i / 8 * 255
        converted = self.converter.convert_to_binary(self.imageRGB, threshold, 'RGB')
        np.testing.assert_array_equal(converted, self.expected_images[i])
| 44.788462
| 129
| 0.729927
| 298
| 2,329
| 5.483221
| 0.187919
| 0.111383
| 0.05508
| 0.080783
| 0.80049
| 0.78519
| 0.779682
| 0.779682
| 0.711138
| 0.711138
| 0
| 0.010251
| 0.162301
| 2,329
| 51
| 130
| 45.666667
| 0.827268
| 0
| 0
| 0.435897
| 0
| 0
| 0.104766
| 0.097037
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.205128
| false
| 0
| 0.153846
| 0
| 0.410256
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
792382198d77ace4a94683fb5d4dac2847bdd6f8
| 1,417
|
py
|
Python
|
src/emails/constants.py
|
tabifier/template-django
|
0d2d2ddbc0b53cea8a1ed12d531bf30c231f5185
|
[
"MIT"
] | 1
|
2016-05-09T06:14:39.000Z
|
2016-05-09T06:14:39.000Z
|
src/emails/constants.py
|
tabifier/template-django
|
0d2d2ddbc0b53cea8a1ed12d531bf30c231f5185
|
[
"MIT"
] | null | null | null |
src/emails/constants.py
|
tabifier/template-django
|
0d2d2ddbc0b53cea8a1ed12d531bf30c231f5185
|
[
"MIT"
] | null | null | null |
class EmailTypes(object):
    """Symbolic identifiers for the email templates registered in EMAILS."""

    AUTH_WELCOME_EMAIL = "auth__welcome_email"
    AUTH_VERIFY_SIGNUP_EMAIL = "auth__verify_signup_email"
def _default_action_email():
    """Build a fresh config dict for an action-template email.

    Both registered email types currently use byte-identical template
    configuration, so the entries are generated from this single factory
    instead of two hand-maintained copies. Each call returns a new dict so
    per-type customization cannot leak between entries.
    """
    return {
        "is_active": False,
        "html_template": "emails/action-template.html",
        "text_template": "emails/action-template.txt",
        "subject": "",
        # NOTE(review): "sender" holds the text "EMAIL SUBJECT" while
        # "subject" is empty — the two values look swapped; confirm intent.
        "sender": "EMAIL SUBJECT",
        "message": u"""EMAIL MESSAGE""",
        "title": u"Title",
        "title_color": "",
        "signature": {
            "sign_off": "Best,",
            "name": "First Last Name",
            "email": "first@djangoapp.com",
            "email_subject": "",
            # NOTE(review): "tile" is presumably a typo for "title", but it is
            # kept as-is because templates may already read this key.
            "tile": "",
        },
        "cta_i": {
            "button_title": "Confirm Email Address",
            "button_color": "",
            "button_link": "",
            "message": "",
        }
    }


# Registry mapping EmailTypes constants to their template configuration.
EMAILS = {}
EMAILS[EmailTypes.AUTH_WELCOME_EMAIL] = _default_action_email()
EMAILS[EmailTypes.AUTH_VERIFY_SIGNUP_EMAIL] = _default_action_email()
| 25.303571
| 58
| 0.551164
| 139
| 1,417
| 5.352518
| 0.280576
| 0.075269
| 0.107527
| 0.150538
| 0.848118
| 0.784946
| 0.784946
| 0.784946
| 0.784946
| 0.784946
| 0
| 0
| 0.256881
| 1,417
| 55
| 59
| 25.763636
| 0.706553
| 0
| 0
| 0.76
| 0
| 0
| 0.461538
| 0.092449
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0.06
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f7021558695aa1fdcaa5b4d0bb723146eff9bfc1
| 9,127
|
py
|
Python
|
ec2_compare/internal/instance_type/a.py
|
weldpua2008/aws.ec2.compare
|
5149fc4c7cb42f4d7df1930ed8a06750155fe578
|
[
"Apache-2.0"
] | null | null | null |
ec2_compare/internal/instance_type/a.py
|
weldpua2008/aws.ec2.compare
|
5149fc4c7cb42f4d7df1930ed8a06750155fe578
|
[
"Apache-2.0"
] | null | null | null |
ec2_compare/internal/instance_type/a.py
|
weldpua2008/aws.ec2.compare
|
5149fc4c7cb42f4d7df1930ed8a06750155fe578
|
[
"Apache-2.0"
] | 1
|
2021-12-15T11:58:22.000Z
|
2021-12-15T11:58:22.000Z
|
# Automatically generated
# pylint: disable=all
get = [{'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 1, 'DefaultCores': 1, 'DefaultThreadsPerCore': 1, 'ValidCores': [1], 'ValidThreadsPerCore': [1], 'SizeInMiB': 2048, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2, 'Ipv4AddressesPerInterface': 4, 'Ipv6AddressesPerInterface': 4, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.medium', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 1, 'DefaultCores': 1, 'DefaultThreadsPerCore': 1, 'ValidCores': [1], 'ValidThreadsPerCore': [1]}, 'MemoryInfo': {'SizeInMiB': 2048}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 2, 'Ipv4AddressesPerInterface': 4, 'Ipv6AddressesPerInterface': 4, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 2, 'DefaultCores': 2, 'DefaultThreadsPerCore': 1, 'ValidCores': [2], 'ValidThreadsPerCore': [1], 'SizeInMiB': 4096, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 3, 'Ipv4AddressesPerInterface': 10, 'Ipv6AddressesPerInterface': 10, 'Ipv6Supported': True, 'EnaSupport': 'required', 
'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.large', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 2, 'DefaultCores': 2, 'DefaultThreadsPerCore': 1, 'ValidCores': [2], 'ValidThreadsPerCore': [1]}, 'MemoryInfo': {'SizeInMiB': 4096}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 3, 'Ipv4AddressesPerInterface': 10, 'Ipv6AddressesPerInterface': 10, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 4, 'DefaultCores': 4, 'DefaultThreadsPerCore': 1, 'ValidCores': [4], 'ValidThreadsPerCore': [1], 'SizeInMiB': 8192, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4, 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 4, 'DefaultCores': 4, 'DefaultThreadsPerCore': 1, 'ValidCores': [4], 'ValidThreadsPerCore': 
[1]}, 'MemoryInfo': {'SizeInMiB': 8192}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4, 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 8, 'DefaultCores': 8, 'DefaultThreadsPerCore': 1, 'ValidCores': [8], 'ValidThreadsPerCore': [1], 'SizeInMiB': 16384, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4, 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.2xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 8, 'DefaultCores': 8, 'DefaultThreadsPerCore': 1, 'ValidCores': [8], 'ValidThreadsPerCore': [1]}, 'MemoryInfo': {'SizeInMiB': 16384}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 4, 'Ipv4AddressesPerInterface': 15, 'Ipv6AddressesPerInterface': 15, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 
'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 16, 'DefaultCores': 16, 'DefaultThreadsPerCore': 1, 'ValidCores': [16], 'ValidThreadsPerCore': [1], 'SizeInMiB': 32768, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.4xlarge', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': False, 'Hypervisor': 'nitro', 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 16, 'DefaultCores': 16, 'DefaultThreadsPerCore': 1, 'ValidCores': [16], 'ValidThreadsPerCore': [1]}, 'MemoryInfo': {'SizeInMiB': 32768}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 16, 'SizeInMiB': 32768, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 
'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 16}, 'MemoryInfo': {'SizeInMiB': 32768}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}] # noqa: E501
def get_instances_list() -> list:
    '''Return the module-level list of EC2 instance descriptions (InstanceType family "a").'''
    # pylint: disable=all
    return get
| 760.583333
| 8,938
| 0.739016
| 685
| 9,127
| 9.843796
| 0.129927
| 0.04805
| 0.090761
| 0.09254
| 0.952098
| 0.952098
| 0.944238
| 0.944238
| 0.944238
| 0.931188
| 0
| 0.03434
| 0.074723
| 9,127
| 11
| 8,939
| 829.727273
| 0.764121
| 0.013805
| 0
| 0
| 1
| 0
| 0.688577
| 0.278278
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 12
|
f7307823c40ee64d03118ec461eb3103c01b9c29
| 5,544
|
py
|
Python
|
Heats/scoresheetsHtml.py
|
yuxuibbs/MCC-Competition-Docs
|
384726c41434c5a07becb6438c3d2409c6ca6eb4
|
[
"MIT"
] | 4
|
2016-11-13T20:49:33.000Z
|
2017-12-20T20:03:03.000Z
|
Heats/scoresheetsHtml.py
|
yuxuibbs/MCC-Competition-Docs
|
384726c41434c5a07becb6438c3d2409c6ca6eb4
|
[
"MIT"
] | 5
|
2016-12-26T19:14:46.000Z
|
2022-02-11T03:44:39.000Z
|
Heats/scoresheetsHtml.py
|
yuxuibbs/MCC-Competition-Docs
|
384726c41434c5a07becb6438c3d2409c6ca6eb4
|
[
"MIT"
] | 2
|
2016-12-29T12:03:15.000Z
|
2017-02-16T15:51:02.000Z
|
# Opening HTML + print-oriented CSS shared by every generated scoresheet page.
# Each <table> is forced onto its own printed page via page-break-after.
startHTML = '''
<html>
<head>
<style>
table {
border-collapse: collapse;
height: 100%;
width: 100%;
}
table, th, td {
border: 3px solid black;
}
@media print {
table {
page-break-after: always;
}
}
.cutoffs td {
border: 0;
font-weight: bold;
}
.compName {
font-size: 48pt;
font-weight: bold;
}
.labels {
font-size: 24pt;
font-weight: bold;
}
.attempt {
font-size: 36pt;
font-weight: bold;
text-align: center;
}
.event, .personID, .scrambler {
font-size: 24pt;
font-weight: bold;
width: 60px;
}
.round, .heat {
font-size: 24pt;
font-weight: bold;
}
.personName {
font-size: 40pt;
font-weight: bold;
}
.attemptNumber {
width: 60px;
}
.initial {
width: 100px;
}
</style>
</head>
<body>
'''
# Scoresheet table for an average-of-5 event: five timed attempts plus an
# extra ("E") row, with a cutoff/time-limit row after attempt 2. Placeholder
# tokens (competitionName, competitorID, eventName, heatNumber, roundNumber,
# competitorName, cutoffTime, timeLimit) are substituted by the generator.
ao5Table = '''
<table>
<tr>
<th colspan="6" class="compName">competitionName</th>
</tr>
<tr>
<th colspan="1" class="personID">competitorID</th>
<th colspan="3" class="event">eventName</th>
<th colspan="1" class="heat">G: heatNumber</th>
<th colspan="1" class="round">R: roundNumber</th>
</tr>
<tr>
<th colspan="6" class="personName">competitorName</th>
</tr>
<tr class="labels">
<th colspan="1" class="scrambler">Scr</th>
<th colspan="1" class="attemptNumber">#</th>
<th colspan="2">Results</th>
<th colspan="1" class="initial">Judge</th>
<th colspan="1" class="initial">Comp</th>
</tr>
<tr class="attempt">
<td colspan="1"> </td>
<td colspan="1">1</td>
<td colspan="2"> </td>
<td colspan="1"> </td>
<td colspan="1"> </td>
</tr>
<tr class="attempt">
<td colspan="1"> </td>
<td colspan="1">2</td>
<td colspan="2"> </td>
<td colspan="1"> </td>
<td colspan="1"> </td>
</tr>
<tr class="cutoffs">
<td colspan="1"></td>
<td colspan="1"></td>
<td colspan="1">Cutoff: cutoffTime</td>
<td colspan="1">Time Limit: timeLimit</td>
<td colspan="1"></td>
<td colspan="1"></td>
</tr>
<tr class="attempt">
<td colspan="1"> </td>
<td colspan="1">3</td>
<td colspan="2"> </td>
<td colspan="1"> </td>
<td colspan="1"> </td>
</tr>
<tr class="attempt">
<td colspan="1"> </td>
<td colspan="1">4</td>
<td colspan="2"> </td>
<td colspan="1"> </td>
<td colspan="1"> </td>
</tr>
<tr class="attempt">
<td colspan="1"> </td>
<td colspan="1">5</td>
<td colspan="2"> </td>
<td colspan="1"> </td>
<td colspan="1"> </td>
</tr>
<tr class="empty">
<td colspan="6"></td>
</tr>
<tr class="attempt">
<td colspan="1"> </td>
<td colspan="1">E</td>
<td colspan="2"> </td>
<td colspan="1"> </td>
<td colspan="1"> </td>
</tr>
</table>
'''
# Scoresheet table for a mean-of-3 event: three timed attempts plus an
# extra ("E") row, with a cutoff/time-limit row after attempt 1. Placeholder
# tokens are substituted by the generator, as in ao5Table.
mo3Table = '''
<table>
<tr>
<th colspan="6" class="compName">competitionName</th>
</tr>
<tr>
<th colspan="1" class="personID">competitorID</th>
<th colspan="3" class="event">eventName</th>
<th colspan="1" class="heat">G: heatNumber</th>
<th colspan="1" class="round">R: roundNumber</th>
</tr>
<tr>
<th colspan="6" class="personName">competitorName</th>
</tr>
<tr class="labels">
<th colspan="1" class="scrambler">Scr</th>
<th colspan="1" class="attemptNumber">#</th>
<th colspan="2">Results</th>
<th colspan="1" class="initial">Judge</th>
<th colspan="1" class="initial">Comp</th>
</tr>
<tr class="attempt">
<td colspan="1"> </td>
<td colspan="1">1</td>
<td colspan="2"> </td>
<td colspan="1"> </td>
<td colspan="1"> </td>
</tr>
<tr class="cutoffs">
<td colspan="1"></td>
<td colspan="1"></td>
<td colspan="1">Cutoff: cutoffTime</td>
<td colspan="1">Time Limit: timeLimit</td>
<td colspan="1"></td>
<td colspan="1"></td>
</tr>
<tr class="attempt">
<td colspan="1"> </td>
<td colspan="1">2</td>
<td colspan="2"> </td>
<td colspan="1"> </td>
<td colspan="1"> </td>
</tr>
<tr class="attempt">
<td colspan="1"> </td>
<td colspan="1">3</td>
<td colspan="2"> </td>
<td colspan="1"> </td>
<td colspan="1"> </td>
</tr>
<tr class="empty">
<td colspan="6"></td>
</tr>
<tr class="attempt">
<td colspan="1"> </td>
<td colspan="1">E</td>
<td colspan="2"> </td>
<td colspan="1"> </td>
<td colspan="1"> </td>
</tr>
</table>
'''
# Closing tags matching the document opened by startHTML.
endHTML = '''
</body>
</html>
'''
| 25.906542
| 64
| 0.418831
| 604
| 5,544
| 3.844371
| 0.130795
| 0.22739
| 0.223945
| 0.206718
| 0.824289
| 0.824289
| 0.790698
| 0.790698
| 0.790698
| 0.790698
| 0
| 0.036155
| 0.386364
| 5,544
| 214
| 65
| 25.906542
| 0.646384
| 0
| 0
| 0.752525
| 0
| 0
| 0.986114
| 0.095942
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.005051
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
f7481b1f82e69e61bcac214242e583154bd7b7cc
| 36
|
py
|
Python
|
j4j_spawner/__init__.py
|
FZJ-JSC/jupyter-jsc-jupyterhub-collection
|
3fbb83da6e356df57bbdd24269157944f7fcd2a5
|
[
"BSD-3-Clause"
] | null | null | null |
j4j_spawner/__init__.py
|
FZJ-JSC/jupyter-jsc-jupyterhub-collection
|
3fbb83da6e356df57bbdd24269157944f7fcd2a5
|
[
"BSD-3-Clause"
] | null | null | null |
j4j_spawner/__init__.py
|
FZJ-JSC/jupyter-jsc-jupyterhub-collection
|
3fbb83da6e356df57bbdd24269157944f7fcd2a5
|
[
"BSD-3-Clause"
] | null | null | null |
from .j4j_spawner import J4J_Spawner
| 36
| 36
| 0.888889
| 6
| 36
| 5
| 0.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 0.083333
| 36
| 1
| 36
| 36
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f76dbab7b84c0ea2144dc04b04793890b3e37c64
| 269
|
py
|
Python
|
src/manager/__init__.py
|
DeepLearnPhysics/larcv-viewer
|
5d5eb65e409e80a2ee229464bb7b714deec05462
|
[
"MIT"
] | 1
|
2021-07-13T13:34:20.000Z
|
2021-07-13T13:34:20.000Z
|
src/manager/__init__.py
|
DeepLearnPhysics/larcv-viewer
|
5d5eb65e409e80a2ee229464bb7b714deec05462
|
[
"MIT"
] | 3
|
2018-01-17T16:13:23.000Z
|
2018-01-17T17:28:02.000Z
|
src/manager/__init__.py
|
DeepLearnPhysics/larcv-viewer
|
5d5eb65e409e80a2ee229464bb7b714deec05462
|
[
"MIT"
] | 1
|
2019-09-24T20:43:01.000Z
|
2019-09-24T20:43:01.000Z
|
from .event_meta import event_meta
from .evd_manager_base import evd_manager_base
from .evd_manager_2D import evd_manager_2D
from .evd_manager_3D import evd_manager_3D
try:
    # Optional 3-D support: pyqtgraph's OpenGL module may be unavailable on
    # hosts without an OpenGL stack; the viewer then runs without it.
    import pyqtgraph.opengl as gl
    # from .evdmanager import evd_manager_3D
except Exception:
    # Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit are no
    # longer swallowed while keeping the best-effort import behavior.
    pass
| 26.9
| 46
| 0.817844
| 44
| 269
| 4.636364
| 0.386364
| 0.343137
| 0.313725
| 0.176471
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021834
| 0.148699
| 269
| 9
| 47
| 29.888889
| 0.868996
| 0.141264
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.125
| 0.625
| 0
| 0.625
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
e3cb266c98fd7f9a6aebc01393588c569e0b9442
| 2,707
|
py
|
Python
|
mySite/applications/myList/migrations/0001_initial.py
|
ALittleMoron/django_mySite
|
b0cedcc31d9016a862015bc7da0de7ff09441e8b
|
[
"Unlicense"
] | null | null | null |
mySite/applications/myList/migrations/0001_initial.py
|
ALittleMoron/django_mySite
|
b0cedcc31d9016a862015bc7da0de7ff09441e8b
|
[
"Unlicense"
] | null | null | null |
mySite/applications/myList/migrations/0001_initial.py
|
ALittleMoron/django_mySite
|
b0cedcc31d9016a862015bc7da0de7ff09441e8b
|
[
"Unlicense"
] | null | null | null |
# Generated by Django 3.2.7 on 2021-10-12 16:31
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema for the myList app: Film and Series tables."""

    initial = True

    dependencies = [
    ]

    # Film and Series carry byte-identical field sets ('abstract': False in
    # options suggests both presumably derive from the same abstract base —
    # confirm against the models), so the shared list is produced by a single
    # factory instead of two duplicated copies. Each call returns fresh
    # field instances so the two CreateModel operations share nothing.
    def _watchable_fields():
        return [
            ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ('name', models.CharField(max_length=150, unique=True, verbose_name='Название')),
            ('native_name', models.CharField(max_length=150, unique=True, verbose_name='Название на родном языке')),
            ('description', models.TextField(verbose_name='Описание')),
            ('opinion', models.TextField(verbose_name='Мое мнение')),
            ('slug', models.SlugField(max_length=160, null=True, verbose_name='Слаг')),
            ('rating', models.IntegerField(choices=[(0, 'Не указан'), (1, 'Хуже некуда'), (2, 'Ужасно'), (3, 'Очень плохо'), (4, 'Плохо'), (5, 'Более-менее'), (6, 'Нормально'), (7, 'Хорошо'), (8, 'Очень хорошо'), (9, 'Великолепно'), (10, 'Шедевр')], default=0, verbose_name='Оценка')),
            ('recomend_to_watch', models.BooleanField(default=False, verbose_name='Рекомендую к ознакомлению')),
            ('is_anime', models.BooleanField(default=False, verbose_name='Аниме')),
        ]

    operations = [
        migrations.CreateModel(
            name='Film',
            fields=_watchable_fields(),
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Series',
            fields=_watchable_fields(),
            options={
                'abstract': False,
            },
        ),
    ]

    # Drop the helper so it does not linger as a (never-called) method.
    del _watchable_fields
| 55.244898
| 289
| 0.582933
| 284
| 2,707
| 5.429577
| 0.334507
| 0.128405
| 0.062257
| 0.057069
| 0.874189
| 0.874189
| 0.874189
| 0.874189
| 0.874189
| 0.874189
| 0
| 0.028907
| 0.246029
| 2,707
| 48
| 290
| 56.395833
| 0.726605
| 0.016624
| 0
| 0.731707
| 1
| 0
| 0.204511
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02439
| 0
| 0.121951
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e3e0458a25e37519c6e2fbf6924a775202905940
| 15,716
|
py
|
Python
|
tests/api/v3_0_0/test_network_access_authorization_global_exception_rules.py
|
CiscoISE/ciscoisesdk
|
860b0fc7cc15d0c2a39c64608195a7ab3d5f4885
|
[
"MIT"
] | 36
|
2021-05-18T16:24:19.000Z
|
2022-03-05T13:44:41.000Z
|
tests/api/v3_0_0/test_network_access_authorization_global_exception_rules.py
|
CiscoISE/ciscoisesdk
|
860b0fc7cc15d0c2a39c64608195a7ab3d5f4885
|
[
"MIT"
] | 15
|
2021-06-08T19:03:37.000Z
|
2022-02-25T14:47:33.000Z
|
tests/api/v3_0_0/test_network_access_authorization_global_exception_rules.py
|
CiscoISE/ciscoisesdk
|
860b0fc7cc15d0c2a39c64608195a7ab3d5f4885
|
[
"MIT"
] | 6
|
2021-06-10T09:32:01.000Z
|
2022-01-12T08:34:39.000Z
|
# -*- coding: utf-8 -*-
"""IdentityServicesEngineAPI network_access_authorization_global_exception_rules API fixtures and tests.
Copyright (c) 2021 Cisco and/or its affiliates.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import pytest
from fastjsonschema.exceptions import JsonSchemaException
from ciscoisesdk.exceptions import MalformedRequest
from ciscoisesdk.exceptions import ciscoisesdkException
from tests.environment import IDENTITY_SERVICES_ENGINE_VERSION
# Skip every test in this module unless the targeted ISE version is exactly 3.0.0.
pytestmark = pytest.mark.skipif(IDENTITY_SERVICES_ENGINE_VERSION != '3.0.0', reason='version does not match')
def is_valid_get_network_access_policy_set_global_exception_rules(json_schema_validate, obj):
    """Return True when *obj* is a non-empty response object that matches the JSON schema."""
    if not obj:
        return False
    for attribute in ('headers', 'content', 'text', 'response'):
        assert hasattr(obj, attribute)
    json_schema_validate('jsd_19a11a1ff1ee5387b669bcde99f86fbf_v3_0_0').validate(obj.response)
    return True
def get_network_access_policy_set_global_exception_rules(api):
    """Call the list endpoint and return its result."""
    return api.network_access_authorization_global_exception_rules.get_network_access_policy_set_global_exception_rules()
@pytest.mark.network_access_authorization_global_exception_rules
def test_get_network_access_policy_set_global_exception_rules(api, validator):
    """The list response must validate; known API-side failures are tolerated."""
    try:
        response = get_network_access_policy_set_global_exception_rules(api)
        assert is_valid_get_network_access_policy_set_global_exception_rules(validator, response)
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e
def get_network_access_policy_set_global_exception_rules_default(api):
    """Call the list endpoint with all arguments left at their defaults."""
    return api.network_access_authorization_global_exception_rules.get_network_access_policy_set_global_exception_rules()
@pytest.mark.network_access_authorization_global_exception_rules
def test_get_network_access_policy_set_global_exception_rules_default(api, validator):
    """Default-argument variant; TypeError is additionally tolerated."""
    try:
        response = get_network_access_policy_set_global_exception_rules_default(api)
        assert is_valid_get_network_access_policy_set_global_exception_rules(validator, response)
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e
def is_valid_create_network_access_policy_set_global_exception_rule(json_schema_validate, obj):
    """Return True when *obj* is a non-empty response object that matches the JSON schema."""
    if not obj:
        return False
    for attribute in ('headers', 'content', 'text', 'response'):
        assert hasattr(obj, attribute)
    json_schema_validate('jsd_3c5c9b7ab72b5442ae7026a5dcc0fec3_v3_0_0').validate(obj.response)
    return True
def create_network_access_policy_set_global_exception_rule(api):
    """Call the create endpoint with placeholder payload values."""
    rule = {'condition': {'conditionType': 'string', 'isNegate': True, 'link': {'href': 'string', 'rel': 'string', 'type': 'string'}, 'description': 'string', 'id': 'string', 'name': 'string', 'attributeName': 'string', 'attributeId': 'string', 'attributeValue': 'string', 'dictionaryName': 'string', 'dictionaryValue': 'string', 'operator': 'string', 'children': [{'conditionType': 'string', 'isNegate': True, 'link': {'href': 'string', 'rel': 'string', 'type': 'string'}}], 'datesRange': {'endDate': 'string', 'startDate': 'string'}, 'datesRangeException': {'endDate': 'string', 'startDate': 'string'}, 'hoursRange': {'endTime': 'string', 'startTime': 'string'}, 'hoursRangeException': {'endTime': 'string', 'startTime': 'string'}, 'weekDays': ['string'], 'weekDaysException': ['string']}, 'default': True, 'hitCounts': 0, 'id': 'string', 'name': 'string', 'rank': 0, 'state': 'string'}
    return api.network_access_authorization_global_exception_rules.create_network_access_policy_set_global_exception_rule(
        active_validation=False,
        link={'href': 'string', 'rel': 'string', 'type': 'string'},
        payload=None,
        profile=['string'],
        rule=rule,
        security_group='string'
    )
@pytest.mark.network_access_authorization_global_exception_rules
def test_create_network_access_policy_set_global_exception_rule(api, validator):
    """The create response must validate; known API-side failures are tolerated."""
    try:
        response = create_network_access_policy_set_global_exception_rule(api)
        assert is_valid_create_network_access_policy_set_global_exception_rule(validator, response)
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e
def create_network_access_policy_set_global_exception_rule_default(api):
    """Call the create endpoint with every optional argument set to None."""
    return api.network_access_authorization_global_exception_rules.create_network_access_policy_set_global_exception_rule(
        active_validation=False,
        link=None,
        payload=None,
        profile=None,
        rule=None,
        security_group=None
    )
@pytest.mark.network_access_authorization_global_exception_rules
def test_create_network_access_policy_set_global_exception_rule_default(api, validator):
    """Default-argument variant; TypeError is additionally tolerated."""
    try:
        response = create_network_access_policy_set_global_exception_rule_default(api)
        assert is_valid_create_network_access_policy_set_global_exception_rule(validator, response)
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e
def is_valid_reset_hit_counts_network_access_global_exceptions(json_schema_validate, obj):
    """Return True when *obj* is a non-empty response object that matches the JSON schema."""
    if not obj:
        return False
    for attribute in ('headers', 'content', 'text', 'response'):
        assert hasattr(obj, attribute)
    json_schema_validate('jsd_f2a4d5ef4e915ff8aac91b666fc86326_v3_0_0').validate(obj.response)
    return True
def reset_hit_counts_network_access_global_exceptions(api):
    """Call the reset-hit-counts endpoint."""
    return api.network_access_authorization_global_exception_rules.reset_hit_counts_network_access_global_exceptions(
        active_validation=False,
        payload=None
    )
@pytest.mark.network_access_authorization_global_exception_rules
def test_reset_hit_counts_network_access_global_exceptions(api, validator):
    """The reset response must validate; known API-side failures are tolerated."""
    try:
        response = reset_hit_counts_network_access_global_exceptions(api)
        assert is_valid_reset_hit_counts_network_access_global_exceptions(validator, response)
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e
def reset_hit_counts_network_access_global_exceptions_default(api):
    """Call the reset-hit-counts endpoint with default arguments."""
    return api.network_access_authorization_global_exception_rules.reset_hit_counts_network_access_global_exceptions(
        active_validation=False,
        payload=None
    )
@pytest.mark.network_access_authorization_global_exception_rules
def test_reset_hit_counts_network_access_global_exceptions_default(api, validator):
    """Default-argument variant; TypeError is additionally tolerated."""
    try:
        response = reset_hit_counts_network_access_global_exceptions_default(api)
        assert is_valid_reset_hit_counts_network_access_global_exceptions(validator, response)
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e
def is_valid_get_network_access_policy_set_global_exception_rule_by_id(json_schema_validate, obj):
    """Return True when *obj* is a non-empty response object that matches the JSON schema."""
    if not obj:
        return False
    for attribute in ('headers', 'content', 'text', 'response'):
        assert hasattr(obj, attribute)
    json_schema_validate('jsd_c14128e5729b55e9b1feb638a8295e10_v3_0_0').validate(obj.response)
    return True
def get_network_access_policy_set_global_exception_rule_by_id(api):
    """Call the get-by-id endpoint with a placeholder id."""
    return api.network_access_authorization_global_exception_rules.get_network_access_policy_set_global_exception_rule_by_id(
        id='string'
    )
@pytest.mark.network_access_authorization_global_exception_rules
def test_get_network_access_policy_set_global_exception_rule_by_id(api, validator):
    """The get-by-id response must validate; known API-side failures are tolerated."""
    try:
        response = get_network_access_policy_set_global_exception_rule_by_id(api)
        assert is_valid_get_network_access_policy_set_global_exception_rule_by_id(validator, response)
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e
def get_network_access_policy_set_global_exception_rule_by_id_default(api):
    """Call the get-by-id endpoint with only the required id argument."""
    return api.network_access_authorization_global_exception_rules.get_network_access_policy_set_global_exception_rule_by_id(
        id='string'
    )
@pytest.mark.network_access_authorization_global_exception_rules
def test_get_network_access_policy_set_global_exception_rule_by_id_default(api, validator):
    """Default-argument variant; TypeError is additionally tolerated."""
    try:
        response = get_network_access_policy_set_global_exception_rule_by_id_default(api)
        assert is_valid_get_network_access_policy_set_global_exception_rule_by_id(validator, response)
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e
def is_valid_update_network_access_policy_set_global_exception_rule_by_id(json_schema_validate, obj):
    """Return True when *obj* is a non-empty response object that matches the JSON schema."""
    if not obj:
        return False
    for attribute in ('headers', 'content', 'text', 'response'):
        assert hasattr(obj, attribute)
    json_schema_validate('jsd_ac171b8ccf79502fbc4b35909970a1cb_v3_0_0').validate(obj.response)
    return True
def update_network_access_policy_set_global_exception_rule_by_id(api):
    """Call the update-by-id endpoint with placeholder payload values."""
    rule = {'condition': {'conditionType': 'string', 'isNegate': True, 'link': {'href': 'string', 'rel': 'string', 'type': 'string'}, 'description': 'string', 'id': 'string', 'name': 'string', 'attributeName': 'string', 'attributeId': 'string', 'attributeValue': 'string', 'dictionaryName': 'string', 'dictionaryValue': 'string', 'operator': 'string', 'children': [{'conditionType': 'string', 'isNegate': True, 'link': {'href': 'string', 'rel': 'string', 'type': 'string'}}], 'datesRange': {'endDate': 'string', 'startDate': 'string'}, 'datesRangeException': {'endDate': 'string', 'startDate': 'string'}, 'hoursRange': {'endTime': 'string', 'startTime': 'string'}, 'hoursRangeException': {'endTime': 'string', 'startTime': 'string'}, 'weekDays': ['string'], 'weekDaysException': ['string']}, 'default': True, 'hitCounts': 0, 'id': 'string', 'name': 'string', 'rank': 0, 'state': 'string'}
    return api.network_access_authorization_global_exception_rules.update_network_access_policy_set_global_exception_rule_by_id(
        active_validation=False,
        id='string',
        link={'href': 'string', 'rel': 'string', 'type': 'string'},
        payload=None,
        profile=['string'],
        rule=rule,
        security_group='string'
    )
@pytest.mark.network_access_authorization_global_exception_rules
def test_update_network_access_policy_set_global_exception_rule_by_id(api, validator):
    """The update response must validate; known API-side failures are tolerated."""
    try:
        response = update_network_access_policy_set_global_exception_rule_by_id(api)
        assert is_valid_update_network_access_policy_set_global_exception_rule_by_id(validator, response)
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e
def update_network_access_policy_set_global_exception_rule_by_id_default(api):
    """Call the update-by-id endpoint with every optional argument set to None."""
    return api.network_access_authorization_global_exception_rules.update_network_access_policy_set_global_exception_rule_by_id(
        active_validation=False,
        id='string',
        link=None,
        payload=None,
        profile=None,
        rule=None,
        security_group=None
    )
@pytest.mark.network_access_authorization_global_exception_rules
def test_update_network_access_policy_set_global_exception_rule_by_id_default(api, validator):
    """Default-argument variant; TypeError is additionally tolerated."""
    try:
        response = update_network_access_policy_set_global_exception_rule_by_id_default(api)
        assert is_valid_update_network_access_policy_set_global_exception_rule_by_id(validator, response)
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e
def is_valid_delete_network_access_policy_set_global_exception_rule_by_id(json_schema_validate, obj):
    """Return True when *obj* is a non-empty response object that matches the JSON schema."""
    if not obj:
        return False
    for attribute in ('headers', 'content', 'text', 'response'):
        assert hasattr(obj, attribute)
    json_schema_validate('jsd_836fd707ac0454be8fecc73a918a27b6_v3_0_0').validate(obj.response)
    return True
def delete_network_access_policy_set_global_exception_rule_by_id(api):
    """Call the delete-by-id endpoint with a placeholder id."""
    return api.network_access_authorization_global_exception_rules.delete_network_access_policy_set_global_exception_rule_by_id(
        id='string'
    )
@pytest.mark.network_access_authorization_global_exception_rules
def test_delete_network_access_policy_set_global_exception_rule_by_id(api, validator):
    """The delete response must validate; known API-side failures are tolerated."""
    try:
        response = delete_network_access_policy_set_global_exception_rule_by_id(api)
        assert is_valid_delete_network_access_policy_set_global_exception_rule_by_id(validator, response)
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest)):
            print("ERROR: {error}".format(error=original_e))
            raise original_e
def delete_network_access_policy_set_global_exception_rule_by_id_default(api):
    """Call the delete-by-id endpoint with only the required id argument."""
    return api.network_access_authorization_global_exception_rules.delete_network_access_policy_set_global_exception_rule_by_id(
        id='string'
    )
@pytest.mark.network_access_authorization_global_exception_rules
def test_delete_network_access_policy_set_global_exception_rule_by_id_default(api, validator):
    """Default-argument variant; TypeError is additionally tolerated."""
    try:
        response = delete_network_access_policy_set_global_exception_rule_by_id_default(api)
        assert is_valid_delete_network_access_policy_set_global_exception_rule_by_id(validator, response)
    except Exception as original_e:
        with pytest.raises((JsonSchemaException, MalformedRequest, TypeError)):
            raise original_e
| 44.270423
| 891
| 0.759672
| 1,903
| 15,716
| 5.839727
| 0.114556
| 0.106452
| 0.094034
| 0.108881
| 0.873122
| 0.873122
| 0.868982
| 0.868982
| 0.865113
| 0.839647
| 0
| 0.010723
| 0.157356
| 15,716
| 354
| 892
| 44.39548
| 0.828438
| 0.076164
| 0
| 0.708955
| 0
| 0
| 0.118592
| 0.017778
| 0
| 0
| 0
| 0
| 0.134328
| 1
| 0.11194
| false
| 0
| 0.018657
| 0
| 0.220149
| 0.022388
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e3f65aa476aab5fec4ffe895495cff609ff44328
| 247
|
py
|
Python
|
coreli/__init__.py
|
tcosmo/coreli
|
21bdae9225033656ee14bb0598a64e926b61b93a
|
[
"MIT"
] | null | null | null |
coreli/__init__.py
|
tcosmo/coreli
|
21bdae9225033656ee14bb0598a64e926b61b93a
|
[
"MIT"
] | null | null | null |
coreli/__init__.py
|
tcosmo/coreli
|
21bdae9225033656ee14bb0598a64e926b61b93a
|
[
"MIT"
] | null | null | null |
from coreli.base_conversion_routines import *
from coreli.routines import *
from coreli.modular_routines import *
from coreli.parity_vectors import *
from coreli.predecessors import *
from coreli.padic_routines import *
from coreli.cycles import *
| 35.285714
| 45
| 0.834008
| 33
| 247
| 6.090909
| 0.363636
| 0.348259
| 0.477612
| 0.477612
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.109312
| 247
| 7
| 46
| 35.285714
| 0.913636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
540f1232120fac3a04cf000da3812bd837c464df
| 3,579
|
py
|
Python
|
example/dj/apps/issue_tracker/tests/field_permissions.py
|
Formulka/django-is-core
|
e7f3e32e1833f5fe246b4a0417edf78f470adfea
|
[
"BSD-3-Clause"
] | 16
|
2015-06-25T20:00:30.000Z
|
2021-11-24T17:27:02.000Z
|
example/dj/apps/issue_tracker/tests/field_permissions.py
|
Formulka/django-is-core
|
e7f3e32e1833f5fe246b4a0417edf78f470adfea
|
[
"BSD-3-Clause"
] | 65
|
2015-01-17T09:41:39.000Z
|
2022-02-24T14:30:14.000Z
|
example/dj/apps/issue_tracker/tests/field_permissions.py
|
Formulka/django-is-core
|
e7f3e32e1833f5fe246b4a0417edf78f470adfea
|
[
"BSD-3-Clause"
] | 19
|
2016-01-14T14:30:21.000Z
|
2021-12-30T17:46:23.000Z
|
from germanium.test_cases.default import GermaniumTestCase
from germanium.tools import assert_true, assert_false, assert_equal, assert_not_equal
from is_core.auth.permissions import FieldsListPermission, FieldsSetPermission, PermissionsSet, AllowAny
from .permissions import ObjIsNotNonePermission
# Public API of this module; keeps `from ... import *` limited to the test case.
__all__ = (
    'FieldPermissionsTestCase',
)
class FieldPermissionsTestCase(GermaniumTestCase):
    """Tests for field-level permissions (FieldsListPermission, FieldsSetPermission).

    NOTE(review): the third argument to get_disallowed_fields/get_readonly_fields
    appears to be the object under test — ``None`` makes ObjIsNotNonePermission
    deny and ``''`` makes it grant; confirm against is_core.auth.permissions.
    """

    def test_fields_list_permission_sould_return_right_disallowed_fields(self):
        """With obj=None the read permission denies, so all fields are disallowed."""
        field_list_permission = FieldsListPermission(
            permission=PermissionsSet(
                read=ObjIsNotNonePermission()
            ),
            fields=('a', 'b', 'c')
        )
        assert_equal(
            field_list_permission.get_disallowed_fields(None, None, None),
            {'a', 'b', 'c'}
        )
        assert_equal(
            field_list_permission.get_disallowed_fields(None, None, ''),
            set()
        )

    def test_fields_list_permission_sould_return_right_readonly_fields(self):
        """With read allowed but edit denied (obj=None), all fields become read-only."""
        field_list_permission = FieldsListPermission(
            permission=PermissionsSet(
                read=AllowAny(),
                edit=ObjIsNotNonePermission()
            ),
            fields=('a', 'b', 'c')
        )
        assert_equal(
            field_list_permission.get_readonly_fields(None, None, None),
            {'a', 'b', 'c'}
        )
        assert_equal(
            field_list_permission.get_readonly_fields(None, None, ''),
            set()
        )

    def test_fields_set_permission_sould_return_right_disallowed_fields(self):
        """Disallowed fields from several list permissions are merged by the set."""
        field_set_permission = FieldsSetPermission(
            # read denied on obj=None -> a, b, c disallowed
            FieldsListPermission(
                permission=PermissionsSet(
                    read=ObjIsNotNonePermission()
                ),
                fields=('a', 'b', 'c')
            ),
            # read always allowed -> never disallowed
            FieldsListPermission(
                permission=PermissionsSet(
                    read=AllowAny(),
                    edit=ObjIsNotNonePermission()
                ),
                fields=('a', 'b', 'd')
            ),
            # read denied on obj=None -> e disallowed
            FieldsListPermission(
                permission=PermissionsSet(
                    read=ObjIsNotNonePermission()
                ),
                fields=('e',)
            ),
        )
        assert_equal(
            field_set_permission.get_disallowed_fields(None, None, None),
            {'a', 'b', 'c', 'e'}
        )
        assert_equal(
            field_set_permission.get_disallowed_fields(None, None, ''),
            set()
        )

    def test_fields_set_permission_sould_return_right_readonly_fields(self):
        """Read-only fields from several list permissions are merged by the set."""
        field_set_permission = FieldsSetPermission(
            FieldsListPermission(
                permission=PermissionsSet(
                    read=ObjIsNotNonePermission()
                ),
                fields=('a', 'b', 'c')
            ),
            FieldsListPermission(
                permission=PermissionsSet(
                    read=AllowAny(),
                    edit=ObjIsNotNonePermission()
                ),
                fields=('a', 'b', 'd')
            ),
            FieldsListPermission(
                permission=PermissionsSet(
                    read=ObjIsNotNonePermission()
                ),
                fields=('e',)
            ),
        )
        assert_equal(
            field_set_permission.get_readonly_fields(None, None, None),
            {'a', 'b', 'c', 'd', 'e'}
        )
        assert_equal(
            field_set_permission.get_readonly_fields(None, None, ''),
            {'a', 'b', 'c', 'e'}
        )
| 32.536364
| 104
| 0.539536
| 277
| 3,579
| 6.642599
| 0.166065
| 0.052174
| 0.014674
| 0.208696
| 0.820109
| 0.820109
| 0.817935
| 0.817935
| 0.758696
| 0.701087
| 0
| 0
| 0.362951
| 3,579
| 109
| 105
| 32.834862
| 0.807018
| 0
| 0
| 0.666667
| 0
| 0
| 0.017603
| 0.006706
| 0
| 0
| 0
| 0
| 0.090909
| 1
| 0.040404
| false
| 0
| 0.040404
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5423803a2cc3b1a8539596382a4df2f95c337c39
| 9,702
|
py
|
Python
|
diffxpy/unit_test/test_partition.py
|
adkinsrs/diffxpy
|
0464d2ab1faa0947d90e29f93018ec678e050585
|
[
"BSD-3-Clause"
] | 111
|
2018-10-30T00:07:59.000Z
|
2022-03-29T04:33:46.000Z
|
diffxpy/unit_test/test_partition.py
|
adkinsrs/diffxpy
|
0464d2ab1faa0947d90e29f93018ec678e050585
|
[
"BSD-3-Clause"
] | 132
|
2018-10-27T01:43:58.000Z
|
2022-01-23T03:25:14.000Z
|
diffxpy/unit_test/test_partition.py
|
adkinsrs/diffxpy
|
0464d2ab1faa0947d90e29f93018ec678e050585
|
[
"BSD-3-Clause"
] | 31
|
2018-11-23T09:30:45.000Z
|
2021-07-22T01:54:25.000Z
|
import unittest
import logging
import numpy as np
import pandas as pd
import scipy.stats as stats
from batchglm.api.models.numpy.glm_nb import Simulator
import diffxpy.api as de
class TestPartitionNull(unittest.TestCase):
    """Null-model tests for partitioned differential-expression tests.

    Each test simulates data under the null model, partitions it by a synthetic
    "cond" column, runs one of the partition-level tests (wald, lrt, t_test,
    rank_test), and checks with a Kolmogorov-Smirnov test that the resulting
    p-values are uniformly distributed.
    """

    def test_null_distribution_wald(self, n_cells: int = 4000, n_genes: int = 200):
        """
        Test if Partition.wald() generates a uniform p-value distribution
        if it is given data simulated based on the null model. Returns the p-value
        of the two-side Kolmgorov-Smirnov test for equality of the observed
        p-value distribution and a uniform distribution.

        :param n_cells: Number of cells to simulate (number of observations per test).
        :param n_genes: Number of genes to simulate (number of tests).
        """
        logging.getLogger("tensorflow").setLevel(logging.ERROR)
        logging.getLogger("batchglm").setLevel(logging.WARNING)
        logging.getLogger("diffxpy").setLevel(logging.WARNING)

        sim = Simulator(num_observations=n_cells, num_features=n_genes)
        sim.generate_sample_description(num_batches=0, num_conditions=2)
        sim.generate()

        sample_description = pd.DataFrame({
            "covar1": np.random.randint(2, size=sim.nobs),
            "covar2": np.random.randint(2, size=sim.nobs)
        })
        sample_description["cond"] = sim.sample_description["condition"].values

        partition = de.test.partition(
            data=sim.x,
            parts="cond",
            sample_description=sample_description
        )
        det = partition.wald(
            factor_loc_totest="covar1",
            formula_loc="~ 1 + covar1 + covar2",
            training_strategy="DEFAULT",
            dtype="float64"
        )
        _ = det.summary()

        # Compare p-value distribution under null model against uniform distribution.
        pval_h0 = stats.kstest(det.pval.flatten(), 'uniform').pvalue

        logging.getLogger("diffxpy").info('KS-test pvalue for null model match of wald(): %f' % pval_h0)
        assert pval_h0 > 0.05, "KS-Test failed: pval_h0=%f is <= 0.05!" % np.round(pval_h0, 5)

        return True

    def test_null_distribution_wald_multi(self, n_cells: int = 4000, n_genes: int = 200):
        """
        Test if de.wald() (multivariate mode) generates a uniform p-value distribution
        if it is given data simulated based on the null model. Returns the p-value
        of the two-side Kolmgorov-Smirnov test for equality of the observed
        p-value distribution and a uniform distribution.

        :param n_cells: Number of cells to simulate (number of observations per test).
        :param n_genes: Number of genes to simulate (number of tests).
        """
        logging.getLogger("tensorflow").setLevel(logging.ERROR)
        logging.getLogger("batchglm").setLevel(logging.WARNING)
        logging.getLogger("diffxpy").setLevel(logging.WARNING)

        sim = Simulator(num_observations=n_cells, num_features=n_genes)
        sim.generate_sample_description(num_batches=0, num_conditions=2)
        sim.generate()

        # covar1 has 4 levels here, exercising the multivariate Wald test.
        sample_description = pd.DataFrame({
            "covar1": np.random.randint(4, size=sim.nobs),
            "covar2": np.random.randint(2, size=sim.nobs)
        })
        sample_description["cond"] = sim.sample_description["condition"].values

        partition = de.test.partition(
            data=sim.x,
            parts="cond",
            sample_description=sample_description
        )
        det = partition.wald(
            factor_loc_totest="covar1",
            formula_loc="~ 1 + covar1 + covar2",
            training_strategy="DEFAULT",
            dtype="float64"
        )
        _ = det.summary()

        # Compare p-value distribution under null model against uniform distribution.
        pval_h0 = stats.kstest(det.pval.flatten(), 'uniform').pvalue

        logging.getLogger("diffxpy").info('KS-test pvalue for null model match of wald(): %f' % pval_h0)
        assert pval_h0 > 0.05, "KS-Test failed: pval_h0=%f is <= 0.05!" % np.round(pval_h0, 5)

        return True

    def test_null_distribution_lrt(self, n_cells: int = 4000, n_genes: int = 200):
        """
        Test if de.lrt() generates a uniform p-value distribution
        if it is given data simulated based on the null model. Returns the p-value
        of the two-side Kolmgorov-Smirnov test for equality of the observed
        p-value distribution and a uniform distribution.

        :param n_cells: Number of cells to simulate (number of observations per test).
        :param n_genes: Number of genes to simulate (number of tests).
        """
        logging.getLogger("tensorflow").setLevel(logging.ERROR)
        logging.getLogger("batchglm").setLevel(logging.WARNING)
        logging.getLogger("diffxpy").setLevel(logging.WARNING)

        sim = Simulator(num_observations=n_cells, num_features=n_genes)
        sim.generate_sample_description(num_batches=0, num_conditions=2)
        sim.generate()

        sample_description = pd.DataFrame({
            "covar1": np.random.randint(2, size=sim.nobs),
            "covar2": np.random.randint(2, size=sim.nobs)
        })
        sample_description["cond"] = sim.sample_description["condition"].values

        partition = de.test.partition(
            data=sim.x,
            parts="cond",
            sample_description=sample_description
        )
        det = partition.lrt(
            full_formula_loc="~ 1 + covar1",
            full_formula_scale="~ 1",
            reduced_formula_loc="~ 1",
            reduced_formula_scale="~ 1",
            training_strategy="DEFAULT",
            dtype="float64"
        )
        _ = det.summary()

        # Compare p-value distribution under null model against uniform distribution.
        pval_h0 = stats.kstest(det.pval.flatten(), 'uniform').pvalue

        logging.getLogger("diffxpy").info('KS-test pvalue for null model match of lrt(): %f' % pval_h0)
        assert pval_h0 > 0.05, "KS-Test failed: pval_h0=%f is <= 0.05!" % np.round(pval_h0, 5)

        return True

    def test_null_distribution_ttest(self, n_cells: int = 4000, n_genes: int = 200):
        """
        Test if de.t_test() generates a uniform p-value distribution
        if it is given data simulated based on the null model. Returns the p-value
        of the two-side Kolmgorov-Smirnov test for equality of the observed
        p-value distribution and a uniform distribution.

        :param n_cells: Number of cells to simulate (number of observations per test).
        :param n_genes: Number of genes to simulate (number of tests).
        """
        logging.getLogger("tensorflow").setLevel(logging.ERROR)
        logging.getLogger("batchglm").setLevel(logging.WARNING)
        logging.getLogger("diffxpy").setLevel(logging.WARNING)

        sim = Simulator(num_observations=n_cells, num_features=n_genes)
        sim.generate_sample_description(num_batches=0, num_conditions=2)
        sim.generate()

        sample_description = pd.DataFrame({
            "covar1": np.random.randint(2, size=sim.nobs)
        })
        sample_description["cond"] = sim.sample_description["condition"].values

        partition = de.test.partition(
            data=sim.x,
            parts="cond",
            sample_description=sample_description
        )
        det = partition.t_test(
            grouping="covar1",
            is_logged=False,
            dtype="float64"
        )
        # Fix: drop the unused `summary` binding for consistency with the other tests.
        _ = det.summary()

        # Compare p-value distribution under null model against uniform distribution.
        pval_h0 = stats.kstest(det.pval.flatten(), 'uniform').pvalue

        logging.getLogger("diffxpy").info('KS-test pvalue for null model match of t_test(): %f' % pval_h0)
        assert pval_h0 > 0.05, "KS-Test failed: pval_h0=%f is <= 0.05!" % np.round(pval_h0, 5)

        return True

    def test_null_distribution_rank(self, n_cells: int = 4000, n_genes: int = 200):
        """
        Test if rank_test() generates a uniform p-value distribution
        if it is given data simulated based on the null model. Returns the p-value
        of the two-side Kolmgorov-Smirnov test for equality of the observed
        p-value distribution and a uniform distribution.

        :param n_cells: Number of cells to simulate (number of observations per test).
        :param n_genes: Number of genes to simulate (number of tests).
        """
        logging.getLogger("tensorflow").setLevel(logging.ERROR)
        logging.getLogger("batchglm").setLevel(logging.WARNING)
        logging.getLogger("diffxpy").setLevel(logging.WARNING)

        sim = Simulator(num_observations=n_cells, num_features=n_genes)
        sim.generate_sample_description(num_batches=0, num_conditions=2)
        sim.generate()

        sample_description = pd.DataFrame({
            "covar1": np.random.randint(2, size=sim.nobs)
        })
        sample_description["cond"] = sim.sample_description["condition"].values

        partition = de.test.partition(
            data=sim.x,
            parts="cond",
            sample_description=sample_description
        )
        det = partition.rank_test(
            grouping="covar1",
            dtype="float64"
        )
        # Fix: drop the unused `summary` binding for consistency with the other tests.
        _ = det.summary()

        # Compare p-value distribution under null model against uniform distribution.
        pval_h0 = stats.kstest(det.pval.flatten(), 'uniform').pvalue

        logging.getLogger("diffxpy").info('KS-test pvalue for null model match of rank_test(): %f' % pval_h0)
        assert pval_h0 > 0.05, "KS-Test failed: pval_h0=%f is <= 0.05!" % np.round(pval_h0, 5)

        return True
if __name__ == '__main__':
    # Allow running this test module directly via the unittest CLI.
    unittest.main()
| 40.764706
| 109
| 0.643063
| 1,215
| 9,702
| 5.001646
| 0.116049
| 0.083923
| 0.04443
| 0.02962
| 0.933026
| 0.927925
| 0.927925
| 0.927925
| 0.927925
| 0.927925
| 0
| 0.020163
| 0.253659
| 9,702
| 237
| 110
| 40.936709
| 0.819086
| 0.245413
| 0
| 0.748299
| 0
| 0
| 0.13141
| 0
| 0
| 0
| 0
| 0
| 0.034014
| 1
| 0.034014
| false
| 0
| 0.047619
| 0
| 0.122449
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
58181155eafc58102ee5c94d3437219775f52f36
| 14,071
|
py
|
Python
|
tests/e2e/test_e2e_skip_trained.py
|
vishalbelsare/emmental
|
040ff13752a8443485abe5f664d7e7df2f30f894
|
[
"MIT"
] | 75
|
2019-06-22T23:40:20.000Z
|
2021-06-05T19:12:15.000Z
|
tests/e2e/test_e2e_skip_trained.py
|
vishalbelsare/emmental
|
040ff13752a8443485abe5f664d7e7df2f30f894
|
[
"MIT"
] | 38
|
2019-07-02T21:05:28.000Z
|
2021-03-27T08:55:05.000Z
|
tests/e2e/test_e2e_skip_trained.py
|
vishalbelsare/emmental
|
040ff13752a8443485abe5f664d7e7df2f30f894
|
[
"MIT"
] | 20
|
2019-07-11T15:10:46.000Z
|
2021-05-09T14:03:53.000Z
|
"""Emmental e2e with skipping trained data."""
import logging
import shutil
from functools import partial
import numpy as np
import torch
from torch import nn
from torch.nn import functional as F
from emmental import (
Action,
EmmentalDataLoader,
EmmentalDataset,
EmmentalLearner,
EmmentalModel,
EmmentalTask,
Meta,
Scorer,
init,
)
# Module-level logger named after this module.
logger = logging.getLogger(__name__)
def test_e2e_skip_trained_step(caplog):
    """Run an end-to-end test.

    Trains a small model for 10 steps, then restarts training from the saved
    checkpoint with ``skip_learned_data=True`` and ``steps_learned=10`` and
    verifies that continued training improves the test loss past 0.4.
    """
    caplog.set_level(logging.INFO)

    dirpath = "temp_test_e2e_skip_trained"
    use_exact_log_path = True
    Meta.reset()
    init(dirpath, use_exact_log_path=use_exact_log_path)

    # Generate synthetic data: label is 1 when x0 > x1 + 0.25.
    N = 500
    X = np.random.random((N, 2)) * 2 - 1
    Y = (X[:, 0] > X[:, 1] + 0.25).astype(int)
    X = [torch.Tensor(X[i]) for i in range(N)]

    # Create dataset and dataloader (80/10/10 train/dev/test split).
    X_train, X_dev, X_test = (
        X[: int(0.8 * N)],
        X[int(0.8 * N) : int(0.9 * N)],
        X[int(0.9 * N) :],
    )
    Y_train, Y_dev, Y_test = (
        torch.tensor(Y[: int(0.8 * N)]),
        torch.tensor(Y[int(0.8 * N) : int(0.9 * N)]),
        torch.tensor(Y[int(0.9 * N) :]),
    )

    train_dataset = EmmentalDataset(
        name="synthetic",
        X_dict={"data": X_train},
        Y_dict={"label1": Y_train},
    )
    dev_dataset = EmmentalDataset(
        name="synthetic",
        X_dict={"data": X_dev},
        Y_dict={"label1": Y_dev},
    )
    test_dataset = EmmentalDataset(
        name="synthetic",
        X_dict={"data": X_test},
        Y_dict={"label1": Y_test},
    )

    task_to_label_dict = {"task1": "label1"}

    train_dataloader = EmmentalDataLoader(
        task_to_label_dict=task_to_label_dict,
        dataset=train_dataset,
        split="train",
        batch_size=10,
    )
    dev_dataloader = EmmentalDataLoader(
        task_to_label_dict=task_to_label_dict,
        dataset=dev_dataset,
        split="valid",
        batch_size=10,
    )
    test_dataloader = EmmentalDataLoader(
        task_to_label_dict=task_to_label_dict,
        dataset=test_dataset,
        split="test",
        batch_size=10,
    )

    # Create task
    def ce_loss(task_name, immediate_output_dict, Y):
        # Cross-entropy between the task head's logits and the gold labels.
        module_name = f"{task_name}_pred_head"
        return F.cross_entropy(immediate_output_dict[module_name], Y)

    def output(task_name, immediate_output_dict):
        # Softmax probabilities from the task head's logits.
        module_name = f"{task_name}_pred_head"
        return F.softmax(immediate_output_dict[module_name], dim=1)

    task_metrics = {"task1": ["accuracy"]}

    class IdentityModule(nn.Module):
        # Pass-through module that wraps its input in a dict.
        def __init__(self):
            """Initialize IdentityModule."""
            super().__init__()

        def forward(self, input):
            return {"out": input}

    tasks = [
        EmmentalTask(
            name=task_name,
            module_pool=nn.ModuleDict(
                {
                    "input_module0": IdentityModule(),
                    "input_module1": nn.Linear(2, 8),
                    f"{task_name}_pred_head": nn.Linear(8, 2),
                }
            ),
            task_flow=[
                Action(
                    name="input", module="input_module0", inputs=[("_input_", "data")]
                ),
                Action(
                    name="input1", module="input_module1", inputs=[("input", "out")]
                ),
                Action(
                    name=f"{task_name}_pred_head",
                    module=f"{task_name}_pred_head",
                    inputs=[("input1", 0)],
                ),
            ],
            # NOTE(review): -1 presumably pins input_module0 to CPU — confirm
            # against emmental's module_device semantics.
            module_device={"input_module0": -1},
            loss_func=partial(ce_loss, task_name),
            output_func=partial(output, task_name),
            action_outputs=None,
            scorer=Scorer(metrics=task_metrics[task_name]),
            require_prob_for_eval=False,
            require_pred_for_eval=True,
        )
        for task_name in ["task1"]
    ]

    # Build model
    model = EmmentalModel(name="all", tasks=tasks)

    # Create learner
    emmental_learner = EmmentalLearner()

    # Phase 1: train 10 steps from scratch, checkpointing on train loss.
    config = {
        "meta_config": {"seed": 0, "verbose": True},
        "learner_config": {
            "n_steps": 10,
            "epochs_learned": 0,
            "steps_learned": 0,
            "skip_learned_data": False,
            "online_eval": True,
            "optimizer_config": {"lr": 0.01, "grad_clip": 100},
        },
        "logging_config": {
            "counter_unit": "batch",
            "evaluation_freq": 5,
            "writer_config": {"writer": "json", "verbose": True},
            "checkpointing": True,
            "checkpointer_config": {
                "checkpoint_path": None,
                "checkpoint_freq": 1,
                "checkpoint_metric": {"model/all/train/loss": "min"},
                "checkpoint_task_metrics": None,
                "checkpoint_runway": 1,
                "checkpoint_all": False,
                "clear_intermediate_checkpoints": True,
                "clear_all_checkpoints": False,
            },
        },
    }
    Meta.update_config(config)

    # Learning
    emmental_learner.learn(
        model,
        [train_dataloader, dev_dataloader],
    )

    test_score = model.score(test_dataloader)

    # After only 10 steps the model should still be poorly fit.
    assert test_score["task1/synthetic/test/loss"] > 0.4

    Meta.reset()
    init(dirpath, use_exact_log_path=use_exact_log_path)

    # Phase 2: resume from the phase-1 checkpoint, skipping the first 10
    # already-learned steps, and train up to 40 total steps.
    config = {
        "meta_config": {"seed": 0, "verbose": True},
        "learner_config": {
            "n_steps": 40,
            "epochs_learned": 0,
            "steps_learned": 10,
            "skip_learned_data": True,
            "online_eval": False,
            "optimizer_config": {"lr": 0.01, "grad_clip": 100},
            "optimizer_path": (
                f"{dirpath}/" "best_model_model_all_train_loss.optimizer.pth"
            ),
            "scheduler_path": (
                f"{dirpath}/" "best_model_model_all_train_loss.scheduler.pth"
            ),
        },
        "model_config": {
            "model_path": f"{dirpath}/best_model_model_all_train_loss.model.pth"
        },
        "logging_config": {
            "counter_unit": "batch",
            "evaluation_freq": 5,
            "writer_config": {"writer": "json", "verbose": True},
            "checkpointing": True,
            "checkpointer_config": {
                "checkpoint_path": None,
                "checkpoint_freq": 1,
                "checkpoint_metric": {"model/all/train/loss": "min"},
                "checkpoint_task_metrics": None,
                "checkpoint_runway": 1,
                "checkpoint_all": False,
                "clear_intermediate_checkpoints": True,
                "clear_all_checkpoints": False,
            },
        },
    }
    Meta.update_config(config)
    if Meta.config["model_config"]["model_path"]:
        model.load(Meta.config["model_config"]["model_path"])

    # Learning
    emmental_learner.learn(
        model,
        [train_dataloader, dev_dataloader],
    )

    test_score = model.score(test_dataloader)

    # Continued training should drive the test loss down.
    assert test_score["task1/synthetic/test/loss"] <= 0.4

    shutil.rmtree(dirpath)
def test_e2e_skip_trained_epoch(caplog):
    """End-to-end test of resuming training while skipping learned epochs.

    Phase 1 trains a tiny classifier for one epoch with checkpointing
    enabled.  Phase 2 re-initializes Emmental with ``epochs_learned=1`` and
    ``skip_learned_data=True``, restores the best-checkpoint model /
    optimizer / scheduler state from disk, continues training to five
    epochs, and asserts the resumed run converges (test loss <= 0.4).
    """
    caplog.set_level(logging.INFO)
    dirpath = "temp_test_e2e_skip_trained"
    use_exact_log_path = True
    Meta.reset()
    init(dirpath, use_exact_log_path=use_exact_log_path)
    # Generate synthetic data: a linearly separable 2-D problem where the
    # label is 1 iff x0 > x1 + 0.25.
    N = 500
    X = np.random.random((N, 2)) * 2 - 1
    Y = (X[:, 0] > X[:, 1] + 0.25).astype(int)
    X = [torch.Tensor(X[i]) for i in range(N)]
    # Create dataset and dataloader with an 80/10/10 train/dev/test split.
    X_train, X_dev, X_test = (
        X[: int(0.8 * N)],
        X[int(0.8 * N) : int(0.9 * N)],
        X[int(0.9 * N) :],
    )
    Y_train, Y_dev, Y_test = (
        torch.tensor(Y[: int(0.8 * N)]),
        torch.tensor(Y[int(0.8 * N) : int(0.9 * N)]),
        torch.tensor(Y[int(0.9 * N) :]),
    )
    train_dataset = EmmentalDataset(
        name="synthetic",
        X_dict={"data": X_train},
        Y_dict={"label1": Y_train},
    )
    dev_dataset = EmmentalDataset(
        name="synthetic",
        X_dict={"data": X_dev},
        Y_dict={"label1": Y_dev},
    )
    test_dataset = EmmentalDataset(
        name="synthetic",
        X_dict={"data": X_test},
        Y_dict={"label1": Y_test},
    )
    task_to_label_dict = {"task1": "label1"}
    train_dataloader = EmmentalDataLoader(
        task_to_label_dict=task_to_label_dict,
        dataset=train_dataset,
        split="train",
        batch_size=10,
    )
    dev_dataloader = EmmentalDataLoader(
        task_to_label_dict=task_to_label_dict,
        dataset=dev_dataset,
        split="valid",
        batch_size=10,
    )
    test_dataloader = EmmentalDataLoader(
        task_to_label_dict=task_to_label_dict,
        dataset=test_dataset,
        split="test",
        batch_size=10,
    )

    # Create task
    def ce_loss(task_name, immediate_output_dict, Y):
        """Cross-entropy loss computed on the task head's raw logits."""
        module_name = f"{task_name}_pred_head"
        return F.cross_entropy(immediate_output_dict[module_name], Y)

    def output(task_name, immediate_output_dict):
        """Softmax probabilities from the task head's raw logits."""
        module_name = f"{task_name}_pred_head"
        return F.softmax(immediate_output_dict[module_name], dim=1)

    task_metrics = {"task1": ["accuracy"]}

    class IdentityModule(nn.Module):
        """Pass-through module; wraps its input in a {"out": ...} dict."""

        def __init__(self):
            """Initialize IdentityModule."""
            super().__init__()

        def forward(self, input):
            return {"out": input}

    # One task: identity -> Linear(2, 8) -> Linear(8, 2) prediction head.
    # module_device -1 pins the identity module to CPU.
    tasks = [
        EmmentalTask(
            name=task_name,
            module_pool=nn.ModuleDict(
                {
                    "input_module0": IdentityModule(),
                    "input_module1": nn.Linear(2, 8),
                    f"{task_name}_pred_head": nn.Linear(8, 2),
                }
            ),
            task_flow=[
                Action(
                    name="input", module="input_module0", inputs=[("_input_", "data")]
                ),
                Action(
                    name="input1", module="input_module1", inputs=[("input", "out")]
                ),
                Action(
                    name=f"{task_name}_pred_head",
                    module=f"{task_name}_pred_head",
                    inputs=[("input1", 0)],
                ),
            ],
            module_device={"input_module0": -1},
            loss_func=partial(ce_loss, task_name),
            output_func=partial(output, task_name),
            action_outputs=None,
            scorer=Scorer(metrics=task_metrics[task_name]),
            require_prob_for_eval=False,
            require_pred_for_eval=True,
        )
        for task_name in ["task1"]
    ]
    # Build model
    model = EmmentalModel(name="all", tasks=tasks)
    # Create learner
    emmental_learner = EmmentalLearner()
    # Phase 1: train a single epoch from scratch, checkpointing on train loss.
    config = {
        "meta_config": {"seed": 0, "verbose": True},
        "learner_config": {
            "n_epochs": 1,
            "epochs_learned": 0,
            "steps_learned": 0,
            "skip_learned_data": False,
            "online_eval": True,
            "optimizer_config": {"lr": 0.01, "grad_clip": 100},
        },
        "logging_config": {
            "counter_unit": "batch",
            "evaluation_freq": 5,
            "writer_config": {
                "writer": "json",
                "write_loss_per_step": True,
                "verbose": True,
            },
            "checkpointing": True,
            "checkpointer_config": {
                "checkpoint_path": None,
                "checkpoint_freq": 1,
                "checkpoint_metric": {"model/all/train/loss": "min"},
                "checkpoint_task_metrics": None,
                "checkpoint_runway": 1,
                "checkpoint_all": False,
                "clear_intermediate_checkpoints": True,
                "clear_all_checkpoints": False,
            },
        },
    }
    Meta.update_config(config)
    # Learning
    emmental_learner.learn(
        model,
        [train_dataloader, dev_dataloader],
    )
    test_score = model.score(test_dataloader)
    # After only one epoch the model should not have fully converged yet.
    assert test_score["task1/synthetic/test/loss"] > 0.3
    # Phase 2: re-init and resume, marking epoch 1 as already learned and
    # pointing model/optimizer/scheduler paths at the phase-1 checkpoints.
    Meta.reset()
    init(dirpath, use_exact_log_path=use_exact_log_path)
    config = {
        "meta_config": {"seed": 0, "verbose": False},
        "learner_config": {
            "n_epochs": 5,
            "epochs_learned": 1,
            "steps_learned": 0,
            "skip_learned_data": True,
            "online_eval": False,
            "optimizer_config": {"lr": 0.01, "grad_clip": 100},
            "optimizer_path": (
                f"{dirpath}/" "best_model_model_all_train_loss.optimizer.pth"
            ),
            "scheduler_path": (
                f"{dirpath}/" "best_model_model_all_train_loss.scheduler.pth"
            ),
        },
        "model_config": {
            "model_path": f"{dirpath}/best_model_model_all_train_loss.model.pth"
        },
        "logging_config": {
            "counter_unit": "batch",
            "evaluation_freq": 5,
            "writer_config": {
                "writer": "json",
                "write_loss_per_step": True,
                "verbose": True,
            },
            "checkpointing": True,
            "checkpointer_config": {
                "checkpoint_path": None,
                "checkpoint_freq": 1,
                "checkpoint_metric": {"model/all/train/loss": "min"},
                "checkpoint_task_metrics": None,
                "checkpoint_runway": 1,
                "checkpoint_all": False,
                "clear_intermediate_checkpoints": True,
                "clear_all_checkpoints": False,
            },
        },
    }
    Meta.update_config(config)
    # Restore the best phase-1 model weights before resuming training.
    if Meta.config["model_config"]["model_path"]:
        model.load(Meta.config["model_config"]["model_path"])
    # Learning
    emmental_learner.learn(
        model,
        [train_dataloader, dev_dataloader],
    )
    test_score = model.score(test_dataloader)
    # The resumed run (4 more epochs) should converge on this easy problem.
    assert test_score["task1/synthetic/test/loss"] <= 0.4
    shutil.rmtree(dirpath)
| 29.560924
| 86
| 0.540118
| 1,501
| 14,071
| 4.748834
| 0.117255
| 0.026936
| 0.021605
| 0.029461
| 0.949776
| 0.940236
| 0.938412
| 0.938412
| 0.938412
| 0.938412
| 0
| 0.019139
| 0.331604
| 14,071
| 475
| 87
| 29.623158
| 0.738756
| 0.02594
| 0
| 0.801527
| 0
| 0
| 0.216434
| 0.068779
| 0
| 0
| 0
| 0
| 0.010178
| 1
| 0.025445
| false
| 0
| 0.020356
| 0.005089
| 0.066158
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5819b317f141fe7590704b2114ba33dc890f2d27
| 146
|
py
|
Python
|
cache_it/__init__.py
|
propername/cache_it
|
68296c8d6809eb011ec6bdc81da5142f23df3211
|
[
"MIT"
] | null | null | null |
cache_it/__init__.py
|
propername/cache_it
|
68296c8d6809eb011ec6bdc81da5142f23df3211
|
[
"MIT"
] | null | null | null |
cache_it/__init__.py
|
propername/cache_it
|
68296c8d6809eb011ec6bdc81da5142f23df3211
|
[
"MIT"
] | null | null | null |
from cache_it.cache_it import cache_me # noqa
from cache_it.cache_it import CacheFacility # noqa
from cache_it.cache_it import facility # noqa
| 36.5
| 51
| 0.815068
| 25
| 146
| 4.48
| 0.32
| 0.375
| 0.294643
| 0.428571
| 0.714286
| 0.714286
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0.143836
| 146
| 3
| 52
| 48.666667
| 0.896
| 0.09589
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
582424f5df64d0d529a305dcbb7fad5cbdd6728b
| 5,995
|
py
|
Python
|
main.py
|
D00dleman/python_one_string_sokoban
|
3d1c3f00d691d874ae99562c2d0c0ecabdea679c
|
[
"MIT"
] | null | null | null |
main.py
|
D00dleman/python_one_string_sokoban
|
3d1c3f00d691d874ae99562c2d0c0ecabdea679c
|
[
"MIT"
] | null | null | null |
main.py
|
D00dleman/python_one_string_sokoban
|
3d1c3f00d691d874ae99562c2d0c0ecabdea679c
|
[
"MIT"
] | null | null | null |
# One String Sokoban: the whole game is a single expression of immediately
# invoked lambdas (deliberately obfuscated; left verbatim).  Reading guide,
# outermost call to innermost argument:
#   (lambda main: ...)        -- prints the intro/help screen, waits for
#                                Return, then starts the game loop
#   (lambda game_cycle, ...)  -- the loop: run one cycle via state['gc'],
#                                recurse; when next_level_flag is set, load
#                                levels[next_level] or end on -1 ("Game Over!")
#   ([{...}, {...}, {...}])   -- the three level definitions (field_size,
#                                pers start, boxes, walls, places, next_level)
#   (save_input=..., ...)     -- the game "state": every helper (rendering,
#                                input parsing, collision tests, functional
#                                state updates) plus the mutable fields, all
#                                threaded through **state on each call.
# NOTE(review): each input step recurses rather than loops, so a long session
# can exhaust Python's recursion limit -- confirm before reuse.
(lambda main:(lambda levels:(lambda **arguments: main(main, levels, **arguments['save_input'](arguments['save_print'](arguments['clear']("One String Sokoban\n\nw - up\na - left\ns - down\nd - right\nr - restart\ne - exit\n\nP - player\nB - box\n. - place\n# - wall\n\nPress Return to start..."), arguments))))))(lambda game_cycle, levels, **state: game_cycle(game_cycle, levels, **state['gc'](**state)) if not state['next_level_flag'] else(game_cycle(game_cycle,levels, **(state['change_level'](levels[state['next_level']], state))) if state["next_level"] != -1 else(input("Game Over!"))))([{'field_size':[7, 7], 'pers':[1, 1], 'boxes':[[2, 3], [3, 2]], 'walls':[[0, 0], [0, 1], [0, 2], [0, 3], [0, 4], [0, 5], [0, 6], [1, 0],[1, 4],[1, 6],[2, 0],[2, 4],[2, 6],[3, 0],[3, 6],[4, 0],[4, 6],[5, 0],[5, 6],[6, 0], [6, 1], [6, 2], [6, 3], [6, 4], [6, 5], [6, 6]], 'places':[[1, 5], [2, 5]], 'current_level':0, 'next_level':1, }, { 'field_size':[9, 7], 'pers':[3, 1], 'boxes':[[3, 3], [3, 4], [3, 5]], 'walls':[[0, 0], [1, 0], [2, 0], [3, 0], [4, 0], [5, 0], [6, 0], [0, 1], [6, 1], [0, 2], [6, 2], [0, 3], [6, 3], [0, 4], [6, 4], [0, 5], [6, 5], [0, 6], [6, 6], [0, 7], [6, 7], [0, 8], [1, 8], [2, 8], [3, 8], [4, 8], [5, 8], [6, 8]], 'places':[[2, 7], [3, 7], [4, 7]], 'current_level': 1, 'next_level':2, }, { 'field_size':[9, 8], 'pers':[4, 1], 'boxes':[[3, 3], [5, 3], [2, 4]], 'walls':[[1, 0], [2, 0], [3, 0], [4, 0], [5, 0], [1, 1], [5, 1], [6, 1], [7, 1], [0, 2], [1, 2], [7, 2], [0, 3], [7, 3], [0, 4], [1, 4], [3, 4], [5, 4], [6, 4], [7, 4], [1, 5], [5, 5], [1, 6], [2, 6], [3, 6], [4, 6], [5, 6]], 'places':[[3, 2], [1, 3], [4, 3]], 'next_level':-1, 'current_level':2 },])(save_input=lambda arg:[arg, input()][0], change_level_tail=lambda fields, level, state:(state['update_state']('next_level_flag', False, **state) if len(fields) == 0 else state['change_level_tail'](fields[:-1], level, state["update_state"](fields[-1], level[fields[-1]], **state))), change_level=lambda level, 
state:(state["change_level_tail"](list(level.keys()), level, state)), current_level=-1, next_level=0, next_level_flag=True, print=lambda x, **state: [print(x), state][1], save_print=lambda x, y: [print(x), y][1], input=lambda x: input(x), field_size=[7, 7], pers=[1, 1], boxes=[[2, 3], [3, 2]], walls=[[0, 0],[0, 1],[0, 2],[0, 3], [0, 4], [0, 5], [0, 6], [1, 0], [1, 4], [1, 6], [2, 0], [2, 4], [2, 6], [3, 0], [3, 6], [4, 0], [4, 6], [5, 0], [5, 6], [6, 0], [6, 1], [6, 2], [6, 3], [6, 4], [6, 5], [6, 6]], places=[[1, 5], [2, 5]], vectors={'w': [-1, 0], 's': [1, 0], 'd': [0, 1], 'a': [0, -1]}, vector_input=lambda inp, **state: state['vectors'][inp] if inp in state['vectors'].keys() else ([0, 0]), object_add_vector=lambda obj, vec: [obj[0] + vec[0], obj[1] + vec[1]], object_in_positions_test=lambda obj, pos: obj in pos, all_true_array_test=(lambda func: lambda states_list: func(func, states_list, True) )(lambda func, states_list, current_state: current_state if len(states_list) == 0 else func(func, states_list[:-1], current_state and states_list[-1])), equal_arrays=lambda arrx, arry, **state: state['all_true_array_test']((lambda x, y: [(i in y) for i in x])(arrx, arry)), mod_state_list=lambda key, value, **state: [{i: value} if key == i else {i: state[i]} for i in state.keys()], update_state_tail=lambda current_array, old_state, **new_state: old_state['update_state_tail'](current_array[:-1], old_state, **dict(list(new_state.items()) + list(current_array[-1].items()))) if len(current_array) != 0 else new_state, update_state=lambda key, value, **state: state['update_state_tail'](state['mod_state_list'](key, value, **state), state, **{}), win_test=lambda **state: None, clear=lambda arg: [__import__("os").system("cls" if __import__('os').name == "nt" else "clear"), arg][1], render=lambda **state: state['print'](state['clear']('\n'.join([''.join([ "#" if [i, o] in state['walls'] else ( "B" if [i, o] in state['boxes'] else ( "P" if [i, o] == state['pers'] else ( "." 
if [i, o] in state['places'] else " "))) for o in range(state['field_size'][0])]) for i in range(state['field_size'][1])])), **state), amount_in=lambda el, array: sum([1 if i == el else 0 for i in array]), one_box_collision_test=lambda vector, perss, boxess, key, **state: (state if state['object_add_vector'](vector, boxess[-1]) in state['walls'] or state['amount_in'](state['object_add_vector'](vector, boxess[-1]), state['boxes']) == 1 else state['update_state']( 'boxes', [state['object_add_vector'](i, vector) if key == len(state["boxes"])-i_key-1 else i for i_key, i in enumerate(state['boxes'], 0)], **state["update_state"]( 'pers', perss, **state))) if perss == boxess[-1] else state['one_box_collision_test'](vector, perss, boxess[:-1], key+1, **state), box_collision_test=lambda vector, perss, **state: state['one_box_collision_test'](vector, perss, state['boxes'], 0, **state), pers_collision_test=lambda vector, **state: state['box_collision_test'](vector, state['object_add_vector'](state['pers'], vector), **state) if state['object_add_vector'](state['pers'], vector) in state['boxes'] else (state if state['object_add_vector'](state['pers'], vector) in state['walls'] else (state['update_state']('pers', state['object_add_vector'](state['pers'], vector), **state))), post_input=lambda string, **state: state if len(string) == 0 else (exit() if string[-1] == "e" else (state['update_state']('next_level', state['current_level'], **state['update_state']('next_level_flag', True, **state)) if string[-1] == "r" else (state['post_input'](string[1:], **state['pers_collision_test'](state['vector_input'](string[0], **state), **state))))), wait_input=lambda trash, **state: state['post_input'](state['input'](":"), **state), player_win=lambda **state: True if not state['equal_arrays'](state["boxes"], state["places"], **state) else False, gc=lambda **state: state['wait_input'](state['render'](**state), **state) if state['player_win'](**state) else 
(state['update_state']('next_level_flag', True, **state)))
| 5,995
| 5,995
| 0.607173
| 1,046
| 5,995
| 3.337476
| 0.128107
| 0.042968
| 0.050415
| 0.040103
| 0.274993
| 0.228301
| 0.173016
| 0.134059
| 0.090805
| 0.090805
| 0
| 0.061136
| 0.113261
| 5,995
| 1
| 5,995
| 5,995
| 0.595561
| 0
| 0
| 0
| 0
| 1
| 0.185624
| 0.007338
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
5832c969bede6c456e4b2075f2d6be85488cfa28
| 3,499
|
py
|
Python
|
docs/sphinx/examples/kernels/simple-powder-diffraction/sampleassembly/peaks.py
|
mcvine/mcvine
|
42232534b0c6af729628009bed165cd7d833789d
|
[
"BSD-3-Clause"
] | 5
|
2017-01-16T03:59:47.000Z
|
2020-06-23T02:54:19.000Z
|
docs/sphinx/examples/kernels/simple-powder-diffraction/sampleassembly/peaks.py
|
mcvine/mcvine
|
42232534b0c6af729628009bed165cd7d833789d
|
[
"BSD-3-Clause"
] | 293
|
2015-10-29T17:45:52.000Z
|
2022-01-07T16:31:09.000Z
|
docs/sphinx/examples/kernels/simple-powder-diffraction/sampleassembly/peaks.py
|
mcvine/mcvine
|
42232534b0c6af729628009bed165cd7d833789d
|
[
"BSD-3-Clause"
] | 1
|
2019-05-25T00:53:31.000Z
|
2019-05-25T00:53:31.000Z
|
from mccomponents.sample.diffraction.SimplePowderDiffractionKernel import Peak
# Reflection table: (q, F_squared, multiplicity) per peak.  Every peak shares
# a zero intrinsic line width and a zero Debye-Waller factor, so those two
# kwargs are filled in once by the comprehension below.
_PEAK_TABLE = [
    (2.687561, 1.690000, 8),
    (3.103329, 1.690000, 6),
    (4.388769, 1.440000, 12),
    (5.146288, 1.440000, 24),
    (5.375123, 1.210000, 8),
    (6.206657, 1.210000, 6),
    (6.763548, 1.000000, 24),
    (6.939254, 1.000000, 24),
    (7.601572, 1.000000, 24),
    (8.062684, 0.810000, 24),
    (8.062684, 0.810000, 8),
    (8.777539, 0.640000, 12),
    (9.179770, 0.640000, 48),
    (9.309986, 0.640000, 24),
    (9.309986, 0.640000, 6),
    (9.813587, 0.490000, 24),
    (10.174943, 0.490000, 24),
    (10.292577, 0.490000, 24),
    (10.750246, 0.490000, 8),
    (11.081100, 0.360000, 24),
    (11.081100, 0.360000, 24),
    (11.189210, 0.360000, 24),
    (11.611592, 0.360000, 48),
    (11.918560, 0.360000, 24),
    (11.918560, 0.360000, 48),
    (12.413314, 0.250000, 6),
]

peaks = [
    Peak(
        q=q,
        F_squared=f_squared,
        multiplicity=mult,
        intrinsic_line_width=0.000000,
        DebyeWaller_factor=0.000000,
    )
    for q, f_squared, mult in _PEAK_TABLE
]
# Generated by PeakGenerator.py from Al.laz, 15 Feb 2011 12:56
# manually added to include data regarding volume and cross sections
# unit: \AA
# Lattice constant (\AA) for the Al cell the peaks were generated from.
a = 4.049320
# Cubic conventional cell, so the volume is simply a^3 (\AA^3).
unitcell_volume = a**3
# Number of atoms in the conventional unit cell (4 for fcc).
natoms = 4
# unit: barns
class cross_sections:
    """Neutron cross sections per unit cell, in barns.

    Each value is a per-atom cross section scaled by ``natoms``.
    NOTE(review): the per-atom constants (1.495 coherent, 0.0082 incoherent,
    0.231 absorption) look like Al's tabulated values -- confirm against the
    neutron scattering-length tables before reuse.
    """

    coh = natoms * 1.495
    inc = natoms * 0.0082
    abs = natoms * 0.231
| 71.408163
| 119
| 0.781366
| 552
| 3,499
| 4.76087
| 0.175725
| 0.138508
| 0.178082
| 0.187976
| 0.840183
| 0.788813
| 0.788813
| 0.788813
| 0.788813
| 0.788813
| 0
| 0.255734
| 0.090312
| 3,499
| 48
| 120
| 72.895833
| 0.569903
| 0.042869
| 0
| 0.055556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027778
| 0
| 0.138889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
58453e9ba83872331c1cc9702017c7666b06e9b1
| 6,658
|
py
|
Python
|
tests/test_api_bot_infractions.py
|
gatarelib/site
|
81c71a58b949cb346e6af95d2cc3a7c4a71f36fe
|
[
"MIT"
] | null | null | null |
tests/test_api_bot_infractions.py
|
gatarelib/site
|
81c71a58b949cb346e6af95d2cc3a7c4a71f36fe
|
[
"MIT"
] | null | null | null |
tests/test_api_bot_infractions.py
|
gatarelib/site
|
81c71a58b949cb346e6af95d2cc3a7c4a71f36fe
|
[
"MIT"
] | null | null | null |
import json
from tests import SiteTest, app
# User/actor id shared by every infraction these tests create and query.
TEST_USER_ID = "test"
class ApiBotInfractionsEndpoint(SiteTest):
    """Tests for the bot infraction API (``/bot/infractions``).

    Every request goes to the API subdomain with the standard test auth
    header; that repeated plumbing (and the JSON encoding of payloads) is
    factored into the ``_post_infraction`` / ``_patch_infraction`` / ``_get``
    helpers so each test reads as its HTTP conversation only.
    """

    def _post_infraction(self, payload):
        """POST *payload* (dict) as JSON to /bot/infractions; return the response."""
        return self.client.post(
            "/bot/infractions",
            app.config["API_SUBDOMAIN"],
            headers=app.config["TEST_HEADER"],
            data=json.dumps(payload),
        )

    def _patch_infraction(self, payload):
        """PATCH /bot/infractions with *payload* (dict) as JSON; return the response."""
        return self.client.patch(
            "/bot/infractions",
            app.config["API_SUBDOMAIN"],
            headers=app.config["TEST_HEADER"],
            data=json.dumps(payload),
        )

    def _get(self, url):
        """GET *url* on the API subdomain with the test auth header."""
        return self.client.get(
            url,
            app.config["API_SUBDOMAIN"],
            headers=app.config["TEST_HEADER"],
        )

    def _assert_infraction_id(self, response):
        """Assert *response* is a 200 carrying an infraction with an id; return the id."""
        self.assert200(response)
        self.assertIn("infraction", response.json)
        self.assertIn("id", response.json["infraction"])
        return response.json["infraction"]["id"]

    def _assert_patch_success(self, response):
        """Assert a PATCH response is a 200 reporting success with an infraction body."""
        self.assert200(response)
        self.assertIn("success", response.json)
        self.assertIn("infraction", response.json)
        self.assertTrue(response.json["success"])

    def test_infraction_create_invalid(self):
        """Creating an infraction with an unknown type must be rejected with 400."""
        # Invalid infraction type
        response = self._post_infraction(
            {"type": "not_a_type", "reason": "test", "user_id": TEST_USER_ID, "actor_id": TEST_USER_ID}
        )
        self.assert400(response)

    def test_infraction_kick(self):
        """A kick can be created, fetched by id, and is stored as inactive."""
        response = self._post_infraction(
            {"type": "kick", "reason": "test", "user_id": TEST_USER_ID, "actor_id": TEST_USER_ID}
        )
        infraction_id = self._assert_infraction_id(response)
        # Fetch it back by id and verify the same record is returned.
        response = self._get(f"/bot/infractions/id/{infraction_id}")
        self.assertEqual(self._assert_infraction_id(response), infraction_id)
        # A kick is an instantaneous action, so it is never "active".
        self.assertIn("active", response.json["infraction"])
        self.assertFalse(response.json["infraction"]["active"])

    def test_infraction_ban(self):
        """Full ban lifecycle: create, set expiry, disable, re-enable, supersede."""
        response = self._post_infraction(
            {"type": "ban", "reason": "baddie", "user_id": TEST_USER_ID, "actor_id": TEST_USER_ID}
        )
        infraction_id = self._assert_infraction_id(response)
        # Check if the ban is currently applied
        response = self._get(f"/bot/infractions/user/{TEST_USER_ID}/ban/current")
        self.assert200(response)
        self.assertIn("infraction", response.json)
        self.assertIsNotNone(response.json["infraction"])
        self.assertIn("id", response.json["infraction"])
        self.assertEqual(response.json["infraction"]["id"], infraction_id)
        # A fresh ban has no expiry and is active.
        self.assertIsNone(response.json["infraction"]["expires_at"])
        self.assertTrue(response.json["infraction"]["active"])
        # Update the expiration to 1d
        response = self._patch_infraction({"id": infraction_id, "duration": "1d"})
        self._assert_patch_success(response)
        self.assertIsNotNone(response.json["infraction"]["expires_at"])
        self.assertTrue(response.json["infraction"]["active"])
        # Disable the ban
        response = self._patch_infraction({"id": infraction_id, "active": False})
        self._assert_patch_success(response)
        self.assertFalse(response.json["infraction"]["active"])
        # Check if there is no active ban anymore
        response = self._get(f"/bot/infractions/user/{TEST_USER_ID}/ban/current")
        self.assert200(response)
        self.assertIn("infraction", response.json)
        self.assertIsNone(response.json["infraction"])
        # Re-activate the ban
        response = self._patch_infraction({"id": infraction_id, "active": True})
        self._assert_patch_success(response)
        self.assertTrue(response.json["infraction"]["active"])
        # Create a new ban for the same user
        response = self._post_infraction(
            {"type": "ban", "reason": "baddie v2.0", "user_id": TEST_USER_ID, "actor_id": TEST_USER_ID}
        )
        new_infraction_id = self._assert_infraction_id(response)
        # Check if the old ban is now disabled
        response = self._get(f"/bot/infractions/id/{infraction_id}")
        self.assert200(response)
        self.assertIn("infraction", response.json)
        self.assertFalse(response.json["infraction"]["active"])
        # Check if the current ban infraction is the new infraction
        response = self._get(f"/bot/infractions/user/{TEST_USER_ID}/ban/current")
        self.assert200(response)
        self.assertIn("infraction", response.json)
        self.assertEqual(response.json["infraction"]["id"], new_infraction_id)
| 49.318519
| 116
| 0.600631
| 712
| 6,658
| 5.462079
| 0.109551
| 0.120339
| 0.124454
| 0.064798
| 0.877861
| 0.84649
| 0.816662
| 0.80792
| 0.774749
| 0.739265
| 0
| 0.008237
| 0.270652
| 6,658
| 134
| 117
| 49.686567
| 0.792628
| 0.041604
| 0
| 0.718182
| 0
| 0
| 0.195731
| 0.03359
| 0
| 0
| 0
| 0
| 0.436364
| 1
| 0.027273
| false
| 0
| 0.018182
| 0
| 0.054545
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5855b0f9736f6388709a76b5e5876afb36f8997f
| 588,552
|
py
|
Python
|
Engine/Extras/Maya_AnimationRiggingTools/ARTv1/MayaTools/General/Scripts/ART_animationUI.py
|
windystrife/UnrealEngine_NVIDIAGameWork
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
[
"MIT"
] | 1
|
2022-01-29T18:36:12.000Z
|
2022-01-29T18:36:12.000Z
|
Engine/Extras/Maya_AnimationRiggingTools/ARTv1/MayaTools/General/Scripts/ART_animationUI.py
|
windystrife/UnrealEngine_NVIDIAGameWork
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
[
"MIT"
] | null | null | null |
Engine/Extras/Maya_AnimationRiggingTools/ARTv1/MayaTools/General/Scripts/ART_animationUI.py
|
windystrife/UnrealEngine_NVIDIAGameWork
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
[
"MIT"
] | null | null | null |
import maya.cmds as cmds
from functools import partial
import os, cPickle, math
import maya.mel as mel
import maya.utils
class AnimationUI():
def __init__(self):
#check to see if there are any rigs in the scene, if not return
characters = self.getCharacters()
print characters
if len(characters) == 0:
result = cmds.confirmDialog(title = "Error", message = "No Characters found in scene. Would you like to add a character now?", button = ["Yes", "No"], defaultButton = "Yes", cancelButton = "No", dismissString = "No")
if result == "Yes":
import ART_addCharacter_UI
reload(ART_addCharacter_UI)
UI = ART_addCharacter_UI.AddCharacter_UI()
return
else:
return
#get access to our maya tools
toolsPath = cmds.internalVar(usd = True) + "mayaTools.txt"
if os.path.exists(toolsPath):
f = open(toolsPath, 'r')
self.mayaToolsDir = f.readline()
f.close()
#figure out which project the rigs are from
references = cmds.ls(type = "reference")
self.project = ""
for ref in references:
try:
projects = os.listdir(self.mayaToolsDir + "/General/Icons/ART/Thumbnails/")
proj = cmds.referenceQuery(ref, filename = True, unresolvedName = True).rpartition("Projects/")[2].partition("/")[0]
if proj in projects:
self.project = proj
resolved = cmds.referenceQuery(ref, filename = True).rpartition("Projects/")[2].partition("/")[0]
if resolved in projects:
if resolved != self.project:
refPath = cmds.referenceQuery(ref, filename = True)
cmds.confirmDialog(title = "Reference", icon = "warning", message = "This file is currently referencing a rig file that is not located in the MayaTools directory.\nCurrent Reference path: " + refPath + ".")
except:
pass
path = self.mayaToolsDir + "/General/ART/Projects/" + self.project + "/banner.jpg"
if os.path.exists(path):
projectBanner = path
else:
projectBanner = self.mayaToolsDir + "/General/Icons/ART/banner.jpg"
#create class vars
self.widgets = {}
self.formsToHide = []
self.assetEntries = []
self.mats = []
#check to see if the skeleton builder UI exists with channel box
#check to see if window exists. if so, delete
if cmds.dockControl("skeletonBuilder_dock", exists = True):
channelBox = cmds.formLayout("SkelBuilder_channelBoxFormLayout", q = True, childArray = True)
if channelBox != None:
channelBox = channelBox[0]
#reparent the channelBox Layout back to maya's window
cmds.control(channelBox, e = True, p = "MainChannelsLayersLayout")
channelBoxLayout = mel.eval('$temp1=$gChannelsLayersForm')
channelBoxForm = mel.eval('$temp1 = $gChannelButtonForm')
#edit the channel box pane's attachment to the formLayout
cmds.formLayout(channelBoxLayout, edit = True, af = [(channelBox, "left", 0),(channelBox, "right", 0), (channelBox, "bottom", 0)], attachControl = (channelBox, "top", 0, channelBoxForm))
cmds.deleteUI("skeletonBuilder_dock")
#check to see if window exists. if so, delete
if cmds.dockControl("artAnimUIDock", exists = True):
channelBox = cmds.formLayout("ART_cbFormLayout", q = True, childArray = True)
if channelBox != None:
channelBox = channelBox[0]
#reparent the channelBox Layout back to maya's window
cmds.control(channelBox, e = True, p = "MainChannelsLayersLayout")
channelBoxLayout = mel.eval('$temp1=$gChannelsLayersForm')
channelBoxForm = mel.eval('$temp1 = $gChannelButtonForm')
#edit the channel box pane's attachment to the formLayout
cmds.formLayout(channelBoxLayout, edit = True, af = [(channelBox, "left", 0),(channelBox, "right", 0), (channelBox, "bottom", 0)], attachControl = (channelBox, "top", 0, channelBoxForm))
cmds.deleteUI("artAnimUIDock")
if cmds.window("artAnimUI", exists = True):
cmds.deleteUI("artAnimUI")
#build window
self.widgets["window"] = cmds.window("artAnimUI", w = 400, h = 700, title = "Animation UI", sizeable = True)
#create the main layout
self.widgets["topLevelLayout"] = cmds.columnLayout()
#create the menu bar
self.widgets["menuBarLayout"] = cmds.menuBarLayout(w = 400, h =20, parent = self.widgets["topLevelLayout"] )
self.widgets["menuBar_settings"] = cmds.menu(label = "Settings", parent = self.widgets["menuBarLayout"])
self.widgets["menuBar_settings_channelBox"] = cmds.menuItem(label = "Show Channel Box", checkBox = False, parent = self.widgets["menuBar_settings"], c = self.showChannelBox)
self.widgets["menuBar_settings_matching"] = cmds.menuItem(label = "Match On Switch", checkBox = True, parent = self.widgets["menuBar_settings"])
#add match options
cmds.menuItem(parent = self.widgets["menuBar_settings"], divider = True)
cmds.menuItem(parent = self.widgets["menuBar_settings"], label = "Space Switch Settings", enable = False)
cmds.menuItem(parent = self.widgets["menuBar_settings"], divider = True)
self.widgets["spaceSwitch_MatchToggleCB"] = cmds.menuItem(parent = self.widgets["menuBar_settings"], label = "Match?", cb = True, c = self.saveUISettings)
self.widgets["spaceSwitch_MatchMethodCB"] = cmds.menuItem(parent = self.widgets["menuBar_settings"], label = "Match To Control?", cb = True, c = self.saveUISettings)
#Add animation menu
self.widgets["menuBar_animation"] = cmds.menu(label = "Animation", parent = self.widgets["menuBarLayout"])
self.widgets["menuBar_animation_eulerAll"] = cmds.menuItem(label = "Run Euler Filter On All", c = self.eulerFilterAll, parent = self.widgets["menuBar_animation"])
self.widgets["menuBar_animation_eulerSel"] = cmds.menuItem(label = "Run Euler Filter On Selected", c = self.eulerFilterSelected, parent = self.widgets["menuBar_animation"])
self.widgets["menuBar_animation_bakeDyn"] = cmds.menuItem(label = "Bake Dynamics to FK", c = self.bakeDynToFK, parent = self.widgets["menuBar_animation"])
#create the area for the active character controls
self.widgets["activeCharacterLayout"] = cmds.formLayout(w = 400, h = 60, parent = self.widgets["topLevelLayout"])
self.widgets["projectBanner"] = cmds.image(w = 400, h = 60, bgc = [.5, .5, .5], parent = self.widgets["activeCharacterLayout"], image = projectBanner )
self.widgets["activeCharacterThumb"] = cmds.symbolButton('activeCharacterThumb', w = 50, h = 50, parent = self.widgets["activeCharacterLayout"])
cmds.formLayout(self.widgets["activeCharacterLayout"], edit = True, af = [(self.widgets["activeCharacterThumb"], "right", 5), (self.widgets["activeCharacterThumb"], "top", 6)])
#create the character list pop-up menu
self.widgets["characterRigList"] = cmds.popupMenu(parent = self.widgets["activeCharacterThumb"], b = 1)
self.populateCharacterRigList()
#create the row column layout where the left column will contain pretty much everything, and the right column is optional for channel box display
self.widgets["rowColLayout"] = cmds.rowColumnLayout(nc = 3, cw = [(1, 400), (2, 50), (3, 1)], parent = self.widgets["topLevelLayout"])
#create the main tab Layout
self.widgets["mainLayout"] = cmds.tabLayout(w = 400, h = 700, parent = self.widgets["rowColLayout"])
#create the formLayout that will contain each character's picker
self.widgets["pickerLayout"] = cmds.formLayout(w = 400, h = 700, parent = self.widgets["mainLayout"])
self.widgets["pickerScroll"] = cmds.scrollLayout(w = 400, h = 700, hst = 0, parent = self.widgets["pickerLayout"])
#create the tools layout(50 pixel column on the screen right)
self.widgets["pickerTools"] = cmds.columnLayout(w = 50, h = 700, parent = self.widgets["rowColLayout"], rowSpacing = 10)
cmds.text(label = "")
self.widgets["pickerSelectTool"] = cmds.symbolButton(w = 50, h = 50, image = self.mayaToolsDir + "/General/Icons/ART/pickerSelect.bmp", parent = self.widgets["pickerTools"], ann = "Selection Tools")
self.widgets["pickerResetTool"] = cmds.symbolButton(w = 50, h = 50, image = self.mayaToolsDir + "/General/Icons/ART/zero.bmp", parent = self.widgets["pickerTools"], ann = "Reset Rig to the Defaults")
self.widgets["pickerImportMotionTool"] = cmds.symbolButton(w = 50, h = 50, image = self.mayaToolsDir + "/General/Icons/ART/importMotion.bmp", parent = self.widgets["pickerTools"], c = self.importMotion, ann = "Import Motion")
self.widgets["pickerExportMotionTool"] = cmds.symbolButton(w = 50, h = 50, image = self.mayaToolsDir + "/General/Icons/ART/exportMotion.bmp", parent = self.widgets["pickerTools"], c = self.exportMotion, ann = "Export Motion")
self.widgets["pickerSpaceSwitchTool"] = cmds.symbolButton(w = 50, h = 50, image = self.mayaToolsDir + "/General/Icons/ART/picker_spaceSwitch.bmp", parent = self.widgets["pickerTools"], c = self.spaceSwitcher, ann = "Space Switching")
self.widgets["pickerPoseTools"] = cmds.symbolButton(c = self.poseEditor, w = 50, h = 50, image = self.mayaToolsDir + "/General/Icons/ART/poseTools.bmp", parent = self.widgets["pickerTools"], ann = "Pose Tools and Utilities")
self.widgets["pickerMatchOverRange"] = cmds.symbolButton(c = self.match_frameRange_UI, w = 50, h = 50, image = self.mayaToolsDir + "/General/Icons/ART/spaceSwitchMatch_on.bmp", parent = self.widgets["pickerTools"], ann = "Match Over Frame Range")
self.widgets["pickerControlScaleTool"] = cmds.symbolButton(c = self.control_scale_init, w = 50, h = 50, image = self.mayaToolsDir + "/General/Icons/ART/controlScale.bmp", parent = self.widgets["pickerTools"], ann = "Scale selected controls' size. Use to make controls larger or smaller for selecting.")
self.widgets["pickerControlVisibility"] = cmds.iconTextCheckBox(w = 50, h = 50, style='iconOnly', value = True, selectionImage = self.mayaToolsDir + "/General/Icons/ART/pickerVisible.bmp", image = self.mayaToolsDir + "/General/Icons/ART/pickerInvisible.bmp", parent = self.widgets["pickerTools"], onc = partial(self.toggleControlVis, True), ofc = partial(self.toggleControlVis, False), ann = "Toggle Current Rig's Control Visibility" )
self.widgets["pickerSelectionSets"] = cmds.symbolButton(c = self.control_scale_init, w = 50, h = 50, image = self.mayaToolsDir + "/General/Icons/ART/selectionSets.bmp", parent = self.widgets["pickerTools"], ann = "Selection Sets that are created are stored here.")
self.widgets["pickerHelpMenu"] = cmds.symbolButton(w = 50, h = 50, image = self.mayaToolsDir + "/General/Icons/ART/helpicon.bmp", parent = self.widgets["pickerTools"], c = self.animHelp, ann = "Help")
#create popup menu for space switcher
menu = cmds.popupMenu(parent = self.widgets["pickerSpaceSwitchTool"], b = 3)
cmds.menuItem(label = "Create Space", parent = menu, c = self.createSpace)
#create radial menu for pose button for copy, paste, and paste opposite
self.widgets["pickerPoseToolsRadial"] = cmds.popupMenu(b = 3, parent = self.widgets["pickerPoseTools"], mm = True)
self.widgets["pickerPoseToolsRadial_copy"] = cmds.menuItem(label = "Copy", parent = self.widgets["pickerPoseToolsRadial"], rp = "N", c = self.copyPose)
self.widgets["pickerPoseToolsRadial_paste"] = cmds.menuItem(label = "Paste", parent = self.widgets["pickerPoseToolsRadial"], rp = "W", c = self.pastePose)
self.widgets["pickerPoseToolsRadial_pasteOpposite"] = cmds.menuItem(label = "Paste Opposite", parent = self.widgets["pickerPoseToolsRadial"], rp = "S", c = self.pastePoseOpposite)
self.widgets["pickerPoseToolsRadial_pastePreview"] = cmds.menuItem(label = "Show Paste Controls", parent = self.widgets["pickerPoseToolsRadial"], rp = "NW", c = self.pastePreview)
self.widgets["pickerPoseToolsRadial_pastePreviewOpp"] = cmds.menuItem(label = "Show Paste Opposite Controls", parent = self.widgets["pickerPoseToolsRadial"], rp = "SW", c = self.pasteOppositePreview)
#create radial menu for iso select tools
self.widgets["isoSelectRadial"] = cmds.popupMenu(b = 3, parent = self.widgets["pickerControlVisibility"])
self.widgets["isoSelect_Generate"] = cmds.menuItem(label = "Generate Iso Selection Sets", parent = self.widgets["isoSelectRadial"], c = self.getIsoSelectionPolygons, enable = True)
cmds.menuItem(divider = True, parent = self.widgets["isoSelectRadial"] )
self.widgets["isoSelect_Torso"] = cmds.menuItem(label = "Torso", parent = self.widgets["isoSelectRadial"], c = self.isoSelect, cb = True, enable = False)
self.widgets["isoSelect_LeftArm"] = cmds.menuItem(label = "Left Arm", parent = self.widgets["isoSelectRadial"], c = self.isoSelect, cb = True, enable = False)
self.widgets["isoSelect_RightArm"] = cmds.menuItem(label = "Right Arm", parent = self.widgets["isoSelectRadial"], c = self.isoSelect, cb = True, enable = False)
self.widgets["isoSelect_LeftLeg"] = cmds.menuItem(label = "Left Leg", parent = self.widgets["isoSelectRadial"], c = self.isoSelect, cb = True, enable = False)
self.widgets["isoSelect_RightLeg"] = cmds.menuItem(label = "Right Leg", parent = self.widgets["isoSelectRadial"], c = self.isoSelect, cb = True, enable = False)
self.widgets["isoSelect_Head"] = cmds.menuItem(label = "Head", parent = self.widgets["isoSelectRadial"], c = self.isoSelect, cb = True, enable = False)
self.widgets["isoSelect_ShowAll"] = cmds.menuItem(label = "Show All", parent = self.widgets["isoSelectRadial"], c = self.exitIso, enable = False)
cmds.menuItem(divider = True, parent = self.widgets["isoSelectRadial"] )
cmds.menuItem(label = "Isolation Method:", parent = self.widgets["isoSelectRadial"], enable = False)
isoMethodCollection = cmds.radioMenuItemCollection(parent = self.widgets["isoSelectRadial"])
self.widgets["isoMethodClassic"] = cmds.menuItem(label = "Classic", rb = True, cl = isoMethodCollection, parent = self.widgets["isoSelectRadial"], ann = "Use Maya's isolate selection, where everything except selection is hidden", c = self.exitIso)
self.widgets["isoMethodMaterial"] = cmds.menuItem(label = "Material", rb = False, cl = isoMethodCollection, parent = self.widgets["isoSelectRadial"], c = self.exitIso, ann = "Use custom isolate selection function where any unselected parts are invisible, but the rest of the scene does not get hidden.")
#selection sets menu
self.widgets["selectionSetMenuPopUp"] = cmds.popupMenu(b = 1, parent = self.widgets["pickerSelectionSets"])
#create the selection popupMenu
self.widgets["pickerSelectionToolPopup"] = cmds.popupMenu(b = 1, parent = self.widgets["pickerSelectTool"])
self.widgets["pickerSelectionToolPopup_Controls"] = cmds.menuItem(label = "Select All Controls", parent = self.widgets["pickerSelectionToolPopup"], c = self.selectAll)
self.widgets["pickerSelectionToolPopup_All"] = cmds.menuItem(label = "Select All (Controls + Spaces)", parent = self.widgets["pickerSelectionToolPopup"], c = self.selectEverything)
self.widgets["pickerSelectionToolPopup_Settings"] = cmds.menuItem(label = "Select Rig Settings", parent = self.widgets["pickerSelectionToolPopup"], c = self.selectRigSettings)
self.widgets["pickerSelectionToolPopup_Custom"] = cmds.menuItem(label = "Create Selection Set", parent = self.widgets["pickerSelectionToolPopup"], c = self.createSelectionSet)
self.widgets["selectionSetsCustom"] = cmds.menuItem(label = "Selection Sets", parent = self.widgets["pickerSelectionToolPopup"], subMenu = True, tearOff = True)
#create the reset popupMenu
self.widgets["pickerResetToolPopup"] = cmds.popupMenu(b = 1, parent = self.widgets["pickerResetTool"])
self.widgets["pickerResetToolPopup_All"] = cmds.menuItem(label = "Zero out All", parent = self.widgets["pickerResetToolPopup"], c = self.resetAll)
self.widgets["pickerResetToolPopup_Selected"] = cmds.menuItem(label = "Zero out Selected", parent = self.widgets["pickerResetToolPopup"], c = self.resetSelection)
#create the character picker for each character found in the scene
characters = self.getCharacters()
for character in characters:
self.createCharacterPicker(character, self.widgets["pickerScroll"])
#create the list view picker for each character found in the scene
self.widgets["listViewLayout"] = cmds.formLayout(w = 400, h = 700, parent = self.widgets["mainLayout"])
for character in characters:
self.createListView(character, self.widgets["listViewLayout"])
#create channel box layout
self.widgets["cbFormLayout"] = cmds.formLayout("ART_cbFormLayout", w = 200, h = 700, parent = self.widgets["rowColLayout"])
#create the rig settings tab
self.widgets["rigSettingsLayout"] = cmds.formLayout(w = 400, h = 700, parent = self.widgets["mainLayout"])
self.widgets["rigSettingsScroll"] = cmds.scrollLayout(w = 400, h = 700, hst = 0, parent = self.widgets["rigSettingsLayout"])
for character in characters:
self.createRigSettings(character, self.widgets["rigSettingsScroll"])
#name the tabs
cmds.tabLayout(self.widgets["mainLayout"], edit = True, tabLabel = [(self.widgets["pickerLayout"], "Picker"), (self.widgets["listViewLayout"], "List View"), (self.widgets["rigSettingsLayout"], "Rig Settings")])
#show window
self.widgets["dock"] = cmds.dockControl("artAnimUIDock", label = "Animation Interface", content = self.widgets["window"], area = "right", allowedArea = "right", visibleChangeCommand = self.interfaceScriptJob)
#add attributes to controls
self.setupButtonAttrsOnControls()
#setup selection scriptJob
self.scriptJob = cmds.scriptJob(parent = self.widgets["window"], event = ["SelectionChanged", self.selectionScriptJob], kws = True)
#set the current selected character and change the thumbnail
selected = characters[len(characters)-1]
self.setThumbnail(selected, self.project)
self.switchActiveCharacter(selected)
#get all controls
self.controls = []
for control in ["head_fk_anim", "neck_01_fk_anim", "neck_02_fk_anim", "neck_03_fk_anim", "spine_01_anim", "spine_02_anim", "spine_03_anim", "spine_04_anim", "spine_05_anim", "mid_ik_anim", "chest_ik_anim",
"body_anim", "hip_anim", "clavicle_l_anim", "clavicle_r_anim", "fk_arm_l_anim", "fk_arm_r_anim", "fk_elbow_l_anim", "fk_elbow_r_anim", "fk_wrist_l_anim", "fk_wrist_r_anim",
"ik_elbow_l_anim", "ik_elbow_r_anim", "ik_wrist_l_anim", "ik_wrist_r_anim", "fk_thigh_l_anim", "fk_thigh_r_anim", "fk_calf_l_anim", "fk_calf_r_anim", "fk_foot_l_anim", "fk_foot_r_anim",
"fk_ball_l_anim", "fk_ball_r_anim", "ik_foot_anim_l", "ik_foot_anim_r", "heel_ctrl_l", "heel_ctrl_r", "toe_wiggle_ctrl_l", "toe_wiggle_ctrl_r",
"toe_tip_ctrl_l", "toe_tip_ctrl_r", "master_anim", "offset_anim", "root_anim", "upperarm_l_twist_anim", "upperarm_l_twist_2_anim", "upperarm_l_twist_3_anim", "upperarm_r_twist_anim", "upperarm_r_twist_2_anim", "upperarm_r_twist_3_anim", "l_thigh_twist_01_anim", "r_thigh_twist_01_anim",
"pinky_metacarpal_ctrl_l", "pinky_metacarpal_ctrl_r", "pinky_finger_fk_ctrl_1_l", "pinky_finger_fk_ctrl_1_r", "pinky_finger_fk_ctrl_2_l", "pinky_finger_fk_ctrl_2_r", "pinky_finger_fk_ctrl_3_l", "pinky_finger_fk_ctrl_3_r",
"ring_metacarpal_ctrl_l", "ring_metacarpal_ctrl_r", "ring_finger_fk_ctrl_1_l", "ring_finger_fk_ctrl_1_r", "ring_finger_fk_ctrl_2_l", "ring_finger_fk_ctrl_2_r", "ring_finger_fk_ctrl_3_l", "ring_finger_fk_ctrl_3_r",
"middle_metacarpal_ctrl_l", "middle_metacarpal_ctrl_r", "middle_finger_fk_ctrl_1_l", "middle_finger_fk_ctrl_1_r", "middle_finger_fk_ctrl_2_l", "middle_finger_fk_ctrl_2_r", "middle_finger_fk_ctrl_3_l", "middle_finger_fk_ctrl_3_r",
"index_metacarpal_ctrl_l", "index_metacarpal_ctrl_r", "index_finger_fk_ctrl_1_l", "index_finger_fk_ctrl_1_r", "index_finger_fk_ctrl_2_l", "index_finger_fk_ctrl_2_r", "index_finger_fk_ctrl_3_l", "index_finger_fk_ctrl_3_r",
"thumb_finger_fk_ctrl_1_l", "thumb_finger_fk_ctrl_1_r", "thumb_finger_fk_ctrl_2_l", "thumb_finger_fk_ctrl_2_r", "thumb_finger_fk_ctrl_3_l", "thumb_finger_fk_ctrl_3_r",
"index_l_ik_anim", "index_r_ik_anim", "middle_l_ik_anim", "middle_r_ik_anim", "ring_l_ik_anim", "ring_r_ik_anim", "pinky_l_ik_anim", "pinky_r_ik_anim", "thumb_l_ik_anim", "thumb_r_ik_anim",
"index_l_poleVector", "index_r_poleVector", "middle_l_poleVector", "middle_r_poleVector", "ring_l_poleVector", "ring_r_poleVector", "pinky_l_poleVector", "pinky_r_poleVector", "thumb_l_poleVector", "thumb_r_poleVector",
"l_global_ik_anim", "r_global_ik_anim", "lowerarm_l_twist_anim", "lowerarm_l_twist2_anim", "lowerarm_l_twist3_anim", "lowerarm_r_twist_anim", "lowerarm_r_twist2_anim", "lowerarm_r_twist3_anim", "calf_r_twist_anim", "calf_r_twist2_anim", "calf_r_twist3_anim",
"calf_l_twist_anim", "calf_l_twist2_anim", "calf_l_twist3_anim", "thigh_l_twist_2_anim", "thigh_l_twist_3_anim", "thigh_r_twist_2_anim", "thigh_r_twist_3_anim"]:
self.controls.append(control)
#hack
character = selected
for obj in ["fk_clavicle_l_anim", "fk_clavicle_r_anim"]:
if cmds.objExists(character + ":" + obj):
self.controls.append(obj)
#find custom joints
character = selected
customJoints = []
attrs = cmds.listAttr(character + ":" + "Skeleton_Settings")
for attr in attrs:
if attr.find("extraJoint") == 0:
customJoints.append(attr)
for joint in customJoints:
attribute = cmds.getAttr(character + ":" + "Skeleton_Settings." + joint, asString = True)
jointType = attribute.partition("/")[2].partition("/")[0]
label = attribute.rpartition("/")[2]
if jointType == "leaf":
label = label.partition(" (")[0]
control = label + "_anim"
self.controls.append(control)
if jointType == "jiggle":
control = label + "_anim"
self.controls.append(control)
if jointType == "chain" or jointType == "dynamic":
numJointsInChain = label.partition("(")[2].partition(")")[0]
label = label.partition(" (")[0]
self.controls.append(label + "_dyn_anim")
cmds.select("*:" + label + "_ik_*_anim")
selection = cmds.ls(sl = True)
for each in selection:
niceName = each.partition(":")[2]
self.controls.append(niceName)
for i in range(int(numJointsInChain)):
self.controls.append("fk_" + label + "_0" + str(i + 1) + "_anim")
self.controls.append(label + "_cv_" + str(i) + "_anim")
#load UI settings
self.loadUISettings()
self.findCustomSelectionSets()
#create script job for updating ui
self.updateUI_scriptJob()
#setup the scriptJob
cmds.scriptJob(event = ["readingFile", self.killUIScriptJob], runOnce = True, kws = True)
cmds.scriptJob(event = ["SceneSaved", self.exitIsoOnSave], parent = self.widgets["window"], kws = True, runOnce = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def bakeDynToFK(self, *args):
    """Open a window listing the active character's dynamic (*_dyn_anim) controls
    so the user can pick which chains to bake down to FK.

    Warns and returns early when the character has no dynamic controls.
    """
    #the active character's namespace is stored on the thumbnail button's annotation
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    #use ls() rather than select(): select() raises when the wildcard matches
    #nothing, which made the "no dynamic controls" warning below unreachable
    dynControls = cmds.ls(character + ":" + "*dyn_anim")
    if len(dynControls) > 0:
        #list the controls in a UI (recreate the window if it already exists)
        if cmds.window("bakeDynToFKControls_UI", exists = True):
            cmds.deleteUI("bakeDynToFKControls_UI")
        window = cmds.window("bakeDynToFKControls_UI", title = "Bake Dynamics", w = 300, h = 400, sizeable = True, mnb = False, mxb = False)
        mainLayout = cmds.formLayout(w = 300, h = 400)
        #textScrollList of bake candidates (multi-select allowed)
        self.widgets["bakeDynToFK_List"] = cmds.textScrollList(w = 200, h = 300, allowMultiSelection = True, parent = mainLayout)
        for control in dynControls:
            cmds.textScrollList(self.widgets["bakeDynToFK_List"], edit = True, append = control)
        cmds.formLayout(mainLayout, edit = True, af = [(self.widgets["bakeDynToFK_List"], "top", 50), (self.widgets["bakeDynToFK_List"], "left", 5)] )
        #process button
        button = cmds.button(w = 80, h = 40, label = "Bake", c = self.bakeDynToFK_Process)
        cmds.formLayout(mainLayout, edit = True, af = [(button, "bottom", 50), (button, "right", 5)])
        cmds.showWindow(window)
    else:
        cmds.warning("No Dynamic controls found on the current character.")
        return
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def bakeDynToFK_Process(self, *args):
    """Bake the chains selected in the bake list from dynamics onto FK controls.

    For each chosen *_dyn_anim control: find the chain's fk_*_anim controls,
    orient-constrain them to the rig's rig_dyn_* joints, bake the constraint
    animation over the playback range, delete the constraints, and switch the
    chain's Rig_Settings attributes to pure FK. Closes the window when done.
    """
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    #get the selected controls in the list
    controlsToBake = cmds.textScrollList(self.widgets["bakeDynToFK_List"], q = True, si = True)
    if controlsToBake != None:
        for control in controlsToBake:
            #chain base name, e.g. "Husk_Base:hood_dyn_anim" -> "hood"
            name = control.partition(":")[2].partition("_dyn_anim")[0]
            masterGrp = character + ":" + name + "_master_ctrl_grp"
            cmds.select(masterGrp, hi = True)
            nodes = cmds.ls(sl = True)
            #find the corresponding fk controls (fk_*_anim transforms in the hierarchy)
            fkControls = []
            for node in nodes:
                if node.find(":fk_") != -1:
                    if node.find("_anim") != -1:
                        if cmds.nodeType(node) == "transform":
                            fkControls.append(node)
            #constrain the fk controls to the dyn joints
            #(e.g. Husk_Base:fk_hood_01_anim <- Husk_Base:rig_dyn_hood_01)
            constraints = []
            #renamed from "control": the original shadowed the outer loop variable
            for fkControl in fkControls:
                niceName = fkControl.partition("fk_")[2].partition("_anim")[0]
                joint = character + ":" + "rig_dyn_" + niceName
                constraint = cmds.orientConstraint(joint, fkControl)[0]
                constraints.append(constraint)
            #select the FK controls and bake over the playback range
            start = cmds.playbackOptions(q = True, min = True)
            end = cmds.playbackOptions(q = True, max = True)
            cmds.select(clear = True)
            for fkControl in fkControls:
                cmds.select(fkControl, add = True)
            cmds.bakeResults(simulation = True, time = (start, end))
            cmds.delete(constraints)
            #switch the chain to FK so the baked keys drive the rig
            cmds.setAttr(character + ":" + "Rig_Settings." + name + "_fk", 1)
            cmds.setAttr(character + ":" + "Rig_Settings." + name + "_ik", 0)
            cmds.setAttr(character + ":" + "Rig_Settings." + name + "_dynamic", 0)
    #delete the UI
    cmds.deleteUI("bakeDynToFKControls_UI")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def getIsoSelectionPolygons(self, *args):
    """Build the per-body-part face lists used by the iso-select tools.

    Every skinCluster's geometry is associated with the body part(s) whose
    joints influence it; then each face whose skin weights (above .25)
    include a joint of that part is recorded in the matching self.*Faces
    list. Finally the per-part iso-select menu items are enabled and the
    Generate item disabled.
    """
    cmds.progressWindow(title='Animation UI', progress=0, status='Building Iso Selection Sets', isInterruptable=True )
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    #create the list of iso selection polygons
    self.torsoFaces = []
    self.leftArmFaces = []
    self.rightArmFaces = []
    self.leftLegFaces = []
    self.rightLegFaces = []
    self.headFaces = []
    #lists of what joints belong to which "part"
    torso = ["pelvis", "spine_01", "spine_02", "spine_03", "spine_04", "spine_05", "clavicle_l", "clavicle_r"]
    leftArm = ["upperarm_l", "lowerarm_l", "hand_l", "index_metacarpal_l", "index_01_l", "index_02_l", "index_03_l", "middle_metacarpal_l", "middle_01_l", "middle_02_l", "middle_03_l", "ring_metacarpal_l", "ring_01_l", "ring_02_l", "ring_03_l", "pinky_metacarpal_l", "pinky_01_l", "pinky_02_l", "pinky_03_l", "thumb_01_l", "thumb_02_l", "thumb_03_l", "lowerarm_twist_01_l", "lowerarm_twist_02_l", "lowerarm_twist_03_l", "upperarm_twist_01_l", "upperarm_twist_02_l", "upperarm_twist_03_l" ]
    rightArm = ["upperarm_r", "lowerarm_r", "hand_r", "index_metacarpal_r", "index_01_r", "index_02_r", "index_03_r", "middle_metacarpal_r", "middle_01_r", "middle_02_r", "middle_03_r", "ring_metacarpal_r", "ring_01_r", "ring_02_r", "ring_03_r", "pinky_metacarpal_r", "pinky_01_r", "pinky_02_r", "pinky_03_r", "thumb_01_r", "thumb_02_r", "thumb_03_r", "lowerarm_twist_01_r", "lowerarm_twist_02_r", "lowerarm_twist_03_r", "upperarm_twist_01_r", "upperarm_twist_02_r", "upperarm_twist_03_r" ]
    leftLeg = ["thigh_l", "calf_l", "foot_l", "ball_l", "thigh_twist_01_l", "thigh_twist_02_l", "thigh_twist_03_l", "calf_twist_01_l", "calf_twist_02_l", "calf_twist_03_l"]
    rightLeg = ["thigh_r", "calf_r", "foot_r", "ball_r", "thigh_twist_01_r", "thigh_twist_02_r", "thigh_twist_03_r", "calf_twist_01_r", "calf_twist_02_r", "calf_twist_03_r"]
    head = ["neck_01", "neck_02", "neck_03", "head"]
    #table-drive the six previously copy-pasted branches
    partOrder = ["Torso", "LeftArm", "RightArm", "LeftLeg", "RightLeg", "Head"]
    partJoints = {"Torso": torso, "LeftArm": leftArm, "RightArm": rightArm, "LeftLeg": leftLeg, "RightLeg": rightLeg, "Head": head}
    partFaces = {"Torso": self.torsoFaces, "LeftArm": self.leftArmFaces, "RightArm": self.rightArmFaces, "LeftLeg": self.leftLegFaces, "RightLeg": self.rightLegFaces, "Head": self.headFaces}
    #find all of the skin clusters. In each one, find the weighted joints and the geometry weighted to those joints
    characterGeo = []
    seen = set()    #dedupe: the original appended one entry per matching weighted joint,
                    #repeating the expensive per-face pass below for the same geo/part
    skinClusters = cmds.ls(type = 'skinCluster')
    for skin in skinClusters:
        weightedJoints = cmds.skinCluster(skin, q = True, weightedInfluence = True)
        for joint in weightedJoints:
            jointName = joint.partition(character + ":")[2]
            for part in partOrder:
                if jointName in partJoints[part]:
                    geometryShape = cmds.skinCluster(skin, q = True, geometry = True)
                    geometry = cmds.listRelatives(geometryShape, parent = True)[0]
                    key = (geometry, part, skin)
                    if key not in seen:
                        seen.add(key)
                        characterGeo.append([geometry, part, skin])
    #guard: the original divided by len(characterGeo) unconditionally and could
    #raise ZeroDivisionError when no skinned geometry matched any part
    if characterGeo:
        progress = 100/len(characterGeo)
        originalProgress = 100/len(characterGeo)
        for geo in characterGeo:
            geom = geo[0]
            part = geo[1]
            skin = geo[2]
            polys = cmds.polyEvaluate(geom, face = True)
            cmds.progressWindow(edit = True, progress = progress, status='Building Iso Selection Sets')
            progress = progress + originalProgress
            jointsForPart = partJoints[part]
            facesForPart = partFaces[part]
            for i in range(int(polys)):
                face = geom + ".f[" + str(i) + "]"
                #joints influencing this face with weight above .25
                transforms = cmds.skinPercent( skin, face, ib = .25, query=True, t= None)
                if transforms != None:
                    #record the face once (the original appended it once per
                    #matching influence, producing duplicate entries)
                    for transform in transforms:
                        if transform.partition(character + ":")[2] in jointsForPart:
                            facesForPart.append(face)
                            break
    cmds.progressWindow(endProgress=1)
    #enable menu items now that the face sets exist
    for item in ["isoSelect_Torso", "isoSelect_LeftArm", "isoSelect_RightArm", "isoSelect_LeftLeg", "isoSelect_RightLeg", "isoSelect_Head", "isoSelect_ShowAll"]:
        cmds.menuItem(self.widgets[item], edit = True, enable = True)
    cmds.menuItem(self.widgets["isoSelect_Generate"], edit = True, enable = False)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def exitIsoOnSave(self, *args):
    """Undo any material-based isolation before the scene is saved, then
    re-save the file and re-arm the SceneSaved script job (it is runOnce)."""
    #restore each hidden face's original shading group
    if self.mats != []:
        for face, shadingGroup in self.mats:
            cmds.sets(face, forceElement = shadingGroup)
    #remove the temporary transparent material and its shading group
    for node in ("isoSelect_M", "isoSelect_Set"):
        if cmds.objExists(node):
            cmds.delete(node)
    #turn every iso part checkbox back on
    for key in ("isoSelect_Torso", "isoSelect_LeftArm", "isoSelect_RightArm", "isoSelect_LeftLeg", "isoSelect_RightLeg", "isoSelect_Head"):
        cmds.menuItem(self.widgets[key], edit = True, cb = True)
    #ReSave Scene using the type matching the file extension
    filename = cmds.file(q = True, sceneName = True)
    filetype = filename.rpartition(".")[2]
    if filetype == "mb":
        filetype = "mayaBinary"
    elif filetype == "ma":
        filetype = "mayaAscii"
    cmds.file(save = True, force = True, type = filetype)
    #the script job was created runOnce: register it again for the next save
    cmds.scriptJob(event = ["SceneSaved", self.exitIsoOnSave], parent = self.widgets["window"], kws = True, runOnce = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def exitIso(self, *args):
    """Leave isolation entirely: switch off Maya's viewport isolate select
    (classic method), restore original materials (material method), and
    re-check every iso part checkbox."""
    #toggle off isolate select on the focused panel, if it is currently on
    panel = cmds.getPanel(wf=True)
    try:
        currentState = cmds.isolateSelect(panel, q=True, s=True)
        if currentState != False:
            mel.eval('enableIsolateSelect %s %d' % (panel, not currentState) )
    except:
        cmds.warning("Invalid viewport for isolate select command")
    #restore each hidden face's original shading group
    if self.mats != []:
        for face, shadingGroup in self.mats:
            cmds.sets(face, forceElement = shadingGroup)
    #remove the temporary transparent material and its shading group
    for node in ("isoSelect_M", "isoSelect_Set"):
        if cmds.objExists(node):
            cmds.delete(node)
    #turn every iso part checkbox back on
    for key in ("isoSelect_Torso", "isoSelect_LeftArm", "isoSelect_RightArm", "isoSelect_LeftLeg", "isoSelect_RightLeg", "isoSelect_Head"):
        cmds.menuItem(self.widgets[key], edit = True, cb = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def isoSelect(self, *args):
    """Isolate the checked body parts using the chosen isolation method.

    Classic: the checked parts' faces are selected and Maya's viewport
    isolate select is toggled, hiding everything else. Material: the
    UNCHECKED parts' faces are assigned a fully transparent material
    (isoSelect_M), leaving the rest of the scene visible. Any isolation
    currently in effect is undone first.
    """
    #unIsolate: start from a clean state
    isoPnl = cmds.getPanel(wf=True)
    try:
        isoCrnt = cmds.isolateSelect(isoPnl, q=True, s=True)
        if isoCrnt != False:
            mel.eval('enableIsolateSelect %s %d' % (isoPnl,not isoCrnt) )
    except:
        cmds.warning("Invalid viewport for isolate select command")
    #restore materials from a previous material-isolation pass
    if self.mats != []:
        for mat in self.mats:
            face = mat[0]
            sg = mat[1]
            cmds.sets(face, forceElement = sg)
    if cmds.objExists("isoSelect_M"):
        cmds.delete("isoSelect_M")
    if cmds.objExists("isoSelect_Set"):
        cmds.delete("isoSelect_Set")
    #find isolation method
    classic = cmds.menuItem(self.widgets["isoMethodClassic"], q = True, rb = True)
    material = cmds.menuItem(self.widgets["isoMethodMaterial"], q = True, rb = True)
    #checkbox widget / face list pairs, in the original evaluation order
    partInfo = [
        (self.widgets["isoSelect_Torso"], self.torsoFaces),
        (self.widgets["isoSelect_LeftArm"], self.leftArmFaces),
        (self.widgets["isoSelect_RightArm"], self.rightArmFaces),
        (self.widgets["isoSelect_LeftLeg"], self.leftLegFaces),
        (self.widgets["isoSelect_RightLeg"], self.rightLegFaces),
        (self.widgets["isoSelect_Head"], self.headFaces),
    ]
    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    if material:
        #select the faces of every UNCHECKED part
        #(a stray cmds.select(self.torsoFaces) that was immediately clobbered
        #by the clear below has been removed)
        cmds.select(clear = True)
        for cb, partFaces in partInfo:
            if cmds.menuItem(cb, q = True, cb = True) == False:
                cmds.select(partFaces, add = True)
        #grab the current selection (the original queried it a second time
        #into a redundant "faces" variable)
        selection = cmds.ls(sl = True)
        #remember each face's assigned shading group so exitIso can restore it.
        #query each shading group's membership ONCE, hoisted out of the
        #per-face loop -- the original re-ran ls(type="shadingEngine") and
        #sets(shader, q=True) for every single face
        self.mats = []
        shaderContents = []
        for shader in cmds.ls(type = "shadingEngine"):
            connectedFaces = cmds.sets(shader, q = True)
            if connectedFaces != None:
                shaderContents.append((shader, connectedFaces))
        for face in selection:
            geoName = face.rpartition(".")[0]
            for shader, connectedFaces in shaderContents:
                for obj in connectedFaces:
                    if obj.find(geoName) != -1:
                        self.mats.append([face, shader])
        #build the fully transparent material + shading group and assign it
        newMat = cmds.shadingNode("lambert", asShader = True, name = "isoSelect_M")
        cmds.setAttr(newMat + ".transparency", 1, 1, 1, type = "double3")
        shadingGroup = cmds.sets(name = "isoSelect_Set", renderable = True, noSurfaceShader = True, empty = True)
        cmds.connectAttr(newMat + ".outColor", shadingGroup + ".surfaceShader")
        if selection:
            cmds.select(selection)
            for each in selection:
                cmds.sets(each, forceElement = shadingGroup)
    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    if classic:
        #select the faces of every CHECKED part, then toggle isolate select
        cmds.select(clear = True)
        for cb, partFaces in partInfo:
            if cmds.menuItem(cb, q = True, cb = True) == True:
                cmds.select(partFaces, add = True)
        #isolate the selection
        isoPnl = cmds.getPanel(wf=True)
        try:
            isoCrnt = cmds.isolateSelect(isoPnl, q=True, s=True)
            mel.eval('enableIsolateSelect %s %d' % (isoPnl,not isoCrnt) )
        except:
            cmds.warning("Invalid viewport for isolate select command")
        cmds.select(clear = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def killUIScriptJob(self):
    """Kill the UI's main scriptJob and tear down the anim UI dock and window.

    Before deleting the dock, Maya's channel box (if it was reparented into
    the tool's "ART_cbFormLayout") is reparented back to Maya's main window
    and re-attached to its default form layout, so closing the tool does not
    lose the channel box.
    """
    #delete script job
    cmds.scriptJob(kill = self.mainScriptJob)
    if cmds.dockControl("artAnimUIDock", exists = True):
        # childArray is None when the channel box was never docked into our UI
        channelBox = cmds.formLayout("ART_cbFormLayout", q = True, childArray = True)
        if channelBox != None:
            channelBox = channelBox[0]
            #reparent the channelBox Layout back to maya's window
            cmds.control(channelBox, e = True, p = "MainChannelsLayersLayout")
            # fetch Maya's global channel box layout/form via MEL globals
            channelBoxLayout = mel.eval('$temp1=$gChannelsLayersForm')
            channelBoxForm = mel.eval('$temp1 = $gChannelButtonForm')
            #edit the channel box pane's attachment to the formLayout
            cmds.formLayout(channelBoxLayout, edit = True, af = [(channelBox, "left", 0),(channelBox, "right", 0), (channelBox, "bottom", 0)], attachControl = (channelBox, "top", 0, channelBoxForm))
        cmds.deleteUI("artAnimUIDock")
    if cmds.window("artAnimUI", exists = True):
        cmds.deleteUI("artAnimUI")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def interfaceScriptJob(self, *args):
    """ScriptJob callback fired when the anim UI dock changes state.

    Always exits any active isolate-select first; then, if the dock is no
    longer visible, reparents Maya's channel box back to Maya's main window
    and re-attaches it to its default form layout (same restore sequence as
    killUIScriptJob).
    """
    #unisolate any isolated parts
    self.exitIso()
    if cmds.dockControl(self.widgets["dock"], q = True, visible = True) == False:
        #re-sort out the channel box
        channelBox = cmds.formLayout("ART_cbFormLayout", q = True, childArray = True)
        if channelBox != None:
            channelBox = channelBox[0]
            #reparent the channelBox Layout back to maya's window
            cmds.control(channelBox, e = True, p = "MainChannelsLayersLayout")
            # fetch Maya's global channel box layout/form via MEL globals
            channelBoxLayout = mel.eval('$temp1=$gChannelsLayersForm')
            channelBoxForm = mel.eval('$temp1 = $gChannelButtonForm')
            #edit the channel box pane's attachment to the formLayout
            cmds.formLayout(channelBoxLayout, edit = True, af = [(channelBox, "left", 0),(channelBox, "right", 0), (channelBox, "bottom", 0)], attachControl = (channelBox, "top", 0, channelBoxForm))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def loadUISettings(self, *args):
    """Load saved UI preferences from disk and apply them to the UI.

    Reads the cPickle'd settings dict written by saveUISettings and restores
    the channel box visibility and the space-switch "match" checkbox. Does
    nothing if no settings file exists yet. A missing key makes
    settings.get() return None, so neither the True nor the False branch
    fires and that setting is left untouched.
    """
    settingsLocation = self.mayaToolsDir + "/General/Scripts/settings.txt"
    if os.path.exists(settingsLocation):
        # context manager guarantees the file handle is closed even if
        # unpickling raises (the original leaked the handle on error)
        with open(settingsLocation, 'r') as f:
            settings = cPickle.load(f)
        #set the UI settings based on file
        channelBox = settings.get("ChannelBox")
        if channelBox == True:
            cmds.menuItem(self.widgets["menuBar_settings_channelBox"], edit = True, checkBox = True)
            self.showChannelBox()
        if channelBox == False:
            cmds.menuItem(self.widgets["menuBar_settings_channelBox"], edit = True, checkBox = False)
            self.showChannelBox()
        #space switch settings
        match = settings.get("Match")
        if match == True:
            cmds.menuItem(self.widgets["spaceSwitch_MatchToggleCB"], edit = True, checkBox = True)
        if match == False:
            cmds.menuItem(self.widgets["spaceSwitch_MatchToggleCB"], edit = True, checkBox = False)
        #NOTE(review): saveUISettings also writes a "MatchMethod" key that is
        #never restored here — confirm whether that is intentional
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def saveUISettings(self, *args):
    """Save the user's UI preferences (set in the UI) out to disk.

    Persists the channel box visibility and the space-switch match toggle /
    match method checkboxes as a cPickle'd dict, read back by loadUISettings.
    """
    settingsLocation = self.mayaToolsDir + "/General/Scripts/settings.txt"
    # query the UI state BEFORE opening the file: the original opened (and
    # truncated) the settings file first, so a failed query left behind an
    # empty file and a leaked handle
    #Channel Box display settings
    value = cmds.menuItem(self.widgets["menuBar_settings_channelBox"], q = True, checkBox = True)
    #Space switch settings
    match = cmds.menuItem(self.widgets["spaceSwitch_MatchToggleCB"], q = True, cb = True)
    method = cmds.menuItem(self.widgets["spaceSwitch_MatchMethodCB"], q = True, cb = True)
    #create a dictionary with these values
    settings = {}
    settings["ChannelBox"] = value
    settings["Match"] = match
    settings["MatchMethod"] = method
    #write our dictionary to file; the context manager closes the handle
    with open(settingsLocation, 'w') as f:
        cPickle.dump(settings, f)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def showChannelBox(self, *args):
    """Show or hide Maya's channel box inside the anim UI.

    Driven by the settings-menu checkbox: when checked, Maya's global
    channel box pane is reparented into the UI's "cbFormLayout"; when
    unchecked, the column is collapsed and the pane is reparented back to
    Maya's main window. Saves the UI settings afterwards in both cases.
    """
    #get the value of the checkbox in the menu Item
    value = cmds.menuItem(self.widgets["menuBar_settings_channelBox"], q = True, checkBox = True)
    # fetch Maya's global channel box layout/form via MEL globals; stored on
    # self so the hide branch can restore attachments later
    self.channelBoxLayout = mel.eval('$temp1=$gChannelsLayersForm')
    self.channelBoxForm = mel.eval('$temp1 = $gChannelButtonForm')
    if value == True:
        self.channelBox = mel.eval('$temp1=$gChannelsLayersPane;')
        #unhide the column that will house the channel box
        cmds.rowColumnLayout(self.widgets["rowColLayout"], edit = True, cw = [(1, 400), (2, 50), (3, 220)])
        #parent the channel box to our anim UI
        cmds.control(self.channelBox, e = True, p = self.widgets["cbFormLayout"])
        cmds.formLayout(self.widgets["cbFormLayout"], edit = True, af = [(self.channelBox, "left", 0),(self.channelBox, "right", 0), (self.channelBox, "bottom", 0), (self.channelBox, "top", 0)])
        # re-query the child so self.channelBox holds the reparented control name
        channelBox = cmds.formLayout(self.widgets["cbFormLayout"], q = True, childArray = True)[0]
        self.channelBox = channelBox
    if value == False:
        #hide the column for the channel box in our anim UI (width 1 instead of 220)
        cmds.rowColumnLayout(self.widgets["rowColLayout"], edit = True, cw = [(1, 400), (2, 50), (3, 1)])
        try:
            #reparent the channelBox Layout back to maya's window
            cmds.control(self.channelBox, e = True, p = "MainChannelsLayersLayout")
            #edit the channel box pane's attachment to the formLayout
            cmds.formLayout(self.channelBoxLayout, edit = True, af = [(self.channelBox, "left", 0),(self.channelBox, "right", 0), (self.channelBox, "bottom", 0)], attachControl = (self.channelBox, "top", 0, self.channelBoxForm))
        except AttributeError:
            # self.channelBox does not exist if the box was never shown this session
            print "channel box restored"
    self.saveUISettings()
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def setThumbnail(self, characterName, project):
    """Set the active-character thumbnail and rebuild self.controls.

    Searches self.project's thumbnail folder for a "*_small" image whose
    name starts with the character name; if none is found there, falls back
    to scanning every project's thumbnail folder. Then repopulates
    self.controls with the full standard rig control list, optional fk
    clavicles (only if they exist on the character), and any custom joints
    declared on the character's Skeleton_Settings node.
    """
    projects = os.listdir(self.mayaToolsDir + "/General/Icons/ART/Thumbnails/")
    #NOTE(review): the 'project' parameter is unused; self.project is used
    #instead — confirm whether the parameter should drive the search
    thumbnailPath = self.mayaToolsDir + "/General/Icons/ART/Thumbnails/" + self.project + "/"
    thumbs = os.listdir(thumbnailPath)
    found = False
    for thumb in thumbs:
        if thumb.find("_small") != -1:
            thumbName = thumb.rpartition(".")[0]
            thumbName = thumbName.partition("_small")[0]
            # characterName[0:-1] drops the trailing character (namespace suffix)
            if thumbName.find(characterName[0:-1]) == 0:
                cmds.symbolButton(self.widgets["activeCharacterThumb"], edit = True, image = thumbnailPath + thumb, ann = characterName)
                found = True
    #if our character wasn't in the self.project, but possibly another project, check now
    if found == False:
        # loop variable renamed from 'project' so it no longer shadows the parameter
        for proj in projects:
            thumbnailPath = self.mayaToolsDir + "/General/Icons/ART/Thumbnails/" + proj + "/"
            thumbs = os.listdir(thumbnailPath)
            for thumb in thumbs:
                if thumb.find("_small") != -1:
                    thumbName = thumb.rpartition(".")[0]
                    thumbName = thumbName.partition("_small")[0]
                    if thumbName.find(characterName[0:-1]) == 0:
                        cmds.symbolButton(self.widgets["activeCharacterThumb"], edit = True, image = thumbnailPath + thumb, ann = characterName)
    #lastly, repopulate self.controls (direct assignment instead of an append loop)
    self.controls = ["head_fk_anim", "neck_01_fk_anim", "neck_02_fk_anim", "neck_03_fk_anim", "spine_01_anim", "spine_02_anim", "spine_03_anim", "spine_04_anim", "spine_05_anim", "mid_ik_anim", "chest_ik_anim",
        "body_anim", "hip_anim", "clavicle_l_anim", "clavicle_r_anim", "fk_arm_l_anim", "fk_arm_r_anim", "fk_elbow_l_anim", "fk_elbow_r_anim", "fk_wrist_l_anim", "fk_wrist_r_anim",
        "ik_elbow_l_anim", "ik_elbow_r_anim", "ik_wrist_l_anim", "ik_wrist_r_anim", "fk_thigh_l_anim", "fk_thigh_r_anim", "fk_calf_l_anim", "fk_calf_r_anim", "fk_foot_l_anim", "fk_foot_r_anim",
        "fk_ball_l_anim", "fk_ball_r_anim", "ik_foot_anim_l", "ik_foot_anim_r", "heel_ctrl_l", "heel_ctrl_r", "toe_wiggle_ctrl_l", "toe_wiggle_ctrl_r",
        "toe_tip_ctrl_l", "toe_tip_ctrl_r", "master_anim", "offset_anim", "root_anim", "upperarm_l_twist_anim", "upperarm_l_twist_2_anim", "upperarm_l_twist_3_anim", "upperarm_r_twist_anim", "upperarm_r_twist_2_anim", "upperarm_r_twist_3_anim", "l_thigh_twist_01_anim", "r_thigh_twist_01_anim",
        "pinky_metacarpal_ctrl_l", "pinky_metacarpal_ctrl_r", "pinky_finger_fk_ctrl_1_l", "pinky_finger_fk_ctrl_1_r", "pinky_finger_fk_ctrl_2_l", "pinky_finger_fk_ctrl_2_r", "pinky_finger_fk_ctrl_3_l", "pinky_finger_fk_ctrl_3_r",
        "ring_metacarpal_ctrl_l", "ring_metacarpal_ctrl_r", "ring_finger_fk_ctrl_1_l", "ring_finger_fk_ctrl_1_r", "ring_finger_fk_ctrl_2_l", "ring_finger_fk_ctrl_2_r", "ring_finger_fk_ctrl_3_l", "ring_finger_fk_ctrl_3_r",
        "middle_metacarpal_ctrl_l", "middle_metacarpal_ctrl_r", "middle_finger_fk_ctrl_1_l", "middle_finger_fk_ctrl_1_r", "middle_finger_fk_ctrl_2_l", "middle_finger_fk_ctrl_2_r", "middle_finger_fk_ctrl_3_l", "middle_finger_fk_ctrl_3_r",
        "index_metacarpal_ctrl_l", "index_metacarpal_ctrl_r", "index_finger_fk_ctrl_1_l", "index_finger_fk_ctrl_1_r", "index_finger_fk_ctrl_2_l", "index_finger_fk_ctrl_2_r", "index_finger_fk_ctrl_3_l", "index_finger_fk_ctrl_3_r",
        "thumb_finger_fk_ctrl_1_l", "thumb_finger_fk_ctrl_1_r", "thumb_finger_fk_ctrl_2_l", "thumb_finger_fk_ctrl_2_r", "thumb_finger_fk_ctrl_3_l", "thumb_finger_fk_ctrl_3_r",
        "index_l_ik_anim", "index_r_ik_anim", "middle_l_ik_anim", "middle_r_ik_anim", "ring_l_ik_anim", "ring_r_ik_anim", "pinky_l_ik_anim", "pinky_r_ik_anim", "thumb_l_ik_anim", "thumb_r_ik_anim",
        "index_l_poleVector", "index_r_poleVector", "middle_l_poleVector", "middle_r_poleVector", "ring_l_poleVector", "ring_r_poleVector", "pinky_l_poleVector", "pinky_r_poleVector", "thumb_l_poleVector", "thumb_r_poleVector",
        "l_global_ik_anim", "r_global_ik_anim", "lowerarm_l_twist_anim", "lowerarm_l_twist2_anim", "lowerarm_l_twist3_anim", "lowerarm_r_twist_anim", "lowerarm_r_twist2_anim", "lowerarm_r_twist3_anim", "calf_r_twist_anim", "calf_r_twist2_anim", "calf_r_twist3_anim",
        "calf_l_twist_anim", "calf_l_twist2_anim", "calf_l_twist3_anim", "thigh_l_twist_2_anim", "thigh_l_twist_3_anim", "thigh_r_twist_2_anim", "thigh_r_twist_3_anim"]
    #hack: fk clavicle controls exist only on some characters; add if present
    character = characterName
    for obj in ["fk_clavicle_l_anim", "fk_clavicle_r_anim"]:
        if cmds.objExists(character + ":" + obj):
            self.controls.append(obj)
    #find custom joints declared as "extraJoint*" attributes on Skeleton_Settings
    customJoints = []
    attrs = cmds.listAttr(character + ":" + "Skeleton_Settings")
    for attr in attrs:
        if attr.find("extraJoint") == 0:
            customJoints.append(attr)
    for joint in customJoints:
        attribute = cmds.getAttr(character + ":" + "Skeleton_Settings." + joint, asString = True)
        # attribute value is "/"-delimited; second field is the joint type,
        # last field the label
        jointType = attribute.partition("/")[2].partition("/")[0]
        label = attribute.rpartition("/")[2]
        if jointType == "leaf":
            # leaf labels carry a " (...)" suffix that is stripped off
            label = label.partition(" (")[0]
            control = label + "_anim"
            self.controls.append(control)
        if jointType == "jiggle":
            control = label + "_anim"
            self.controls.append(control)
        if jointType == "chain" or jointType == "dynamic":
            # chain labels look like "name (numJoints)"
            numJointsInChain = label.partition("(")[2].partition(")")[0]
            label = label.partition(" (")[0]
            self.controls.append(label + "_dyn_anim")
            cmds.select("*:" + label + "_ik_*_anim")
            selection = cmds.ls(sl = True)
            for each in selection:
                # strip the namespace before storing the control name
                niceName = each.partition(":")[2]
                self.controls.append(niceName)
            for i in range(int(numJointsInChain)):
                self.controls.append("fk_" + label + "_0" + str(i + 1) + "_anim")
                self.controls.append(label + "_cv_" + str(i) + "_anim")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def populateCharacterRigList(self, *args):
    """Add one menu item per referenced character to the character rig list.

    Each item's command switches the UI to that character via
    switchActiveCharacter.
    """
    for characterName in self.getCharacters():
        cmds.menuItem(label = characterName, parent = self.widgets["characterRigList"], c = partial(self.switchActiveCharacter, characterName))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def switchActiveCharacter(self, character, *args):
    """Make the given referenced character the active one in the anim UI.

    Updates the thumbnail, swaps picker/settings/list-view layout
    visibility to the chosen character, syncs the control-visibility
    toggle, flags any open space-switcher / pose-editor windows that now
    show a different character, and resets the isolate-select menu items.
    """
    #change the thumbnail
    self.setThumbnail(character, self.project)
    #change the visibility of the character pickers and show the correct one
    characters = self.getCharacters()
    for char in characters:
        cmds.columnLayout(self.widgets[char + "_characterPickerLayout"], edit = True, visible = False)
        cmds.columnLayout(self.widgets[char + "_rigSettingsMainColumn"], edit = True, visible = False)
        cmds.scrollLayout(self.widgets[char + "_listViewMainLayout"], edit = True, visible = False)
    cmds.columnLayout(self.widgets[character + "_characterPickerLayout"], edit = True, visible = True)
    cmds.columnLayout(self.widgets[character + "_rigSettingsMainColumn"], edit = True, visible = True)
    cmds.scrollLayout(self.widgets[character + "_listViewMainLayout"], edit = True, visible = True)
    #set the visibility toggle iconTextCheckBox to represent the current character's control visibility
    shape = cmds.listRelatives(character + ":" + "body_anim", shapes = True)[0]
    visible = cmds.getAttr(shape + ".v")
    if visible == False:
        cmds.iconTextCheckBox(self.widgets["pickerControlVisibility"], edit = True, value = False)
    if visible == True:
        cmds.iconTextCheckBox(self.widgets["pickerControlVisibility"], edit = True, value = True)
    #check to see if space switch window is open
    if cmds.window("spaceSwitcherUI", exists = True):
        # window title holds the character it was opened for
        title = cmds.window("spaceSwitcherUI", q = True, title = True)
        if character != title:
            # show the out-of-sync button if present; otherwise close the window
            if cmds.button("spaceSwitchSyncStatusButton", q = True, exists = True):
                cmds.button("spaceSwitchSyncStatusButton", edit = True, visible = True)
            else:
                cmds.deleteUI("spaceSwitcherUI")
    #check to see if pose editor is open
    if cmds.window("poseEditorUI", exists = True):
        peTitle = cmds.window("poseEditorUI", q = True, title = True)
        if character != peTitle:
            if cmds.button("poseEditor_syncStatusButton", q = True, exists = True):
                cmds.button("poseEditor_syncStatusButton", edit = True, visible = True)
            else:
                cmds.deleteUI("poseEditorUI")
    #reset the iso-select menu items: part toggles stay disabled until a new
    #iso selection is generated for this character
    cmds.menuItem(self.widgets["isoSelect_Torso"], edit = True, enable = False)
    cmds.menuItem(self.widgets["isoSelect_LeftArm"], edit = True, enable = False)
    cmds.menuItem(self.widgets["isoSelect_RightArm"], edit = True, enable = False)
    cmds.menuItem(self.widgets["isoSelect_LeftLeg"], edit = True, enable = False)
    cmds.menuItem(self.widgets["isoSelect_RightLeg"], edit = True, enable = False)
    cmds.menuItem(self.widgets["isoSelect_Head"], edit = True, enable = False)
    cmds.menuItem(self.widgets["isoSelect_ShowAll"], edit = True, enable = False)
    cmds.menuItem(self.widgets["isoSelect_Generate"], edit = True, enable = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def getCharacters(self):
    """Return the namespaces of all referenced characters in the scene.

    A reference counts as a character when a "Skeleton_Settings" node exists
    in its namespace. The namespace is derived from the reference node name
    by splitting on the trailing "RN"; any text after "RN" (e.g. a numeric
    suffix on duplicate references, "fooRN1") is kept appended to the
    namespace.

    Returns:
        list of str: character namespaces.
    """
    referenceNodes = []
    references = cmds.ls(type = "reference")
    # (debug print statements removed — they spammed the script editor)
    for reference in references:
        niceName = reference.rpartition("RN")[0]
        suffix = reference.rpartition("RN")[2]
        if suffix != "":
            # duplicate reference like "fooRN1": namespace is "foo1"
            if cmds.objExists(niceName + suffix + ":" + "Skeleton_Settings"):
                referenceNodes.append(niceName + suffix)
        else:
            if cmds.objExists(niceName + ":" + "Skeleton_Settings"):
                referenceNodes.append(niceName)
    return referenceNodes
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def createCharacterPicker(self, name, layout):
    """Build the full character picker UI (body + fingers) for one character.

    Creates a hidden column layout parented under `layout`, containing a
    collapsible "Body" frame (background image + body picker buttons) and a
    collapsible "Fingers" frame (background image + finger picker buttons).
    The layout starts invisible; switchActiveCharacter shows it.

    Args:
        name: character namespace, used to key self.widgets entries.
        layout: parent layout the picker column is created under.
    """
    #first thing to do is create the column layout that needs to be parented to the passed in layout
    self.widgets[name + "_characterPickerLayout"] = cmds.columnLayout(w = 470, h = 700, parent = layout, visible = False)
    self.formsToHide.append(self.widgets[name + "_characterPickerLayout"])
    #setup button color variables (re-set on self for every character created)
    self.blue = [.09, .75, .96]
    self.white = [1, 1, 1]
    self.orange = [1, .68, 0]
    self.purple = [.5, .09, .96]
    self.green = [0, 1, .16]
    #create the body frame Layout
    self.widgets[name + "_bodyFrame"] = cmds.frameLayout(label = "Body", collapse = False, collapsable = True, borderStyle = "in", w = 370, h = 470, parent = self.widgets[name + "_characterPickerLayout"], cc = partial(self.collapseCommand, name, "Body"), ec = partial(self.expandCommand, name, "Body"))
    self.widgets[name + "_bodyPickerForm"] = cmds.formLayout(w = 370, h = 470, parent = self.widgets[name + "_bodyFrame"])
    #background image for body picker
    image = self.mayaToolsDir + "/General/Icons/ART/picker.jpg"
    self.widgets[name + "_cpBackground"] = cmds.image(w = 370, h = 450, image = image, parent = self.widgets[name + "_bodyPickerForm"])
    #create the body picker controls
    self.createBodyPicker(name, self.widgets[name + "_bodyPickerForm"])
    #create the fingers frame Layout
    self.widgets[name + "_fingersFrame"] = cmds.frameLayout(label = "Fingers", collapse = False, collapsable = True, borderStyle = "in", w = 370, h = 205, parent = self.widgets[name + "_characterPickerLayout"], cc = partial(self.collapseCommand, name, "Fingers"), ec = partial(self.expandCommand, name, "Fingers"))
    self.widgets[name + "_fingerPickerForm"] = cmds.formLayout(w = 370, h = 205, parent = self.widgets[name + "_fingersFrame"])
    #background image for finger picker
    image = self.mayaToolsDir + "/General/Icons/ART/fingerPicker.jpg"
    self.widgets[name + "_fingerPickerBackground"] = cmds.image(w = 370, h = 205, image = image, parent = self.widgets[name + "_fingerPickerForm"])
    #create the finger picker controls
    self.createFingersPicker(name, self.widgets[name + "_fingerPickerForm"])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def createBodyPicker(self, name, layout, *args):
#get settings off of skeleton settings node to know what it is we need to create
numNeckBones = cmds.getAttr(name + ":Skeleton_Settings.numNeckBones")
numSpineBones = cmds.getAttr(name + ":Skeleton_Settings.numSpineBones")
leftArmTwist = cmds.getAttr(name + ":Skeleton_Settings.leftUpperArmTwist")
leftArmForeTwist = cmds.getAttr(name + ":Skeleton_Settings.leftLowerArmTwist")
rightArmTwist = cmds.getAttr(name + ":Skeleton_Settings.rightUpperArmTwist")
rightArmForeTwist = cmds.getAttr(name + ":Skeleton_Settings.rightLowerArmTwist")
leftThighTwist = cmds.getAttr(name + ":Skeleton_Settings.leftUpperLegTwist")
leftCalfTwist = cmds.getAttr(name + ":Skeleton_Settings.leftLowerLegTwist")
rightThighTwist = cmds.getAttr(name + ":Skeleton_Settings.rightUpperLegTwist")
rightCalfTwist = cmds.getAttr(name + ":Skeleton_Settings.rightLowerLegTwist")
attrList = cmds.listAttr(name + ":Skeleton_Settings", shortNames=True)
for i in attrList:
if i == "leftLowerLegHeelTwist":
leftHeelTwist = cmds.getAttr(name + ":Skeleton_Settings.leftLowerLegHeelTwist")
if i == "rightLowerLegHeelTwist":
rightHeelTwist = cmds.getAttr(name + ":Skeleton_Settings.rightLowerLegHeelTwist")
leftBall = cmds.getAttr(name + ":Skeleton_Settings.leftball")
rightBall = cmds.getAttr(name + ":Skeleton_Settings.rightball")
numLeftToes = cmds.getAttr(name + ":Skeleton_Settings.numLeftToes", asString = True)
numRightToes = cmds.getAttr(name + ":Skeleton_Settings.numRightToes", asString = True)
#create and place each body part's buttons
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#head
self.widgets[name + "_headPickerButton"] = cmds.button(w = 50, h = 50, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "head_fk_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_headPickerButton"], "top", 23), (self.widgets[name + "_headPickerButton"], "left", 159)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#neck
buttonHeight = int(40/numNeckBones)
if int(numNeckBones) == 3:
basePosition = 103
if int(numNeckBones) == 2:
basePosition = 94
if int(numNeckBones) == 1:
basePosition = 76
for i in range(int(numNeckBones)):
self.widgets[name + "_neck" + str(i + 1) + "_PickerButton"] = cmds.button(w = 32, h = buttonHeight, label = "", bgc = self.blue, c = partial(self.buttonSelectCommand, name, "neck_0" + str(i + 1) + "_fk_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_neck" + str(i + 1) + "_PickerButton"], "top", basePosition), (self.widgets[name + "_neck" + str(i + 1) + "_PickerButton"], "left", 170)])
basePosition = basePosition - buttonHeight
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#ik spine
if int(numSpineBones) == 5:
midPos = 158
topPos = 118
buttonHeight = 15
if int(numSpineBones) == 4:
midPos = 168
topPos = 116
buttonHeight = 19
if int(numSpineBones) == 3:
midPos = 154
topPos = 120
buttonHeight = 25
if int(numSpineBones) > 2:
self.widgets[name + "_ikSpineMidPickerButton"] = cmds.button(w = 120, h = buttonHeight, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "mid_ik_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_ikSpineMidPickerButton"], "top", midPos), (self.widgets[name + "_ikSpineMidPickerButton"], "left", 126)])
self.widgets[name + "_ikSpineTopPickerButton"] = cmds.button(w = 120, h = buttonHeight, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "chest_ik_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_ikSpineTopPickerButton"], "top", topPos), (self.widgets[name + "_ikSpineTopPickerButton"], "left", 126)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#fk spine
buttonHeight = int(75/numSpineBones)
if int(numSpineBones) == 5:
basePosition = 198
space = 5
if int(numSpineBones) == 4:
basePosition = 194
space = 7
if int(numSpineBones) == 3:
basePosition = 188
space = 9
if int(numSpineBones) == 2:
basePosition = 175
space = 17
for i in range(int(numSpineBones)):
self.widgets[name + "_spine" + str(i + 1) + "_PickerButton"] = cmds.button(w = 80, h = buttonHeight, label = "", bgc = self.blue, c = partial(self.buttonSelectCommand, name, "spine_0" + str(i + 1) + "_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_spine" + str(i + 1) + "_PickerButton"], "top", basePosition), (self.widgets[name + "_spine" + str(i + 1) + "_PickerButton"], "left", 144)])
basePosition = (basePosition - buttonHeight) - space
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#body and pelvis
self.widgets[name + "_bodyPickerButton"] = cmds.button(w = 100, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "body_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_bodyPickerButton"], "top", 218), (self.widgets[name + "_bodyPickerButton"], "left", 134)])
self.widgets[name + "_pelvisPickerButton"] = cmds.button(w = 80, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "hip_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_pelvisPickerButton"], "top", 240), (self.widgets[name + "_pelvisPickerButton"], "left", 144)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# ARMS #
# Picker buttons for the arm controls. Each button is created under the
# character's picker formLayout, then anchored with attachForm (af) pixel
# offsets. Left-side controls anchor to the "right" edge and right-side
# controls anchor to the "left" edge so the picker mirrors the character.
# Colors: self.blue = FK controls, self.orange = IK controls.
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#clavicles — clavSelectCommand is given both the FK and the base clavicle
#control names (it decides which to select)
self.widgets[name + "_leftClavPickerButton"] = cmds.button(w = 50, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.clavSelectCommand, name, "fk_clavicle_l_anim", "clavicle_l_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftClavPickerButton"], "top", 94), (self.widgets[name + "_leftClavPickerButton"], "right", 116)])
self.widgets[name + "_rightClavPickerButton"] = cmds.button(w = 50, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.clavSelectCommand, name, "fk_clavicle_r_anim", "clavicle_r_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightClavPickerButton"], "top", 94), (self.widgets[name + "_rightClavPickerButton"], "left", 116)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#fk upper arms
self.widgets[name + "_leftShoulderPickerButton"] = cmds.button(w = 78, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_arm_l_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftShoulderPickerButton"], "top", 94), (self.widgets[name + "_leftShoulderPickerButton"], "right", 36)])
self.widgets[name + "_rightShoulderPickerButton"] = cmds.button(w = 78, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_arm_r_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightShoulderPickerButton"], "top", 94), (self.widgets[name + "_rightShoulderPickerButton"], "left", 36)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#fk lower arms
self.widgets[name + "_leftElbowPickerButton"] = cmds.button(w = 20, h = 78, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_elbow_l_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftElbowPickerButton"], "top", 142), (self.widgets[name + "_leftElbowPickerButton"], "right", 35)])
self.widgets[name + "_rightElbowPickerButton"] = cmds.button(w = 20, h = 78, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_elbow_r_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightElbowPickerButton"], "top", 142), (self.widgets[name + "_rightElbowPickerButton"], "left", 35)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#fk hands
self.widgets[name + "_leftHandPickerButton"] = cmds.button(w = 40, h = 40, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_wrist_l_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftHandPickerButton"], "top", 247), (self.widgets[name + "_leftHandPickerButton"], "right", 24)])
self.widgets[name + "_rightHandPickerButton"] = cmds.button(w = 40, h = 40, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_wrist_r_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightHandPickerButton"], "top", 247), (self.widgets[name + "_rightHandPickerButton"], "left", 24)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#ik elbows (pole-vector controls)
self.widgets[name + "_leftIkElbowPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "ik_elbow_l_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIkElbowPickerButton"], "top", 118), (self.widgets[name + "_leftIkElbowPickerButton"], "right", 35)])
self.widgets[name + "_rightIkElbowPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "ik_elbow_r_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIkElbowPickerButton"], "top", 118), (self.widgets[name + "_rightIkElbowPickerButton"], "left", 35)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#ik hands
self.widgets[name + "_leftIkHandPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "ik_wrist_l_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIkHandPickerButton"], "top", 222), (self.widgets[name + "_leftIkHandPickerButton"], "right", 35)])
self.widgets[name + "_rightIkHandPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "ik_wrist_r_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIkHandPickerButton"], "top", 222), (self.widgets[name + "_rightIkHandPickerButton"], "left", 35)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# LEGS #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Determine the rig's leg style from the Skeleton_Settings node
# (0 and 1 select different FK knee/heel button layouts below).
# Older rigs may not carry the legStyle attribute at all, so default to 0
# (the original code left legStyle unbound in that case and would raise a
# NameError at the first `if legStyle == 0:` check).  listAttr returns None
# when nothing matches, so guard the iteration as well.
legStyle = 0
attrList = cmds.listAttr(name + ":Skeleton_Settings", shortNames=True) or []
for i in attrList:
    if i == "legStyle":
        legStyle = cmds.getAttr(name + ":Skeleton_Settings.legStyle")
#fk thighs (present for every leg style)
self.widgets[name + "_leftThighPickerButton"] = cmds.button(w = 20, h = 80, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_thigh_l_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftThighPickerButton"], "top", 240), (self.widgets[name + "_leftThighPickerButton"], "right", 124)])
self.widgets[name + "_rightThighPickerButton"] = cmds.button(w = 20, h = 80, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_thigh_r_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightThighPickerButton"], "top", 240), (self.widgets[name + "_rightThighPickerButton"], "left", 124)])
if legStyle == 0:
    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    #fk knees — legStyle 0 uses a single full-length (h=80) knee button per side
    self.widgets[name + "_leftFkKneePickerButton"] = cmds.button(w = 20, h = 80, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_calf_l_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftFkKneePickerButton"], "top", 343), (self.widgets[name + "_leftFkKneePickerButton"], "right", 124)])
    self.widgets[name + "_rightFkKneePickerButton"] = cmds.button(w = 20, h = 80, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_calf_r_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightFkKneePickerButton"], "top", 343), (self.widgets[name + "_rightFkKneePickerButton"], "left", 124)])
if legStyle == 1:
    # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
    #fk knees — legStyle 1 halves the knee button (h=40) to make room for the
    #extra FK heel segment below (presumably a hind/dog-leg rig — confirm)
    self.widgets[name + "_leftFkKneePickerButton"] = cmds.button(w = 20, h = 40, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_calf_l_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftFkKneePickerButton"], "top", 343), (self.widgets[name + "_leftFkKneePickerButton"], "right", 124)])
    self.widgets[name + "_rightFkKneePickerButton"] = cmds.button(w = 20, h = 40, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_calf_r_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightFkKneePickerButton"], "top", 343), (self.widgets[name + "_rightFkKneePickerButton"], "left", 124)])
    #fk heel
    self.widgets[name + "_leftFkHeelPickerButton"] = cmds.button(w = 20, h = 40, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_heel_l_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftFkHeelPickerButton"], "top", 383), (self.widgets[name + "_leftFkHeelPickerButton"], "right", 124)])
    self.widgets[name + "_rightFkHeelPickerButton"] = cmds.button(w = 20, h = 40, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_heel_r_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightFkHeelPickerButton"], "top", 383), (self.widgets[name + "_rightFkHeelPickerButton"], "left", 124)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#fk ankle (created for both leg styles)
self.widgets[name + "_leftFkAnklePickerButton"] = cmds.button(w = 40, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_foot_l_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftFkAnklePickerButton"], "top", 422), (self.widgets[name + "_leftFkAnklePickerButton"], "right", 82)])
self.widgets[name + "_rightFkAnklePickerButton"] = cmds.button(w = 40, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_foot_r_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightFkAnklePickerButton"], "top", 422), (self.widgets[name + "_rightFkAnklePickerButton"], "left", 82)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#fk ball — only built when the rig has a ball joint on that side
if leftBall:
    self.widgets[name + "_leftFkBallPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_ball_l_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftFkBallPickerButton"], "top", 422), (self.widgets[name + "_leftFkBallPickerButton"], "right", 59)])
if rightBall:
    self.widgets[name + "_rightFkBallPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "fk_ball_r_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightFkBallPickerButton"], "top", 422), (self.widgets[name + "_rightFkBallPickerButton"], "left", 59)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#ik knees — not buttons but float fields wired (connectControl) straight to
#the ik foot controls' knee_twist attributes, so typing/dragging a value
#twists the knee without selecting anything
self.widgets[name + "_leftIkKneePickerButton"] = cmds.floatField(w = 40, h = 20, parent = layout, step = 1, minValue = -360, maxValue = 360, precision = 1, ann = "Ctrl + MMB to drag invisible slider")
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIkKneePickerButton"], "top", 321), (self.widgets[name + "_leftIkKneePickerButton"], "right", 114)])
self.widgets[name + "_rightIkKneePickerButton"] = cmds.floatField(w = 40, h = 20, parent = layout, step = 1, minValue = -360, maxValue = 360, precision = 1, ann = "Ctrl + MMB to drag invisible slider")
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIkKneePickerButton"], "top", 321), (self.widgets[name + "_rightIkKneePickerButton"], "left", 114)])
cmds.connectControl(self.widgets[name + "_leftIkKneePickerButton"] , name + ":ik_foot_anim_l.knee_twist")
cmds.connectControl(self.widgets[name + "_rightIkKneePickerButton"] , name + ":ik_foot_anim_r.knee_twist")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#ik feet
self.widgets[name + "_leftIkFootPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "ik_foot_anim_l"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIkFootPickerButton"], "top", 422), (self.widgets[name + "_leftIkFootPickerButton"], "right", 124)])
self.widgets[name + "_rightIkFootPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "ik_foot_anim_r"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIkFootPickerButton"], "top", 422), (self.widgets[name + "_rightIkFootPickerButton"], "left", 124)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#ik heels
self.widgets[name + "_leftIkHeelPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "heel_ctrl_l"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIkHeelPickerButton"], "top", 428), (self.widgets[name + "_leftIkHeelPickerButton"], "right", 149)])
self.widgets[name + "_rightIkHeelPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "heel_ctrl_r"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIkHeelPickerButton"], "top", 428), (self.widgets[name + "_rightIkHeelPickerButton"], "left", 149)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#ik toe wiggles
self.widgets[name + "_leftIkToeWigglePickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "toe_wiggle_ctrl_l"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIkToeWigglePickerButton"], "top", 402), (self.widgets[name + "_leftIkToeWigglePickerButton"], "right", 74)])
self.widgets[name + "_rightIkToeWigglePickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "toe_wiggle_ctrl_r"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIkToeWigglePickerButton"], "top", 402), (self.widgets[name + "_rightIkToeWigglePickerButton"], "left", 74)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#ik toes
self.widgets[name + "_leftIkToePickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "toe_tip_ctrl_l"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIkToePickerButton"], "top", 428), (self.widgets[name + "_leftIkToePickerButton"], "right", 40)])
self.widgets[name + "_rightIkToePickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "toe_tip_ctrl_r"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIkToePickerButton"], "top", 428), (self.widgets[name + "_rightIkToePickerButton"], "left", 40)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#master, offset, and root — single labelled buttons (M/O/R) for the three
#top-level transform controls
self.widgets[name + "_masterPickerButton"] = cmds.button(w = 20, h = 20, label = "M", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "master_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_masterPickerButton"], "top", 401), (self.widgets[name + "_masterPickerButton"], "right", 175)])
self.widgets[name + "_offsetPickerButton"] = cmds.button(w = 20, h = 20, label = "O", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "offset_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_offsetPickerButton"], "top", 375), (self.widgets[name + "_offsetPickerButton"], "right", 175)])
self.widgets[name + "_rootPickerButton"] = cmds.button(w = 20, h = 20, label = "R", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "root_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rootPickerButton"], "top", 350), (self.widgets[name + "_rootPickerButton"], "right", 175)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Group-select buttons (self.green): each calls multiButtonSelectCommand
# with a fixed list of control names to select a whole limb/region at once.
# NOTE(review): the lists name controls unconditionally (e.g. neck_03,
# spine_05, fk_ball) — presumably multiButtonSelectCommand tolerates
# controls that don't exist on a given rig; confirm.
#select head group
self.widgets[name + "_headGroupPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["head_fk_anim", "neck_01_fk_anim", "neck_02_fk_anim", "neck_03_fk_anim"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_headGroupPickerButton"], "top", 2), (self.widgets[name + "_headGroupPickerButton"], "right", 178)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#select spine group
self.widgets[name + "_spineGroupPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["hip_anim", "body_anim", "spine_01_anim", "spine_02_anim", "spine_03_anim", "spine_04_anim", "spine_05_anim", "mid_ik_anim", "chest_ik_anim"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_spineGroupPickerButton"], "top", 264), (self.widgets[name + "_spineGroupPickerButton"], "right", 178)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#select left arm group
self.widgets[name + "_leftArmGroupPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["ik_wrist_l_anim", "ik_elbow_l_anim", "fk_arm_l_anim", "fk_elbow_l_anim", "fk_wrist_l_anim", "clavicle_l_anim"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftArmGroupPickerButton"], "top", 75), (self.widgets[name + "_leftArmGroupPickerButton"], "right", 117)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#select right arm group
self.widgets[name + "_rightArmGroupPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["ik_wrist_r_anim", "ik_elbow_r_anim", "fk_arm_r_anim", "fk_elbow_r_anim", "fk_wrist_r_anim", "clavicle_r_anim"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightArmGroupPickerButton"], "top", 75), (self.widgets[name + "_rightArmGroupPickerButton"], "left", 117)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#select left leg group
self.widgets[name + "_leftLegGroupPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["fk_thigh_l_anim", "fk_calf_l_anim", "fk_foot_l_anim", "fk_ball_l_anim", "ik_foot_anim_l", "heel_ctrl_l", "toe_wiggle_ctrl_l", "toe_tip_ctrl_l"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftLegGroupPickerButton"], "top", 222), (self.widgets[name + "_leftLegGroupPickerButton"], "right", 114)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#select left leg ik ctrls
self.widgets[name + "_leftLegIKGroupPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["ik_foot_anim_l", "heel_ctrl_l", "toe_wiggle_ctrl_l", "toe_tip_ctrl_l"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftLegIKGroupPickerButton"], "top", 408), (self.widgets[name + "_leftLegIKGroupPickerButton"], "right", 40)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#select right leg group
self.widgets[name + "_rightLegGroupPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["fk_thigh_r_anim", "fk_calf_r_anim", "fk_foot_r_anim", "fk_ball_r_anim", "ik_foot_anim_r", "heel_ctrl_r", "toe_wiggle_ctrl_r", "toe_tip_ctrl_r"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightLegGroupPickerButton"], "top", 222), (self.widgets[name + "_rightLegGroupPickerButton"], "left", 114)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#select right leg ik ctrls
self.widgets[name + "_rightLegIKGroupPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["ik_foot_anim_r", "heel_ctrl_r", "toe_wiggle_ctrl_r", "toe_tip_ctrl_r"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightLegIKGroupPickerButton"], "top", 408), (self.widgets[name + "_rightLegIKGroupPickerButton"], "left", 40)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#arm rolls — one purple button per upper-arm twist joint the rig actually
#has (leftArmTwist / rightArmTwist hold the per-side twist-joint count,
#set earlier in this method)
if leftArmTwist > 0:
    self.widgets[name + "_leftArmRollPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "upperarm_l_twist_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftArmRollPickerButton"], "top", 75), (self.widgets[name + "_leftArmRollPickerButton"], "right", 76)])
if leftArmTwist > 1:
    self.widgets[name + "_leftArmRoll2PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "upperarm_l_twist_2_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftArmRoll2PickerButton"], "top", 75), (self.widgets[name + "_leftArmRoll2PickerButton"], "right", 56)])
if leftArmTwist > 2:
    self.widgets[name + "_leftArmRoll3PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "upperarm_l_twist_3_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftArmRoll3PickerButton"], "top", 75), (self.widgets[name + "_leftArmRoll3PickerButton"], "right", 36)])
if rightArmTwist > 0:
    self.widgets[name + "_rightArmRollPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "upperarm_r_twist_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightArmRollPickerButton"], "top", 75), (self.widgets[name + "_rightArmRollPickerButton"], "left", 76)])
if rightArmTwist > 1:
    self.widgets[name + "_rightArmRoll2PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "upperarm_r_twist_2_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightArmRoll2PickerButton"], "top", 75), (self.widgets[name + "_rightArmRoll2PickerButton"], "left", 56)])
if rightArmTwist > 2:
    self.widgets[name + "_rightArmRoll3PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "upperarm_r_twist_3_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightArmRoll3PickerButton"], "top", 75), (self.widgets[name + "_rightArmRoll3PickerButton"], "left", 36)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#forearm twists — same pattern for the lowerarm twist joints.
#NOTE(review): control names here use "twist2"/"twist3" (no underscore)
#whereas the upper arm uses "twist_2"/"twist_3" — looks intentional but
#verify against the rig's actual control names.
if leftArmForeTwist > 0:
    self.widgets[name + "_leftForeTwistPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "lowerarm_l_twist_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftForeTwistPickerButton"], "top", 190), (self.widgets[name + "_leftForeTwistPickerButton"], "right", 15)])
if leftArmForeTwist > 1:
    self.widgets[name + "_leftForeTwist2PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "lowerarm_l_twist2_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftForeTwist2PickerButton"], "top", 170), (self.widgets[name + "_leftForeTwist2PickerButton"], "right", 15)])
if leftArmForeTwist > 2:
    self.widgets[name + "_leftForeTwist3PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "lowerarm_l_twist3_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftForeTwist3PickerButton"], "top", 150), (self.widgets[name + "_leftForeTwist3PickerButton"], "right", 15)])
if rightArmForeTwist > 0:
    self.widgets[name + "_rightForeTwistPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "lowerarm_r_twist_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightForeTwistPickerButton"], "top", 190), (self.widgets[name + "_rightForeTwistPickerButton"], "left", 15)])
if rightArmForeTwist > 1:
    self.widgets[name + "_rightForeTwist2PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "lowerarm_r_twist2_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightForeTwist2PickerButton"], "top", 170), (self.widgets[name + "_rightForeTwist2PickerButton"], "left", 15)])
if rightArmForeTwist > 2:
    self.widgets[name + "_rightForeTwist3PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "lowerarm_r_twist3_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightForeTwist3PickerButton"], "top", 150), (self.widgets[name + "_rightForeTwist3PickerButton"], "left", 15)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#thigh twists — purple buttons per thigh twist joint, stacked vertically
#(tops 241/261/281)
if leftThighTwist > 0:
    self.widgets[name + "_leftThighTwistPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "l_thigh_twist_01_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftThighTwistPickerButton"], "top", 241), (self.widgets[name + "_leftThighTwistPickerButton"], "right", 106)])
if leftThighTwist > 1:
    self.widgets[name + "_leftThighTwist2PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "l_thigh_twist_02_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftThighTwist2PickerButton"], "top", 261), (self.widgets[name + "_leftThighTwist2PickerButton"], "right", 106)])
if leftThighTwist > 2:
    self.widgets[name + "_leftThighTwist3PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "l_thigh_twist_03_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftThighTwist3PickerButton"], "top", 281), (self.widgets[name + "_leftThighTwist3PickerButton"], "right", 106)])
if rightThighTwist > 0:
    self.widgets[name + "_rightThighTwistPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "r_thigh_twist_01_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightThighTwistPickerButton"], "top", 241), (self.widgets[name + "_rightThighTwistPickerButton"], "left", 106)])
if rightThighTwist > 1:
    self.widgets[name + "_rightThighTwist2PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "r_thigh_twist_02_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightThighTwist2PickerButton"], "top", 261), (self.widgets[name + "_rightThighTwist2PickerButton"], "left", 106)])
if rightThighTwist > 2:
    self.widgets[name + "_rightThighTwist3PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "r_thigh_twist_03_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightThighTwist3PickerButton"], "top", 281), (self.widgets[name + "_rightThighTwist3PickerButton"], "left", 106)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#calf twists — vertical "top" offsets depend on the leg style because the
#legStyle 1 leg has the extra heel segment above the foot.
#NOTE(review): `values` is only assigned for legStyle 0 or 1; any other
#legStyle value would leave it unbound (NameError below) — confirm legStyle
#is restricted to {0, 1}.
if legStyle == 0:
    values = [400, 380, 360]
if legStyle == 1:
    values = [360, 340, 320]
if leftCalfTwist > 0:
    self.widgets[name + "_leftCalfTwistPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "calf_l_twist_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftCalfTwistPickerButton"], "top", values[0]), (self.widgets[name + "_leftCalfTwistPickerButton"], "right", 106)])
if leftCalfTwist > 1:
    self.widgets[name + "_leftCalfTwist2PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "calf_l_twist2_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftCalfTwist2PickerButton"], "top", values[1]), (self.widgets[name + "_leftCalfTwist2PickerButton"], "right", 106)])
if leftCalfTwist > 2:
    self.widgets[name + "_leftCalfTwist3PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "calf_l_twist3_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftCalfTwist3PickerButton"], "top", values[2]), (self.widgets[name + "_leftCalfTwist3PickerButton"], "right", 106)])
if rightCalfTwist > 0:
    self.widgets[name + "_rightCalfTwistPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "calf_r_twist_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightCalfTwistPickerButton"], "top", values[0]), (self.widgets[name + "_rightCalfTwistPickerButton"], "left", 106)])
if rightCalfTwist > 1:
    self.widgets[name + "_rightCalfTwist2PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "calf_r_twist2_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightCalfTwist2PickerButton"], "top", values[1]), (self.widgets[name + "_rightCalfTwist2PickerButton"], "left", 106)])
if rightCalfTwist > 2:
    self.widgets[name + "_rightCalfTwist3PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "calf_r_twist3_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightCalfTwist3PickerButton"], "top", values[2]), (self.widgets[name + "_rightCalfTwist3PickerButton"], "left", 106)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#heel twists — only the legStyle 1 (heel-segment) leg re-bases the vertical
#offsets; otherwise `values` keeps whatever the calf-twist section set.
#NOTE(review): heel twist counts are presumably 0 on rigs without a heel
#segment, so the stale `values` would be unused there — confirm.
if legStyle == 1:
    values = [400, 380, 360]
if leftHeelTwist > 0:
    self.widgets[name + "_leftHeelTwistPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "heel_l_twist_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftHeelTwistPickerButton"], "top", values[0]), (self.widgets[name + "_leftHeelTwistPickerButton"], "right", 106)])
if leftHeelTwist > 1:
    self.widgets[name + "_leftHeelTwist2PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "heel_l_twist2_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftHeelTwist2PickerButton"], "top", values[1]), (self.widgets[name + "_leftHeelTwist2PickerButton"], "right", 106)])
if leftHeelTwist > 2:
    self.widgets[name + "_leftHeelTwist3PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "heel_l_twist3_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftHeelTwist3PickerButton"], "top", values[2]), (self.widgets[name + "_leftHeelTwist3PickerButton"], "right", 106)])
if rightHeelTwist > 0:
    self.widgets[name + "_rightHeelTwistPickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "heel_r_twist_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightHeelTwistPickerButton"], "top", values[0]), (self.widgets[name + "_rightHeelTwistPickerButton"], "left", 106)])
if rightHeelTwist > 1:
    self.widgets[name + "_rightHeelTwist2PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "heel_r_twist2_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightHeelTwist2PickerButton"], "top", values[1]), (self.widgets[name + "_rightHeelTwist2PickerButton"], "left", 106)])
if rightHeelTwist > 2:
self.widgets[name + "_rightHeelTwist3PickerButton"] = cmds.button(w = 15, h = 15, label = "", parent = layout, bgc = self.purple, c = partial(self.buttonSelectCommand, name, "heel_r_twist3_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightHeelTwist3PickerButton"], "top", values[2]), (self.widgets[name + "_rightHeelTwist3PickerButton"], "left", 106)])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
#Setup right click menus for each of the limbs that can switch modes
#spine
for i in range(int(numSpineBones)):
buttonName = name + "_spine" + str(i + 1) + "_PickerButton"
menu = cmds.popupMenu(b = 3, parent = self.widgets[buttonName])
cmds.menuItem(label = "Spine FK Mode", parent = menu, c = partial(self.switchSpineMode, name, "FK"))
cmds.menuItem(label = "Spine IK Mode", parent = menu, c = partial(self.switchSpineMode, name, "IK"))
matchMenu = cmds.menuItem(label = "Matching", parent = menu, subMenu = True)
cmds.menuItem(label = "Match FK Rig to current IK Pose", parent = matchMenu, c = partial(self.match_singleFrame, "spine", None, "FK", "IK"))
cmds.menuItem(label = "Match IK Rig to current FK Pose", parent = matchMenu, c = partial(self.match_singleFrame, "spine", None, "IK", "FK"))
if i == 0:
spaceMenu = cmds.menuItem(label = "Space Switching", parent = menu, subMenu = True)
self.widgets[name + "_spine1_RadioCollection"] = cmds.radioMenuItemCollection(parent = spaceMenu)
cmds.menuItem(spaceMenu, edit = True, postMenuCommand = partial(self.getControlSpaces, menu, self.widgets[name + "_spine1_RadioCollection"] , "spine_01_space_switcher"))
if int(numSpineBones) > 2:
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_ikSpineMidPickerButton"])
cmds.menuItem(label = "Spine FK Mode", parent = menu, c = partial(self.switchSpineMode, name, "FK"))
cmds.menuItem(label = "Spine IK Mode", parent = menu, c = partial(self.switchSpineMode, name, "IK"))
matchMenu = cmds.menuItem(label = "Matching", parent = menu, subMenu = True)
cmds.menuItem(label = "Match FK Rig to current IK Pose", parent = matchMenu, c = partial(self.match_singleFrame, "spine", None, "FK", "IK"))
cmds.menuItem(label = "Match IK Rig to current FK Pose", parent = matchMenu, c = partial(self.match_singleFrame, "spine", None, "IK", "FK"))
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_ikSpineTopPickerButton"])
cmds.menuItem(label = "Spine FK Mode", parent = menu, c = partial(self.switchSpineMode, name, "FK"))
cmds.menuItem(label = "Spine IK Mode", parent = menu, c = partial(self.switchSpineMode, name, "IK"))
matchMenu = cmds.menuItem(label = "Matching", parent = menu, subMenu = True)
cmds.menuItem(label = "Match FK Rig to current IK Pose", parent = matchMenu, c = partial(self.match_singleFrame, "spine", None, "FK", "IK"))
cmds.menuItem(label = "Match IK Rig to current FK Pose", parent = matchMenu, c = partial(self.match_singleFrame, "spine", None, "IK", "FK"))
spaceMenu = cmds.menuItem(label = "Space Switching", parent = menu, subMenu = True)
self.widgets[name + "_chestIkSpine_RadioCollection"] = cmds.radioMenuItemCollection(parent = spaceMenu)
cmds.menuItem(spaceMenu, edit = True, postMenuCommand = partial(self.getControlSpaces, menu, self.widgets[name + "_chestIkSpine_RadioCollection"] , "chest_ik_anim_space_switcher"))
#Arms
for button in[self.widgets[name + "_leftClavPickerButton"], self.widgets[name + "_leftShoulderPickerButton"], self.widgets[name + "_leftElbowPickerButton"], self.widgets[name + "_leftHandPickerButton"], self.widgets[name + "_leftIkElbowPickerButton"], self.widgets[name + "_leftIkHandPickerButton"]]:
menu = cmds.popupMenu(b = 3, parent = button)
cmds.menuItem(label = "Arm FK Mode", parent = menu, c = partial(self.switchArmMode, name, "FK", "l"))
cmds.menuItem(label = "Arm IK Mode", parent = menu, c = partial(self.switchArmMode, name, "IK", "l"))
#hack to get new fk clav rig matching functionality. will eventually be replaced with something more graceful when I change it over to a modular system
if button == self.widgets[name + "_leftClavPickerButton"]:
if cmds.objExists(name + ":fk_clavicle_l_anim"):
cmds.menuItem(label = "Clavicle FK Mode", parent = menu, c = partial(self.switchClavMode, name, "FK", "l"))
cmds.menuItem(label = "Clavicle IK Mode", parent = menu, c = partial(self.switchClavMode, name, "IK", "l"))
matchMenu = cmds.menuItem(label = "Matching", parent = menu, subMenu = True)
cmds.menuItem(label = "Match FK Rig to current IK Pose", parent = matchMenu, c = partial(self.match_singleFrame, "arm", "l", "FK", "IK"))
cmds.menuItem(label = "Match IK Rig to current FK Pose", parent = matchMenu, c = partial(self.match_singleFrame, "arm", "l", "IK", "FK"))
subMenu = cmds.menuItem(label = "Arm FK Orientation Space", parent = menu, subMenu = True)
mode = cmds.getAttr(name + ":Rig_Settings.lFkArmOrient")
if mode == 0:
clavVal = True
bodyVal = False
worldVal = False
if mode == 1:
clavVal = False
bodyVal = True
worldVal = False
if mode == 2:
clavVal = False
bodyVal = False
worldVal = True
self.widgets[name + "_" + button + "_leftArm_RadioCollection"] = cmds.radioMenuItemCollection(parent = subMenu)
self.widgets[name + "_" + button + "_leftArm_ClavSpace"] = cmds.menuItem(label = "Clavicle Space(default fk behavior)", parent = subMenu, cl = self.widgets[name + "_" + button + "_leftArm_RadioCollection"], rb =clavVal, c = partial(self.switchArmOrientMode, name, 0, "l"))
self.widgets[name + "_" + button + "_leftArm_BodySpace"] = cmds.menuItem(label = "Body Space", parent = subMenu, cl = self.widgets[name + "_" + button + "_leftArm_RadioCollection"], rb =bodyVal, c = partial(self.switchArmOrientMode, name, 1, "l"))
self.widgets[name + "_" + button + "_leftArm_WrldSpace"] = cmds.menuItem(label = "World Space", parent = subMenu, cl = self.widgets[name + "_" + button + "_leftArm_RadioCollection"], rb =worldVal, c = partial(self.switchArmOrientMode, name, 2, "l"))
if button == self.widgets[name + "_leftIkHandPickerButton"]:
spaceMenu = cmds.menuItem(label = "Space Switching", parent = menu, subMenu = True)
self.widgets[name + "_leftIkHnad_RadioCollection"] = cmds.radioMenuItemCollection(parent = spaceMenu)
cmds.menuItem(spaceMenu, edit = True, postMenuCommand = partial(self.getControlSpaces, menu, self.widgets[name + "_leftIkHnad_RadioCollection"] , "ik_wrist_l_anim_space_switcher"))
if button == self.widgets[name + "_leftIkElbowPickerButton"]:
spaceMenu = cmds.menuItem(label = "Space Switching", parent = menu, subMenu = True)
self.widgets[name + "_leftIkElbow_RadioCollection"] = cmds.radioMenuItemCollection(parent = spaceMenu)
cmds.menuItem(spaceMenu, edit = True, postMenuCommand = partial(self.getControlSpaces, menu, self.widgets[name + "_leftIkElbow_RadioCollection"] , "ik_elbow_l_anim_space_switcher"))
for button in[self.widgets[name + "_rightClavPickerButton"], self.widgets[name + "_rightShoulderPickerButton"], self.widgets[name + "_rightElbowPickerButton"], self.widgets[name + "_rightHandPickerButton"], self.widgets[name + "_rightIkElbowPickerButton"], self.widgets[name + "_rightIkHandPickerButton"]]:
menu = cmds.popupMenu(b = 3, parent = button)
cmds.menuItem(label = "Arm FK Mode", parent = menu, c = partial(self.switchArmMode, name, "FK", "r"))
cmds.menuItem(label = "Arm IK Mode", parent = menu, c = partial(self.switchArmMode, name, "IK", "r"))
if button == self.widgets[name + "_rightClavPickerButton"]:
if cmds.objExists(name + ":fk_clavicle_r_anim"):
cmds.menuItem(label = "Clavicle FK Mode", parent = menu, c = partial(self.switchClavMode, name, "FK", "r"))
cmds.menuItem(label = "Clavicle IK Mode", parent = menu, c = partial(self.switchClavMode, name, "IK", "r"))
matchMenu = cmds.menuItem(label = "Matching", parent = menu, subMenu = True)
cmds.menuItem(label = "Match FK Rig to current IK Pose", parent = matchMenu, c = partial(self.match_singleFrame, "arm", "r", "FK", "IK"))
cmds.menuItem(label = "Match IK Rig to current FK Pose", parent = matchMenu, c = partial(self.match_singleFrame, "arm", "r", "IK", "FK"))
subMenu = cmds.menuItem(label = "Arm FK Orientation Space", parent = menu, subMenu = True)
mode = cmds.getAttr(name + ":Rig_Settings.rFkArmOrient")
if mode == 0:
clavVal = True
bodyVal = False
worldVal = False
if mode == 1:
clavVal = False
bodyVal = True
worldVal = False
if mode == 2:
clavVal = False
bodyVal = False
worldVal = True
self.widgets[name + "_" + button + "_rightArm_RadioCollection"] = cmds.radioMenuItemCollection(parent = subMenu)
self.widgets[name + "_" + button + "_rightArm_ClavSpace"] = cmds.menuItem(label = "Clavicle Space(default fk behavior)", parent = subMenu, cl = self.widgets[name + "_" + button + "_rightArm_RadioCollection"], rb =clavVal, c = partial(self.switchArmOrientMode, name, 0, "r"))
self.widgets[name + "_" + button + "_rightArm_BodySpace"] = cmds.menuItem(label = "Body Space", parent = subMenu, cl = self.widgets[name + "_" + button + "_rightArm_RadioCollection"], rb =bodyVal, c = partial(self.switchArmOrientMode, name, 1, "r"))
self.widgets[name + "_" + button + "_rightArm_WrldSpace"] = cmds.menuItem(label = "World Space", parent = subMenu, cl = self.widgets[name + "_" + button + "_rightArm_RadioCollection"], rb =worldVal, c = partial(self.switchArmOrientMode, name, 2, "r"))
if button == self.widgets[name + "_rightIkHandPickerButton"]:
spaceMenu = cmds.menuItem(label = "Space Switching", parent = menu, subMenu = True)
self.widgets[name + "_rightIkHnad_RadioCollection"] = cmds.radioMenuItemCollection(parent = spaceMenu)
cmds.menuItem(spaceMenu, edit = True, postMenuCommand = partial(self.getControlSpaces, menu, self.widgets[name + "_rightIkHnad_RadioCollection"] , "ik_wrist_r_anim_space_switcher"))
if button == self.widgets[name + "_rightIkElbowPickerButton"]:
spaceMenu = cmds.menuItem(label = "Space Switching", parent = menu, subMenu = True)
self.widgets[name + "_rightIkElbow_RadioCollection"] = cmds.radioMenuItemCollection(parent = spaceMenu)
cmds.menuItem(spaceMenu, edit = True, postMenuCommand = partial(self.getControlSpaces, menu, self.widgets[name + "_rightIkElbow_RadioCollection"] , "ik_elbow_r_anim_space_switcher"))
#Legs
for button in[self.widgets[name + "_leftThighPickerButton"], self.widgets[name + "_leftFkKneePickerButton"], self.widgets[name + "_leftFkAnklePickerButton"], self.widgets[name + "_leftIkFootPickerButton"]]:
menu = cmds.popupMenu(b = 3, parent = button)
cmds.menuItem(label = "Leg FK Mode", parent = menu, c = partial(self.switchLegMode, name, "FK", "l"))
cmds.menuItem(label = "Leg IK Mode", parent = menu, c = partial(self.switchLegMode, name, "IK", "l"))
matchMenu = cmds.menuItem(label = "Matching", parent = menu, subMenu = True)
cmds.menuItem(label = "Match FK Rig to current IK Pose", parent = matchMenu, c = partial(self.match_singleFrame, "leg", "l", "FK", "IK"))
cmds.menuItem(label = "Match IK Rig to current FK Pose", parent = matchMenu, c = partial(self.match_singleFrame, "leg", "l", "IK", "FK"))
if button == self.widgets[name + "_leftIkFootPickerButton"]:
spaceMenu = cmds.menuItem(label = "Space Switching", parent = menu, subMenu = True)
self.widgets[name + "_leftIkFoot_RadioCollection"] = cmds.radioMenuItemCollection(parent = spaceMenu)
cmds.menuItem(spaceMenu, edit = True, postMenuCommand = partial(self.getControlSpaces, menu, self.widgets[name + "_leftIkFoot_RadioCollection"] , "ik_foot_anim_l_space_switcher"))
for button in[self.widgets[name + "_rightThighPickerButton"], self.widgets[name + "_rightFkKneePickerButton"], self.widgets[name + "_rightFkAnklePickerButton"], self.widgets[name + "_rightIkFootPickerButton"]]:
menu = cmds.popupMenu(b = 3, parent = button)
cmds.menuItem(label = "Leg FK Mode", parent = menu, c = partial(self.switchLegMode, name, "FK", "r"))
cmds.menuItem(label = "Leg IK Mode", parent = menu, c = partial(self.switchLegMode, name, "IK", "r"))
matchMenu = cmds.menuItem(label = "Matching", parent = menu, subMenu = True)
cmds.menuItem(label = "Match FK Rig to current IK Pose", parent = matchMenu, c = partial(self.match_singleFrame, "leg", "r", "FK", "IK"))
cmds.menuItem(label = "Match IK Rig to current FK Pose", parent = matchMenu, c = partial(self.match_singleFrame, "leg", "r", "IK", "FK"))
if button == self.widgets[name + "_rightIkFootPickerButton"]:
spaceMenu = cmds.menuItem(label = "Space Switching", parent = menu, subMenu = True)
self.widgets[name + "_rightIkFoot_RadioCollection"] = cmds.radioMenuItemCollection(parent = spaceMenu)
cmds.menuItem(spaceMenu, edit = True, postMenuCommand = partial(self.getControlSpaces, menu, self.widgets[name + "_rightIkFoot_RadioCollection"] , "ik_foot_anim_r_space_switcher"))
#Head
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_headPickerButton"], postMenuCommand = self.getHeadSpace)
subMenu = cmds.menuItem(label = "Space Switching", parent = menu, subMenu = True)
collection = cmds.radioMenuItemCollection(parent = subMenu)
self.widgets["neckSpaceRB"] = cmds.menuItem(label = "Neck", parent = subMenu, cl = collection, rb = True, c = partial(self.switchHeadOrientMode, name, 0))
self.widgets["shoulderSpaceRB"] = cmds.menuItem(label = "Shoulder", parent = subMenu, cl = collection, rb = False, c = partial(self.switchHeadOrientMode, name, 1))
self.widgets["bodySpaceRB"] = cmds.menuItem(label = "Body", parent = subMenu, cl = collection, rb = False, c = partial(self.switchHeadOrientMode, name, 2))
self.widgets["worldSpaceRB"] = cmds.menuItem(label = "World", parent = subMenu, cl = collection, rb = False, c = partial(self.switchHeadOrientMode, name, 3))
#Neck
if cmds.objExists(name + ":neck_01_fk_anim.fkOrientation"):
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_neck1_PickerButton"], postMenuCommand = self.getNeckSpace)
subMenu = cmds.menuItem(label = "Space Switching", parent = menu, subMenu = True)
collection = cmds.radioMenuItemCollection(parent = subMenu)
self.widgets["neckOrientShoulderSpaceRB"] = cmds.menuItem(label = "Shoulder", parent = subMenu, cl = collection, rb = False, c = partial(self.switchNeckOrientMode, name, 0))
self.widgets["neckOrientBodySpaceRB"] = cmds.menuItem(label = "Body", parent = subMenu, cl = collection, rb = False, c = partial(self.switchNeckOrientMode, name, 1))
self.widgets["neckOrientWorldSpaceRB"] = cmds.menuItem(label = "World", parent = subMenu, cl = collection, rb = False, c = partial(self.switchNeckOrientMode, name, 2))
#Core (body, master)
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_bodyPickerButton"])
spaceMenu = cmds.menuItem(label = "Space Switching", parent = menu, subMenu = True)
self.widgets[name + "_bodySpaceSwitch_RadioCollection"] = cmds.radioMenuItemCollection(parent = spaceMenu)
cmds.menuItem(spaceMenu, edit = True, postMenuCommand = partial(self.getControlSpaces, menu, self.widgets[name + "_bodySpaceSwitch_RadioCollection"] , "body_anim_space_switcher"))
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_masterPickerButton"])
spaceMenu = cmds.menuItem(label = "Space Switching", parent = menu, subMenu = True)
self.widgets[name + "_masterSpaceSwitch_RadioCollection"] = cmds.radioMenuItemCollection(parent = spaceMenu)
cmds.menuItem(spaceMenu, edit = True, postMenuCommand = partial(self.getControlSpaces, menu, self.widgets[name + "_masterSpaceSwitch_RadioCollection"] , "master_anim_space_switcher"))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def createFingersPicker(self, name, layout, *args):
#select all fingers buttons
self.widgets[name + "_selectAllLeftFingers"] = cmds.symbolButton(image = self.mayaToolsDir + "/General/Icons/ART/lFingerAll.bmp", w = 175, h = 170, c = partial(self.multiButtonSelectCommand, name, ["index_metacarpal_ctrl_l", "middle_metacarpal_ctrl_l", "ring_metacarpal_ctrl_l", "pinky_metacarpal_ctrl_l", "index_finger_fk_ctrl_1_l", "middle_finger_fk_ctrl_1_l", "ring_finger_fk_ctrl_1_l", "pinky_finger_fk_ctrl_1_l", "thumb_finger_fk_ctrl_1_l", "index_finger_fk_ctrl_2_l", "middle_finger_fk_ctrl_2_l", "ring_finger_fk_ctrl_2_l", "pinky_finger_fk_ctrl_2_l", "thumb_finger_fk_ctrl_2_l", "index_finger_fk_ctrl_3_l", "middle_finger_fk_ctrl_3_l", "ring_finger_fk_ctrl_3_l", "pinky_finger_fk_ctrl_3_l", "thumb_finger_fk_ctrl_3_l"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_selectAllLeftFingers"], "top", 5), (self.widgets[name + "_selectAllLeftFingers"], "right", 5)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_selectAllLeftFingers"])
cmds.menuItem(label = "Select all IK Finger Controls", parent = menu, c = partial(self.multiButtonSelectCommand, name, ["index_l_ik_anim", "middle_l_ik_anim", "ring_l_ik_anim", "pinky_l_ik_anim", "thumb_l_ik_anim", "index_l_poleVector", "middle_l_poleVector", "ring_l_poleVector", "pinky_l_poleVector", "thumb_l_poleVector", "l_global_ik_anim"]))
cmds.menuItem(label = "Select FK and IK Finger Controls", parent = menu, c = partial(self.multiButtonSelectCommand, name, ["index_l_ik_anim", "middle_l_ik_anim", "ring_l_ik_anim", "pinky_l_ik_anim", "thumb_l_ik_anim", "index_l_poleVector", "middle_l_poleVector", "ring_l_poleVector", "pinky_l_poleVector", "thumb_l_poleVector", "l_global_ik_anim", "index_metacarpal_ctrl_l", "middle_metacarpal_ctrl_l", "ring_metacarpal_ctrl_l", "pinky_metacarpal_ctrl_l", "index_finger_fk_ctrl_1_l", "middle_finger_fk_ctrl_1_l", "ring_finger_fk_ctrl_1_l", "pinky_finger_fk_ctrl_1_l", "thumb_finger_fk_ctrl_1_l", "index_finger_fk_ctrl_2_l", "middle_finger_fk_ctrl_2_l", "ring_finger_fk_ctrl_2_l", "pinky_finger_fk_ctrl_2_l", "thumb_finger_fk_ctrl_2_l", "index_finger_fk_ctrl_3_l", "middle_finger_fk_ctrl_3_l", "ring_finger_fk_ctrl_3_l", "pinky_finger_fk_ctrl_3_l", "thumb_finger_fk_ctrl_3_l"]))
self.widgets[name + "_selectAllRightFingers"] = cmds.symbolButton(image = self.mayaToolsDir + "/General/Icons/ART/rFingerAll.bmp", w = 175, h = 170, c = partial(self.multiButtonSelectCommand, name, ["index_metacarpal_ctrl_r", "middle_metacarpal_ctrl_r", "ring_metacarpal_ctrl_r", "pinky_metacarpal_ctrl_r", "index_finger_fk_ctrl_1_r", "middle_finger_fk_ctrl_1_r", "ring_finger_fk_ctrl_1_r", "pinky_finger_fk_ctrl_1_r", "thumb_finger_fk_ctrl_1_r", "index_finger_fk_ctrl_2_r", "middle_finger_fk_ctrl_2_r", "ring_finger_fk_ctrl_2_r", "pinky_finger_fk_ctrl_2_r", "thumb_finger_fk_ctrl_2_r", "index_finger_fk_ctrl_3_r", "middle_finger_fk_ctrl_3_r", "ring_finger_fk_ctrl_3_r", "pinky_finger_fk_ctrl_3_r", "thumb_finger_fk_ctrl_3_r"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_selectAllRightFingers"], "top", 5), (self.widgets[name + "_selectAllRightFingers"], "left", 5)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_selectAllRightFingers"])
cmds.menuItem(label = "Select all IK Finger Controls", parent = menu, c = partial(self.multiButtonSelectCommand, name, ["index_r_ik_anim", "middle_r_ik_anim", "ring_r_ik_anim", "pinky_r_ik_anim", "thumb_r_ik_anim", "index_r_poleVector", "middle_r_poleVector", "ring_r_poleVector", "pinky_r_poleVector", "thumb_r_poleVector", "r_global_ik_anim"]))
cmds.menuItem(label = "Select FK and IK Finger Controls", parent = menu, c = partial(self.multiButtonSelectCommand, name, ["index_r_ik_anim", "middle_r_ik_anim", "ring_r_ik_anim", "pinky_r_ik_anim", "thumb_r_ik_anim", "index_r_poleVector", "middle_r_poleVector", "ring_r_poleVector", "pinky_r_poleVector", "thumb_r_poleVector", "r_global_ik_anim", "index_metacarpal_ctrl_r", "middle_metacarpal_ctrl_r", "ring_metacarpal_ctrl_r", "pinky_metacarpal_ctrl_r", "index_finger_fk_ctrl_1_r", "middle_finger_fk_ctrl_1_r", "ring_finger_fk_ctrl_1_r", "pinky_finger_fk_ctrl_1_r", "thumb_finger_fk_ctrl_1_r", "index_finger_fk_ctrl_2_r", "middle_finger_fk_ctrl_2_r", "ring_finger_fk_ctrl_2_r", "pinky_finger_fk_ctrl_2_r", "thumb_finger_fk_ctrl_2_r", "index_finger_fk_ctrl_3_r", "middle_finger_fk_ctrl_3_r", "ring_finger_fk_ctrl_3_r", "pinky_finger_fk_ctrl_3_r", "thumb_finger_fk_ctrl_3_r"]))
#Left Pinky
#get settings off of skeleton settings node to know what it is we need to create
leftPinkyMeta = cmds.getAttr(name + ":" + "Skeleton_Settings.leftpinkymeta")
leftPinky1 = cmds.getAttr(name + ":" + "Skeleton_Settings.leftpinky1")
leftPinky2 = cmds.getAttr(name + ":" + "Skeleton_Settings.leftpinky2")
leftPinky3 = cmds.getAttr(name + ":" + "Skeleton_Settings.leftpinky3")
if leftPinkyMeta:
self.widgets[name + "_leftPinkyMetacarpalPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "pinky_metacarpal_ctrl_l"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftPinkyMetacarpalPickerButton"], "top", 30), (self.widgets[name + "_leftPinkyMetacarpalPickerButton"], "right", 32)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftPinkyMetacarpalPickerButton"])
cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 0, "l"))
cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 1, "l"))
if leftPinky1:
self.widgets[name + "_leftPinky1PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "pinky_finger_fk_ctrl_1_l"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftPinky1PickerButton"], "top", 55), (self.widgets[name + "_leftPinky1PickerButton"], "right", 32)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftPinky1PickerButton"])
cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 0, "l"))
cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 1, "l"))
if leftPinky2:
self.widgets[name + "_leftPinky2PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "pinky_finger_fk_ctrl_2_l"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftPinky2PickerButton"], "top", 80), (self.widgets[name + "_leftPinky2PickerButton"], "right", 32)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftPinky2PickerButton"])
cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 0, "l"))
cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 1, "l"))
if leftPinky3:
self.widgets[name + "_leftPinky3PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "pinky_finger_fk_ctrl_3_l"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftPinky3PickerButton"], "top", 105), (self.widgets[name + "_leftPinky3PickerButton"], "right", 32)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftPinky3PickerButton"])
cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 0, "l"))
cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 1, "l"))
#Left Ring
#get settings off of skeleton settings node to know what it is we need to create
leftRingMeta = cmds.getAttr(name + ":" + "Skeleton_Settings.leftringmeta")
leftRing1 = cmds.getAttr(name + ":" + "Skeleton_Settings.leftring1")
leftRing2 = cmds.getAttr(name + ":" + "Skeleton_Settings.leftring2")
leftRing3 = cmds.getAttr(name + ":" + "Skeleton_Settings.leftring3")
if leftRingMeta:
self.widgets[name + "_leftRingMetacarpalPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "ring_metacarpal_ctrl_l"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftRingMetacarpalPickerButton"], "top", 30), (self.widgets[name + "_leftRingMetacarpalPickerButton"], "right", 57)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftRingMetacarpalPickerButton"])
cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 0, "l"))
cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 1, "l"))
if leftRing1:
self.widgets[name + "_leftRing1PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "ring_finger_fk_ctrl_1_l"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftRing1PickerButton"], "top", 55), (self.widgets[name + "_leftRing1PickerButton"], "right", 57)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftRing1PickerButton"])
cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 0, "l"))
cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 1, "l"))
if leftRing2:
self.widgets[name + "_leftRing2PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "ring_finger_fk_ctrl_2_l"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftRing2PickerButton"], "top", 80), (self.widgets[name + "_leftRing2PickerButton"], "right", 57)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftRing2PickerButton"])
cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 0, "l"))
cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 1, "l"))
if leftRing3:
self.widgets[name + "_leftRing3PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "ring_finger_fk_ctrl_3_l"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftRing3PickerButton"], "top", 105), (self.widgets[name + "_leftRing3PickerButton"], "right", 57)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftRing3PickerButton"])
cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 0, "l"))
cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 1, "l"))
#Left Middle
#get settings off of skeleton settings node to know what it is we need to create
leftMiddleMeta = cmds.getAttr(name + ":" + "Skeleton_Settings.leftmiddlemeta")
leftMiddle1 = cmds.getAttr(name + ":" + "Skeleton_Settings.leftmiddle1")
leftMiddle2 = cmds.getAttr(name + ":" + "Skeleton_Settings.leftmiddle2")
leftMiddle3 = cmds.getAttr(name + ":" + "Skeleton_Settings.leftmiddle3")
if leftMiddleMeta:
self.widgets[name + "_leftMiddleMetacarpalPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "middle_metacarpal_ctrl_l"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftMiddleMetacarpalPickerButton"], "top", 30), (self.widgets[name + "_leftMiddleMetacarpalPickerButton"], "right", 82)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftMiddleMetacarpalPickerButton"])
cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 0, "l"))
cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 1, "l"))
if leftMiddle1:
self.widgets[name + "_leftMiddle1PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "middle_finger_fk_ctrl_1_l"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftMiddle1PickerButton"], "top", 55), (self.widgets[name + "_leftMiddle1PickerButton"], "right", 82)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftMiddle1PickerButton"])
cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 0, "l"))
cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 1, "l"))
#NOTE(review): this chunk sits inside a larger UI-build method; `name` (the
#character namespace), `layout` (the picker formLayout) and the left*-finger
#flags come from the enclosing scope. Indentation restored to 4 spaces.
#Pattern for every FK finger picker button below: a 20x20 blue button on the
#picker formLayout whose command selects a single FK finger control
#(buttonSelectCommand), pinned with attachForm "top"/"right" offsets, plus a
#right-mouse (b = 3) popup that flips the whole finger between FK (mode 0)
#and IK (mode 1) via switchFingerMode(character, fingerName, mode, side).
if leftMiddle2:
    self.widgets[name + "_leftMiddle2PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "middle_finger_fk_ctrl_2_l"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftMiddle2PickerButton"], "top", 80), (self.widgets[name + "_leftMiddle2PickerButton"], "right", 82)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftMiddle2PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 0, "l"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 1, "l"))
if leftMiddle3:
    self.widgets[name + "_leftMiddle3PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "middle_finger_fk_ctrl_3_l"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftMiddle3PickerButton"], "top", 105), (self.widgets[name + "_leftMiddle3PickerButton"], "right", 82)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftMiddle3PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 0, "l"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 1, "l"))
#Left Index
#get settings off of skeleton settings node to know what it is we need to create
#(one button per finger joint the rig actually has; metacarpal + knuckles 1-3).
#All index-column buttons share the same "right" offset (107); the "top"
#offset steps 30/55/80/105 down the joint chain.
leftIndexMeta = cmds.getAttr(name + ":" + "Skeleton_Settings.leftindexmeta")
leftIndex1 = cmds.getAttr(name + ":" + "Skeleton_Settings.leftindex1")
leftIndex2 = cmds.getAttr(name + ":" + "Skeleton_Settings.leftindex2")
leftIndex3 = cmds.getAttr(name + ":" + "Skeleton_Settings.leftindex3")
if leftIndexMeta:
    self.widgets[name + "_leftIndexMetacarpalPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "index_metacarpal_ctrl_l"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIndexMetacarpalPickerButton"], "top", 30), (self.widgets[name + "_leftIndexMetacarpalPickerButton"], "right", 107)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftIndexMetacarpalPickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 0, "l"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 1, "l"))
if leftIndex1:
    self.widgets[name + "_leftIndex1PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "index_finger_fk_ctrl_1_l"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIndex1PickerButton"], "top", 55), (self.widgets[name + "_leftIndex1PickerButton"], "right", 107)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftIndex1PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 0, "l"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 1, "l"))
if leftIndex2:
    self.widgets[name + "_leftIndex2PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "index_finger_fk_ctrl_2_l"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIndex2PickerButton"], "top", 80), (self.widgets[name + "_leftIndex2PickerButton"], "right", 107)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftIndex2PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 0, "l"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 1, "l"))
if leftIndex3:
    self.widgets[name + "_leftIndex3PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "index_finger_fk_ctrl_3_l"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIndex3PickerButton"], "top", 105), (self.widgets[name + "_leftIndex3PickerButton"], "right", 107)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftIndex3PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 0, "l"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 1, "l"))
#Left Thumb
#get settings off of skeleton settings node to know what it is we need to create.
#Unlike the other fingers, the thumb has no metacarpal button and its three
#buttons step diagonally ("right" 132/142/152) to match the thumb's splay.
leftThumb1 = cmds.getAttr(name + ":" + "Skeleton_Settings.leftthumb1")
leftThumb2 = cmds.getAttr(name + ":" + "Skeleton_Settings.leftthumb2")
leftThumb3 = cmds.getAttr(name + ":" + "Skeleton_Settings.leftthumb3")
if leftThumb1:
    self.widgets[name + "_leftThumb1PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "thumb_finger_fk_ctrl_1_l"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftThumb1PickerButton"], "top", 30), (self.widgets[name + "_leftThumb1PickerButton"], "right", 132)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftThumb1PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 0, "l"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 1, "l"))
if leftThumb2:
    self.widgets[name + "_leftThumb2PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "thumb_finger_fk_ctrl_2_l"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftThumb2PickerButton"], "top", 55), (self.widgets[name + "_leftThumb2PickerButton"], "right", 142)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftThumb2PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 0, "l"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 1, "l"))
if leftThumb3:
    self.widgets[name + "_leftThumb3PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "thumb_finger_fk_ctrl_3_l"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftThumb3PickerButton"], "top", 80), (self.widgets[name + "_leftThumb3PickerButton"], "right", 152)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftThumb3PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 0, "l"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 1, "l"))
#Left finger row globals
#Green multi-select buttons, created unconditionally: each selects one whole
#row (all four fingers at the same joint level) via multiButtonSelectCommand.
#NOTE(review): the control names are listed regardless of the Skeleton_Settings
#flags above — presumably multiButtonSelectCommand skips controls that do not
#exist on this character; confirm against its implementation.
self.widgets[name + "_leftMetaRowPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["index_metacarpal_ctrl_l", "middle_metacarpal_ctrl_l", "ring_metacarpal_ctrl_l", "pinky_metacarpal_ctrl_l"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftMetaRowPickerButton"], "top", 30), (self.widgets[name + "_leftMetaRowPickerButton"], "right", 7)])
self.widgets[name + "_leftKnuckle1RowPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["index_finger_fk_ctrl_1_l", "middle_finger_fk_ctrl_1_l", "ring_finger_fk_ctrl_1_l", "pinky_finger_fk_ctrl_1_l"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftKnuckle1RowPickerButton"], "top", 55), (self.widgets[name + "_leftKnuckle1RowPickerButton"], "right", 7)])
self.widgets[name + "_leftKnuckle2RowPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["index_finger_fk_ctrl_2_l", "middle_finger_fk_ctrl_2_l", "ring_finger_fk_ctrl_2_l", "pinky_finger_fk_ctrl_2_l"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftKnuckle2RowPickerButton"], "top", 80), (self.widgets[name + "_leftKnuckle2RowPickerButton"], "right", 7)])
self.widgets[name + "_leftKnuckle3RowPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["index_finger_fk_ctrl_3_l", "middle_finger_fk_ctrl_3_l", "ring_finger_fk_ctrl_3_l", "pinky_finger_fk_ctrl_3_l"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftKnuckle3RowPickerButton"], "top", 105), (self.widgets[name + "_leftKnuckle3RowPickerButton"], "right", 7)])
#Left finger column globals
#Green multi-select buttons along the top ("top" 7): each selects one whole
#finger (metacarpal + knuckles 1-3) for the column beneath it.
self.widgets[name + "_leftIndexColumnPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["index_metacarpal_ctrl_l", "index_finger_fk_ctrl_1_l", "index_finger_fk_ctrl_2_l", "index_finger_fk_ctrl_3_l"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIndexColumnPickerButton"], "top", 7), (self.widgets[name + "_leftIndexColumnPickerButton"], "right", 107)])
self.widgets[name + "_leftMiddleColumnPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["middle_metacarpal_ctrl_l", "middle_finger_fk_ctrl_1_l", "middle_finger_fk_ctrl_2_l", "middle_finger_fk_ctrl_3_l"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftMiddleColumnPickerButton"], "top", 7), (self.widgets[name + "_leftMiddleColumnPickerButton"], "right", 82)])
self.widgets[name + "_leftRingColumnPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["ring_metacarpal_ctrl_l", "ring_finger_fk_ctrl_1_l", "ring_finger_fk_ctrl_2_l", "ring_finger_fk_ctrl_3_l"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftRingColumnPickerButton"], "top", 7), (self.widgets[name + "_leftRingColumnPickerButton"], "right", 57)])
self.widgets[name + "_leftPinkyColumnPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["pinky_metacarpal_ctrl_l", "pinky_finger_fk_ctrl_1_l", "pinky_finger_fk_ctrl_2_l", "pinky_finger_fk_ctrl_3_l"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftPinkyColumnPickerButton"], "top", 7), (self.widgets[name + "_leftPinkyColumnPickerButton"], "right", 32)])
#Left thumb global
#Same idea for the thumb column (no metacarpal control on the thumb).
self.widgets[name + "_leftThumbColumnPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["thumb_finger_fk_ctrl_1_l", "thumb_finger_fk_ctrl_2_l", "thumb_finger_fk_ctrl_3_l"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftThumbColumnPickerButton"], "top", 7), (self.widgets[name + "_leftThumbColumnPickerButton"], "right", 132)])
#Left Finger IK
#Orange buttons for the finger IK anims, gated on the IK control actually
#existing in the character's namespace (cmds.objExists) rather than on the
#Skeleton_Settings flags. Same right-mouse FK/IK mode popup as the FK buttons.
#The thumb IK button sits at a different offset ("top" 107, "right" 152) to
#follow the thumb's diagonal placement; the other four share "top" 130.
if cmds.objExists(name + ":index_l_ik_anim"):
    self.widgets[name + "_leftIndexFingerIKPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "index_l_ik_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIndexFingerIKPickerButton"], "top", 130), (self.widgets[name + "_leftIndexFingerIKPickerButton"], "right", 107)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftIndexFingerIKPickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 0, "l"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 1, "l"))
if cmds.objExists(name + ":middle_l_ik_anim"):
    self.widgets[name + "_leftMiddleFingerIKPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "middle_l_ik_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftMiddleFingerIKPickerButton"], "top", 130), (self.widgets[name + "_leftMiddleFingerIKPickerButton"], "right", 82)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftMiddleFingerIKPickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 0, "l"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 1, "l"))
if cmds.objExists(name + ":ring_l_ik_anim"):
    self.widgets[name + "_leftRingFingerIKPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "ring_l_ik_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftRingFingerIKPickerButton"], "top", 130), (self.widgets[name + "_leftRingFingerIKPickerButton"], "right", 57)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftRingFingerIKPickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 0, "l"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 1, "l"))
if cmds.objExists(name + ":pinky_l_ik_anim"):
    self.widgets[name + "_leftPinkyFingerIKPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "pinky_l_ik_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftPinkyFingerIKPickerButton"], "top", 130), (self.widgets[name + "_leftPinkyFingerIKPickerButton"], "right", 32)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftPinkyFingerIKPickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 0, "l"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 1, "l"))
if cmds.objExists(name + ":thumb_l_ik_anim"):
    self.widgets[name + "_leftThumbFingerIKPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "thumb_l_ik_anim"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftThumbFingerIKPickerButton"], "top", 107), (self.widgets[name + "_leftThumbFingerIKPickerButton"], "right", 152)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftThumbFingerIKPickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 0, "l"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 1, "l"))
#Green row-select button for all five finger IK anims (created unconditionally).
self.widgets[name + "_leftIkFingersRowPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["index_l_ik_anim", "middle_l_ik_anim", "ring_l_ik_anim", "pinky_l_ik_anim", "thumb_l_ik_anim"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIkFingersRowPickerButton"], "top", 130), (self.widgets[name + "_leftIkFingersRowPickerButton"], "right", 7)])
#Left Finger IK Pole Vectors
#Smaller 10x10 buttons, one per pole vector, gated on the same *_l_ik_anim
#existence checks as the IK buttons above (the poleVector is assumed to exist
#whenever its ik_anim does). Thumb again offset ("top" 132, "right" 157).
if cmds.objExists(name + ":index_l_ik_anim"):
    self.widgets[name + "_leftIndexIkPvPickerButton"] = cmds.button(w = 10, h = 10, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "index_l_poleVector"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIndexIkPvPickerButton"], "top", 155), (self.widgets[name + "_leftIndexIkPvPickerButton"], "right", 112)])
if cmds.objExists(name + ":middle_l_ik_anim"):
    self.widgets[name + "_leftMiddleIkPvPickerButton"] = cmds.button(w = 10, h = 10, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "middle_l_poleVector"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftMiddleIkPvPickerButton"], "top", 155), (self.widgets[name + "_leftMiddleIkPvPickerButton"], "right", 87)])
if cmds.objExists(name + ":ring_l_ik_anim"):
    self.widgets[name + "_leftRingIkPvPickerButton"] = cmds.button(w = 10, h = 10, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "ring_l_poleVector"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftRingIkPvPickerButton"], "top", 155), (self.widgets[name + "_leftRingIkPvPickerButton"], "right", 62)])
if cmds.objExists(name + ":pinky_l_ik_anim"):
    self.widgets[name + "_leftPinkyIkPvPickerButton"] = cmds.button(w = 10, h = 10, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "pinky_l_poleVector"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftPinkyIkPvPickerButton"], "top", 155), (self.widgets[name + "_leftPinkyIkPvPickerButton"], "right", 37)])
if cmds.objExists(name + ":thumb_l_ik_anim"):
    self.widgets[name + "_leftThumbIkPvPickerButton"] = cmds.button(w = 10, h = 10, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "thumb_l_poleVector"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftThumbIkPvPickerButton"], "top", 132), (self.widgets[name + "_leftThumbIkPvPickerButton"], "right", 157)])
#Green row-select button for all five pole vectors.
self.widgets[name + "_leftIkFingersPvsPickerButton"] = cmds.button(w = 10, h = 10, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["index_l_poleVector", "middle_l_poleVector", "ring_l_poleVector", "pinky_l_poleVector", "thumb_l_poleVector"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIkFingersPvsPickerButton"], "top", 155), (self.widgets[name + "_leftIkFingersPvsPickerButton"], "right", 12)])
#Left IK Global Control
#Selects l_global_ik_anim; its right-mouse popup carries a "Space Switching"
#submenu whose radio items are (re)built lazily on open via postMenuCommand.
self.widgets[name + "_leftIkGlobalCtrlPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "l_global_ik_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_leftIkGlobalCtrlPickerButton"], "top", 7), (self.widgets[name + "_leftIkGlobalCtrlPickerButton"], "right", 7)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_leftIkGlobalCtrlPickerButton"])
spaceMenu = cmds.menuItem(label = "Space Switching", parent = menu, subMenu = True)
self.widgets[name + "_lIkGlobalCtrl_RadioCollection"] = cmds.radioMenuItemCollection(parent = spaceMenu)
#NOTE(review): getControlSpaces is handed `menu` (the whole popup), not
#`spaceMenu` (the submenu the postMenuCommand fires for) — verify against
#getControlSpaces that the space items land in the intended menu.
cmds.menuItem(spaceMenu, edit = True, postMenuCommand = partial(self.getControlSpaces, menu, self.widgets[name + "_lIkGlobalCtrl_RadioCollection"] , "l_global_ik_anim_space_switcher"))
#Right Pinky
#Right-hand mirror of the left-finger picker sections: identical pattern
#(blue 20x20 button per existing FK finger control, attachForm-positioned,
#right-mouse FK/IK mode popup), but controls are suffixed "_r", the side
#argument to switchFingerMode is "r", and buttons attach from the "left"
#edge instead of the "right" so the hand layout mirrors across the picker.
#get settings off of skeleton settings node to know what it is we need to create
rightPinkyMeta = cmds.getAttr(name + ":" + "Skeleton_Settings.rightpinkymeta")
rightPinky1 = cmds.getAttr(name + ":" + "Skeleton_Settings.rightpinky1")
rightPinky2 = cmds.getAttr(name + ":" + "Skeleton_Settings.rightpinky2")
rightPinky3 = cmds.getAttr(name + ":" + "Skeleton_Settings.rightpinky3")
if rightPinkyMeta:
    self.widgets[name + "_rightPinkyMetacarpalPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "pinky_metacarpal_ctrl_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightPinkyMetacarpalPickerButton"], "top", 30), (self.widgets[name + "_rightPinkyMetacarpalPickerButton"], "left", 32)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightPinkyMetacarpalPickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 1, "r"))
if rightPinky1:
    self.widgets[name + "_rightPinky1PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "pinky_finger_fk_ctrl_1_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightPinky1PickerButton"], "top", 55), (self.widgets[name + "_rightPinky1PickerButton"], "left", 32)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightPinky1PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 1, "r"))
if rightPinky2:
    self.widgets[name + "_rightPinky2PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "pinky_finger_fk_ctrl_2_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightPinky2PickerButton"], "top", 80), (self.widgets[name + "_rightPinky2PickerButton"], "left", 32)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightPinky2PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 1, "r"))
if rightPinky3:
    self.widgets[name + "_rightPinky3PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "pinky_finger_fk_ctrl_3_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightPinky3PickerButton"], "top", 105), (self.widgets[name + "_rightPinky3PickerButton"], "left", 32)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightPinky3PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 1, "r"))
#Right Ring
#get settings off of skeleton settings node to know what it is we need to create
rightRingMeta = cmds.getAttr(name + ":" + "Skeleton_Settings.rightringmeta")
rightRing1 = cmds.getAttr(name + ":" + "Skeleton_Settings.rightring1")
rightRing2 = cmds.getAttr(name + ":" + "Skeleton_Settings.rightring2")
rightRing3 = cmds.getAttr(name + ":" + "Skeleton_Settings.rightring3")
if rightRingMeta:
    self.widgets[name + "_rightRingMetacarpalPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "ring_metacarpal_ctrl_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightRingMetacarpalPickerButton"], "top", 30), (self.widgets[name + "_rightRingMetacarpalPickerButton"], "left", 57)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightRingMetacarpalPickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 1, "r"))
if rightRing1:
    self.widgets[name + "_rightRing1PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "ring_finger_fk_ctrl_1_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightRing1PickerButton"], "top", 55), (self.widgets[name + "_rightRing1PickerButton"], "left", 57)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightRing1PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 1, "r"))
if rightRing2:
    self.widgets[name + "_rightRing2PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "ring_finger_fk_ctrl_2_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightRing2PickerButton"], "top", 80), (self.widgets[name + "_rightRing2PickerButton"], "left", 57)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightRing2PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 1, "r"))
if rightRing3:
    self.widgets[name + "_rightRing3PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "ring_finger_fk_ctrl_3_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightRing3PickerButton"], "top", 105), (self.widgets[name + "_rightRing3PickerButton"], "left", 57)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightRing3PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 1, "r"))
#Right Middle
#get settings off of skeleton settings node to know what it is we need to create
rightMiddleMeta = cmds.getAttr(name + ":" + "Skeleton_Settings.rightmiddlemeta")
rightMiddle1 = cmds.getAttr(name + ":" + "Skeleton_Settings.rightmiddle1")
rightMiddle2 = cmds.getAttr(name + ":" + "Skeleton_Settings.rightmiddle2")
rightMiddle3 = cmds.getAttr(name + ":" + "Skeleton_Settings.rightmiddle3")
if rightMiddleMeta:
    self.widgets[name + "_rightMiddleMetacarpalPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "middle_metacarpal_ctrl_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightMiddleMetacarpalPickerButton"], "top", 30), (self.widgets[name + "_rightMiddleMetacarpalPickerButton"], "left", 82)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightMiddleMetacarpalPickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 1, "r"))
if rightMiddle1:
    self.widgets[name + "_rightMiddle1PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "middle_finger_fk_ctrl_1_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightMiddle1PickerButton"], "top", 55), (self.widgets[name + "_rightMiddle1PickerButton"], "left", 82)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightMiddle1PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 1, "r"))
if rightMiddle2:
    self.widgets[name + "_rightMiddle2PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "middle_finger_fk_ctrl_2_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightMiddle2PickerButton"], "top", 80), (self.widgets[name + "_rightMiddle2PickerButton"], "left", 82)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightMiddle2PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 1, "r"))
if rightMiddle3:
    self.widgets[name + "_rightMiddle3PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "middle_finger_fk_ctrl_3_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightMiddle3PickerButton"], "top", 105), (self.widgets[name + "_rightMiddle3PickerButton"], "left", 82)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightMiddle3PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 1, "r"))
#Right Index
#get settings off of skeleton settings node to know what it is we need to create
rightIndexMeta = cmds.getAttr(name + ":" + "Skeleton_Settings.rightindexmeta")
rightIndex1 = cmds.getAttr(name + ":" + "Skeleton_Settings.rightindex1")
rightIndex2 = cmds.getAttr(name + ":" + "Skeleton_Settings.rightindex2")
rightIndex3 = cmds.getAttr(name + ":" + "Skeleton_Settings.rightindex3")
if rightIndexMeta:
    self.widgets[name + "_rightIndexMetacarpalPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "index_metacarpal_ctrl_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIndexMetacarpalPickerButton"], "top", 30), (self.widgets[name + "_rightIndexMetacarpalPickerButton"], "left", 107)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightIndexMetacarpalPickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 1, "r"))
if rightIndex1:
    self.widgets[name + "_rightIndex1PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "index_finger_fk_ctrl_1_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIndex1PickerButton"], "top", 55), (self.widgets[name + "_rightIndex1PickerButton"], "left", 107)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightIndex1PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 1, "r"))
if rightIndex2:
    self.widgets[name + "_rightIndex2PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "index_finger_fk_ctrl_2_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIndex2PickerButton"], "top", 80), (self.widgets[name + "_rightIndex2PickerButton"], "left", 107)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightIndex2PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 1, "r"))
if rightIndex3:
    self.widgets[name + "_rightIndex3PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "index_finger_fk_ctrl_3_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIndex3PickerButton"], "top", 105), (self.widgets[name + "_rightIndex3PickerButton"], "left", 107)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightIndex3PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 1, "r"))
#Right Thumb
#get settings off of skeleton settings node to know what it is we need to create.
#Mirror of the left thumb: no metacarpal, three buttons stepping diagonally
#("left" 132/142/152) from the "left" edge; side argument is "r".
rightThumb1 = cmds.getAttr(name + ":" + "Skeleton_Settings.rightthumb1")
rightThumb2 = cmds.getAttr(name + ":" + "Skeleton_Settings.rightthumb2")
rightThumb3 = cmds.getAttr(name + ":" + "Skeleton_Settings.rightthumb3")
if rightThumb1:
    self.widgets[name + "_rightThumb1PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "thumb_finger_fk_ctrl_1_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightThumb1PickerButton"], "top", 30), (self.widgets[name + "_rightThumb1PickerButton"], "left", 132)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightThumb1PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 1, "r"))
if rightThumb2:
    self.widgets[name + "_rightThumb2PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "thumb_finger_fk_ctrl_2_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightThumb2PickerButton"], "top", 55), (self.widgets[name + "_rightThumb2PickerButton"], "left", 142)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightThumb2PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 1, "r"))
if rightThumb3:
    self.widgets[name + "_rightThumb3PickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.blue, c = partial(self.buttonSelectCommand, name, "thumb_finger_fk_ctrl_3_r"))
    cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightThumb3PickerButton"], "top", 80), (self.widgets[name + "_rightThumb3PickerButton"], "left", 152)])
    menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightThumb3PickerButton"])
    cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 0, "r"))
    cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 1, "r"))
#Right finger row globals
#Green multi-select row buttons (created unconditionally), mirroring the left
#hand's: one per joint level, selecting all four right-hand fingers at once.
#NOTE(review): control names are listed regardless of the flags above —
#presumably multiButtonSelectCommand skips controls that do not exist; confirm.
self.widgets[name + "_rightMetaRowPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["index_metacarpal_ctrl_r", "middle_metacarpal_ctrl_r", "ring_metacarpal_ctrl_r", "pinky_metacarpal_ctrl_r"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightMetaRowPickerButton"], "top", 30), (self.widgets[name + "_rightMetaRowPickerButton"], "left", 7)])
self.widgets[name + "_rightKnuckle1RowPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["index_finger_fk_ctrl_1_r", "middle_finger_fk_ctrl_1_r", "ring_finger_fk_ctrl_1_r", "pinky_finger_fk_ctrl_1_r"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightKnuckle1RowPickerButton"], "top", 55), (self.widgets[name + "_rightKnuckle1RowPickerButton"], "left", 7)])
self.widgets[name + "_rightKnuckle2RowPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["index_finger_fk_ctrl_2_r", "middle_finger_fk_ctrl_2_r", "ring_finger_fk_ctrl_2_r", "pinky_finger_fk_ctrl_2_r"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightKnuckle2RowPickerButton"], "top", 80), (self.widgets[name + "_rightKnuckle2RowPickerButton"], "left", 7)])
self.widgets[name + "_rightKnuckle3RowPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["index_finger_fk_ctrl_3_r", "middle_finger_fk_ctrl_3_r", "ring_finger_fk_ctrl_3_r", "pinky_finger_fk_ctrl_3_r"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightKnuckle3RowPickerButton"], "top", 105), (self.widgets[name + "_rightKnuckle3RowPickerButton"], "left", 7)])
#Right finger column globals
#get settings off of skeleton settings node to know what it is we need to create
self.widgets[name + "_rightIndexColumnPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["index_metacarpal_ctrl_r", "index_finger_fk_ctrl_1_r", "index_finger_fk_ctrl_2_r", "index_finger_fk_ctrl_3_r"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIndexColumnPickerButton"], "top", 7), (self.widgets[name + "_rightIndexColumnPickerButton"], "left", 107)])
self.widgets[name + "_rightMiddleColumnPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["middle_metacarpal_ctrl_r", "middle_finger_fk_ctrl_1_r", "middle_finger_fk_ctrl_2_r", "middle_finger_fk_ctrl_3_r"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightMiddleColumnPickerButton"], "top", 7), (self.widgets[name + "_rightMiddleColumnPickerButton"], "left", 82)])
self.widgets[name + "_rightRingColumnPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["ring_metacarpal_ctrl_r", "ring_finger_fk_ctrl_1_r", "ring_finger_fk_ctrl_2_r", "ring_finger_fk_ctrl_3_r"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightRingColumnPickerButton"], "top", 7), (self.widgets[name + "_rightRingColumnPickerButton"], "left", 57)])
self.widgets[name + "_rightPinkyColumnPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["pinky_metacarpal_ctrl_r", "pinky_finger_fk_ctrl_1_r", "pinky_finger_fk_ctrl_2_r", "pinky_finger_fk_ctrl_3_r"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightPinkyColumnPickerButton"], "top", 7), (self.widgets[name + "_rightPinkyColumnPickerButton"], "left", 32)])
#Right thumb global
self.widgets[name + "_rightThumbColumnPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["thumb_finger_fk_ctrl_1_r", "thumb_finger_fk_ctrl_2_r", "thumb_finger_fk_ctrl_3_r"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightThumbColumnPickerButton"], "top", 7), (self.widgets[name + "_rightThumbColumnPickerButton"], "left", 132)])
#Right Finger IK
if cmds.objExists(name + ":index_r_ik_anim"):
self.widgets[name + "_rightIndexFingerIKPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "index_r_ik_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIndexFingerIKPickerButton"], "top", 130), (self.widgets[name + "_rightIndexFingerIKPickerButton"], "left", 107)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightIndexFingerIKPickerButton"])
cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 0, "r"))
cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "index", 1, "r"))
if cmds.objExists(name + ":middle_r_ik_anim"):
self.widgets[name + "_rightMiddleFingerIKPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "middle_r_ik_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightMiddleFingerIKPickerButton"], "top", 130), (self.widgets[name + "_rightMiddleFingerIKPickerButton"], "left", 82)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightMiddleFingerIKPickerButton"])
cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 0, "r"))
cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "middle", 1, "r"))
if cmds.objExists(name + ":ring_r_ik_anim"):
self.widgets[name + "_rightRingFingerIKPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "ring_r_ik_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightRingFingerIKPickerButton"], "top", 130), (self.widgets[name + "_rightRingFingerIKPickerButton"], "left", 57)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightRingFingerIKPickerButton"])
cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 0, "r"))
cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "ring", 1, "r"))
if cmds.objExists(name + ":pinky_r_ik_anim"):
self.widgets[name + "_rightPinkyFingerIKPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "pinky_r_ik_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightPinkyFingerIKPickerButton"], "top", 130), (self.widgets[name + "_rightPinkyFingerIKPickerButton"], "left", 32)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightPinkyFingerIKPickerButton"])
cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 0, "r"))
cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "pinky", 1, "r"))
if cmds.objExists(name + ":thumb_r_ik_anim"):
self.widgets[name + "_rightThumbFingerIKPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "thumb_r_ik_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightThumbFingerIKPickerButton"], "top", 107), (self.widgets[name + "_rightThumbFingerIKPickerButton"], "left", 152)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightThumbFingerIKPickerButton"])
cmds.menuItem(label = "Finger FK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 0, "r"))
cmds.menuItem(label = "Finger IK Mode", parent = menu, c = partial(self.switchFingerMode, name, "thumb", 1, "r"))
self.widgets[name + "_rightIkFingersRowPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["index_r_ik_anim", "middle_r_ik_anim", "ring_r_ik_anim", "pinky_r_ik_anim", "thumb_r_ik_anim"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIkFingersRowPickerButton"], "top", 130), (self.widgets[name + "_rightIkFingersRowPickerButton"], "left", 7)])
#Right Finger IK Pole Vectors
if cmds.objExists(name + ":index_r_ik_anim"):
self.widgets[name + "_rightIndexIkPvPickerButton"] = cmds.button(w = 10, h = 10, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "index_r_poleVector"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIndexIkPvPickerButton"], "top", 155), (self.widgets[name + "_rightIndexIkPvPickerButton"], "left", 112)])
if cmds.objExists(name + ":middle_r_ik_anim"):
self.widgets[name + "_rightMiddleIkPvPickerButton"] = cmds.button(w = 10, h = 10, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "middle_r_poleVector"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightMiddleIkPvPickerButton"], "top", 155), (self.widgets[name + "_rightMiddleIkPvPickerButton"], "left", 87)])
if cmds.objExists(name + ":ring_r_ik_anim"):
self.widgets[name + "_rightRingIkPvPickerButton"] = cmds.button(w = 10, h = 10, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "ring_r_poleVector"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightRingIkPvPickerButton"], "top", 155), (self.widgets[name + "_rightRingIkPvPickerButton"], "left", 62)])
if cmds.objExists(name + ":pinky_r_ik_anim"):
self.widgets[name + "_rightPinkyIkPvPickerButton"] = cmds.button(w = 10, h = 10, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "pinky_r_poleVector"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightPinkyIkPvPickerButton"], "top", 155), (self.widgets[name + "_rightPinkyIkPvPickerButton"], "left", 37)])
if cmds.objExists(name + ":thumb_r_ik_anim"):
self.widgets[name + "_rightThumbIkPvPickerButton"] = cmds.button(w = 10, h = 10, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "thumb_r_poleVector"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightThumbIkPvPickerButton"], "top", 132), (self.widgets[name + "_rightThumbIkPvPickerButton"], "left", 157)])
self.widgets[name + "_rightIkFingersPvsPickerButton"] = cmds.button(w = 10, h = 10, label = "", parent = layout, bgc = self.green, c = partial(self.multiButtonSelectCommand, name, ["index_r_poleVector", "middle_r_poleVector", "ring_r_poleVector", "pinky_r_poleVector", "thumb_r_poleVector"]))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIkFingersPvsPickerButton"], "top", 155), (self.widgets[name + "_rightIkFingersPvsPickerButton"], "left", 12)])
#Right IK Global Control
self.widgets[name + "_rightIkGlobalCtrlPickerButton"] = cmds.button(w = 20, h = 20, label = "", parent = layout, bgc = self.orange, c = partial(self.buttonSelectCommand, name, "r_global_ik_anim"))
cmds.formLayout(layout, edit = True, af = [(self.widgets[name + "_rightIkGlobalCtrlPickerButton"], "top", 7), (self.widgets[name + "_rightIkGlobalCtrlPickerButton"], "left", 7)])
menu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rightIkGlobalCtrlPickerButton"])
spaceMenu = cmds.menuItem(label = "Space Switching", parent = menu, subMenu = True)
self.widgets[name + "_rIkGlobalCtrl_RadioCollection"] = cmds.radioMenuItemCollection(parent = spaceMenu)
cmds.menuItem(spaceMenu, edit = True, postMenuCommand = partial(self.getControlSpaces, menu, self.widgets[name + "_rIkGlobalCtrl_RadioCollection"] , "r_global_ik_anim_space_switcher"))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def buttonSelectCommand(self, name, control, *args):
    """Select the namespaced rig control for a clicked picker button.

    Shift-clicking (modifier bit 1) toggles the control in or out of the
    current selection; a plain click replaces the selection with it.
    Controls that do not exist in the scene are silently ignored.
    """
    target = name + ":" + control
    shiftHeld = (cmds.getModifiers() & 1) > 0
    if cmds.objExists(target):
        if shiftHeld:
            cmds.select(target, tgl = True)
        else:
            cmds.select(target)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def clavSelectCommand(self, name, fkControl, ikControl, *args):
    """Select the clavicle control that matches the rig's current clav mode.

    Reads <name>:Rig_Settings.<side>ClavMode (0 = FK, 1 = IK) to decide
    whether *fkControl* or *ikControl* is the active control, then selects
    it, honoring the shift modifier (toggle) like the other picker buttons.
    Falls back to IK mode when the attribute cannot be read.
    """
    clavMode = 1
    try:
        # fkControl looks like "fk_clavicle_l_anim"; pull out the side token
        side = fkControl.partition("clavicle_")[2].partition("_")[0]
        clavMode = cmds.getAttr(name + ":Rig_Settings." + side + "ClavMode")
    except Exception:
        # was a bare except: narrowed so Ctrl-C/SystemExit still propagate;
        # missing node/attribute simply keeps the IK default above
        pass
    # only the two discrete modes map to a control; a blended value selects nothing
    control = None
    if clavMode == 0:
        control = fkControl
    elif clavMode == 1:
        control = ikControl
    if control is not None and cmds.objExists(name + ":" + control):
        mods = cmds.getModifiers()
        if (mods & 1) > 0:
            cmds.select(name + ":" + control, tgl = True)
        else:
            cmds.select(name + ":" + control)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def multiButtonSelectCommand(self, name, controls, *args):
    """Select every existing namespaced control in *controls*.

    Shift-click (modifier bit 1) toggles each control's selection state;
    a plain click clears the selection and then adds every control that
    exists in the scene.
    """
    # NOTE: the original queried cmds.ls(sl=True) into an unused local in
    # the shift branch; that dead read has been removed.
    shiftHeld = (cmds.getModifiers() & 1) > 0
    if not shiftHeld:
        cmds.select(clear = True)
    for control in controls:
        if cmds.objExists(name + ":" + control):
            if shiftHeld:
                cmds.select(name + ":" + control, tgl = True)
            else:
                cmds.select(name + ":" + control, add = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def switchSpineMode(self, name, mode, *args):
    """Switch the spine between FK and IK and key both mode attributes.

    When the "match on switch" menu option is checked, the incoming mode is
    matched to the outgoing one on the current frame before switching.
    Modes other than "FK"/"IK" are ignored.
    """
    if mode not in ("FK", "IK"):
        return
    match = cmds.menuItem(self.widgets["menuBar_settings_matching"], q = True, checkBox = True)
    if match:
        previous = "IK" if mode == "FK" else "FK"
        self.match_singleFrame("spine", None, mode, previous)
    ikValue = 1 if mode == "IK" else 0
    cmds.setAttr(name + ":Rig_Settings.spine_ik", ikValue)
    cmds.setAttr(name + ":Rig_Settings.spine_fk", 1 - ikValue)
    cmds.setKeyframe(name + ":Rig_Settings.spine_ik")
    cmds.setKeyframe(name + ":Rig_Settings.spine_fk")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def switchClavMode(self, name, mode, side, *args):
    """Switch a clavicle between FK (0) and IK (1) and key the mode attr.

    Optionally matches the incoming mode to the outgoing one first, when
    the "match on switch" menu option is checked.
    """
    if mode not in ("FK", "IK"):
        return
    match = cmds.menuItem(self.widgets["menuBar_settings_matching"], q = True, checkBox = True)
    if match:
        self.match_singleFrame("clav", side, mode, "IK" if mode == "FK" else "FK")
    attr = name + ":Rig_Settings." + side + "ClavMode"
    cmds.setAttr(attr, 0 if mode == "FK" else 1)
    cmds.setKeyframe(attr)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def switchArmMode(self, name, mode, side, *args):
    """Switch an arm between FK (0) and IK (1) and key the mode attr.

    Optionally matches the incoming mode to the outgoing one first, when
    the "match on switch" menu option is checked.
    """
    if mode not in ("FK", "IK"):
        return
    match = cmds.menuItem(self.widgets["menuBar_settings_matching"], q = True, checkBox = True)
    if match:
        self.match_singleFrame("arm", side, mode, "IK" if mode == "FK" else "FK")
    attr = name + ":Rig_Settings." + side + "ArmMode"
    cmds.setAttr(attr, 0 if mode == "FK" else 1)
    cmds.setKeyframe(attr)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def switchArmOrientMode(self, name, mode, side, *args):
    """Switch the FK arm orientation space and key the snapped locator.

    When the requested mode differs from the current one, the target orient
    locator is snapped to the master locator's current pose (via a throwaway
    parent constraint) and keyed, so the arm does not pop when the space
    switches.
    NOTE(review): mode 0 gets no snap here — presumably the master locator
    already lives in that space; confirm against the rig build code.
    """
    currentMode = cmds.getAttr(name + ":Rig_Settings." + side + "FkArmOrient")
    if currentMode != mode:
        if mode == 2:
            # snap the world-space locator onto the master, key it, drop the constraint
            constraint = cmds.parentConstraint(name + ":fk_orient_master_loc_" + side, name + ":fk_orient_world_loc_" + side)[0]
            cmds.setKeyframe(name + ":fk_orient_world_loc_" + side)
            cmds.delete(constraint)
        if mode == 1:
            # snap the body-space locator the same way
            constraint = cmds.parentConstraint(name + ":fk_orient_master_loc_" + side, name + ":fk_orient_body_loc_" + side)[0]
            cmds.setKeyframe(name + ":fk_orient_body_loc_" + side)
            cmds.delete(constraint)
        cmds.setAttr(name + ":Rig_Settings." + side + "FkArmOrient", mode)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def switchHeadOrientMode(self, name, mode, *args):
    """Switch the head FK control's orientation space without popping.

    Keys the head control on the previous frame, holds its current world
    orientation through a temporary locator while the fkOrientation mode
    flips, keys the result, then restores the user's selection.
    """
    currentSelection = cmds.ls(sl = True)
    currentMode = cmds.getAttr(name + ":head_fk_anim.fkOrientation")
    currentFrame = cmds.currentTime(q = True)
    if currentMode != mode:
        # key the pre-switch pose one frame back so the switch is a clean step
        cmds.currentTime(currentFrame - 1)
        cmds.setKeyframe(name + ":head_fk_anim")
        cmds.currentTime(currentFrame)
        #create temp locator
        tempLoc = cmds.spaceLocator(name = "headSnapTempLoc")[0]
        # match the locator to the head control's current world transform
        constraint = cmds.parentConstraint(name + ":head_fk_anim", tempLoc)[0]
        cmds.delete(constraint)
        # hold the head's orientation from the locator while the space flips
        constraint = cmds.orientConstraint(tempLoc, name + ":head_fk_anim")[0]
        cmds.setAttr(name + ":head_fk_anim.fkOrientation", mode)
        cmds.setKeyframe(name + ":head_fk_anim")
        cmds.delete(constraint)
        cmds.delete(tempLoc)
    # restore whatever the user had selected before the switch
    if len(currentSelection) > 0:
        cmds.select(currentSelection)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def switchNeckOrientMode(self, name, mode, *args):
    """Switch the neck FK control's orientation space without popping.

    Same snap-through-a-locator approach as switchHeadOrientMode: key the
    control on the previous frame, hold its world orientation with a temp
    locator while fkOrientation flips, key, then restore the selection.
    """
    currentSelection = cmds.ls(sl = True)
    currentMode = cmds.getAttr(name + ":neck_01_fk_anim.fkOrientation")
    currentFrame = cmds.currentTime(q = True)
    if currentMode != mode:
        # key the pre-switch pose one frame back so the switch is a clean step
        cmds.currentTime(currentFrame - 1)
        cmds.setKeyframe(name + ":neck_01_fk_anim")
        cmds.currentTime(currentFrame)
        #create temp locator
        tempLoc = cmds.spaceLocator(name = "neckSnapTempLoc")[0]
        # match the locator to the neck control's current world transform
        constraint = cmds.parentConstraint(name + ":neck_01_fk_anim", tempLoc)[0]
        cmds.delete(constraint)
        # hold the neck's orientation from the locator while the space flips
        constraint = cmds.orientConstraint(tempLoc, name + ":neck_01_fk_anim")[0]
        cmds.setAttr(name + ":neck_01_fk_anim.fkOrientation", mode)
        cmds.setKeyframe(name + ":neck_01_fk_anim")
        cmds.delete(constraint)
        cmds.delete(tempLoc)
    # restore whatever the user had selected before the switch
    if len(currentSelection) > 0:
        cmds.select(currentSelection)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def switchLegMode(self, name, mode, side, *args):
    """Switch a leg between FK (0) and IK (1) and key the mode attr.

    Optionally matches the incoming mode to the outgoing one first, when
    the "match on switch" menu option is checked.
    """
    if mode not in ("FK", "IK"):
        return
    match = cmds.menuItem(self.widgets["menuBar_settings_matching"], q = True, checkBox = True)
    if match:
        self.match_singleFrame("leg", side, mode, "IK" if mode == "FK" else "FK")
    attr = name + ":Rig_Settings." + side + "LegMode"
    cmds.setAttr(attr, 0 if mode == "FK" else 1)
    cmds.setKeyframe(attr)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def switchCustomChainMode(self, character, controlPrefix, mode, *args):
    """Switch a custom joint chain between FK, IK and dynamic modes.

    Sets the three Rig_Settings attributes (<prefix>_fk/_ik/_dynamic) so
    exactly one is on for the requested mode, then keys all three so the
    switch is captured in the animation. Unknown modes are ignored.
    """
    # active attribute values per mode, in (_fk, _ik, _dynamic) order;
    # collapses three near-identical copy/paste branches into one table
    modeValues = {
        "FK":      (1, 0, 0),
        "IK":      (0, 1, 0),
        "DYNAMIC": (0, 0, 1),
    }
    if mode not in modeValues:
        return
    base = character + ":Rig_Settings." + controlPrefix
    suffixes = ("_fk", "_ik", "_dynamic")
    for suffix, value in zip(suffixes, modeValues[mode]):
        cmds.setAttr(base + suffix, value)
    for suffix in suffixes:
        cmds.setKeyframe(base + suffix)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def switchFingerMode(self, name, finger, mode, side, *args):
    """Set and key the FK_IK mode attribute on one finger's mode control."""
    attr = name + ":" + finger + "_finger_" + side + "_mode_anim.FK_IK"
    cmds.setAttr(attr, mode)
    cmds.setKeyframe(attr)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def listView_selAllBelow(self, name, *args):
    """Select every rig control listed beneath args[0] in the tree view.

    Shift-click toggles each child into the selection; a plain click clears
    the selection and adds every child. Each selected child is also
    highlighted in the tree view widget.
    """
    shiftHeld = (cmds.getModifiers() & 1) > 0
    treeWidget = self.widgets[name + "_treeViewWidget"]
    #get all controls below
    children = cmds.treeView(treeWidget, q = True, children = args[0])
    # Maya returns None (not an empty list) for a leaf item; the original
    # code crashed iterating it
    if children is None:
        return
    if not shiftHeld:
        cmds.select(clear = True)
    for child in children:
        if cmds.objExists(name + ":" + child):
            if shiftHeld:
                cmds.select(name + ":" + child, tgl = True)
            else:
                cmds.select(name + ":" + child, add = True)
            #hilight object in listView
            cmds.treeView(treeWidget, edit = True, selectItem = [child, True])
    #self.listView_ScriptJob(name)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def listViewSelectItem(self, name, *args):
    """Select the scene controls matching the tree view's selected items.

    Shift-click toggles each item's control; a plain click clears the
    selection and adds each item's control.
    """
    shiftHeld = (cmds.getModifiers() & 1) > 0
    selected = cmds.treeView(self.widgets[name + "_treeViewWidget"], q = True, selectItem = True)
    # plain click still clears the selection even when nothing is highlighted
    if not shiftHeld:
        cmds.select(clear = True)
    # Maya returns None (not []) when the tree has no selection; the
    # original code crashed iterating it
    if selected is None:
        return
    for item in selected:
        if cmds.objExists(name + ":" + item):
            if shiftHeld:
                cmds.select(name + ":" + item, tgl = True)
            else:
                cmds.select(name + ":" + item, add = True)
    #self.listView_ScriptJob(name)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def listView_ScriptJob(self, *args):
    """Create a run-once script job that syncs the tree view on selection change.

    The job is parented to the tool window so it dies with the UI; kws=True
    keeps it alive across file operations until it fires.
    """
    job = cmds.scriptJob(
        event = ["SelectionChanged", self.listView_scriptJobCommand],
        parent = self.widgets["window"],
        runOnce = True,
        kws = True,
    )
    self.widgets["listViewScriptJob"] = job
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def listView_scriptJobCommand(self, *args):
character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
selection = cmds.ls(sl = True)
selectedItems = cmds.treeView(self.widgets[character + "_treeViewWidget"], q = True, selectItem = True)
if selectedItems != None:
for item in selectedItems:
if character + ":" + item not in selection:
cmds.treeView(self.widgets[character + "_treeViewWidget"], edit = True, selectItem = [item, False])
for item in selection:
niceName = item.partition(":")[2]
if selectedItems != None:
if niceName not in selectedItems:
if cmds.treeView(self.widgets[character + "_treeViewWidget"], q = True, itemExists = niceName):
cmds.treeView(self.widgets[character + "_treeViewWidget"], edit = True, selectItem = [niceName, True])
else:
if cmds.treeView(self.widgets[character + "_treeViewWidget"], q = True, itemExists = niceName):
cmds.treeView(self.widgets[character + "_treeViewWidget"], edit = True, selectItem = [niceName, True])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def createListView(self, name, layout, *args):
#create a scrollLayout that is a child of the passed in layout
self.widgets[name + "_listViewMainLayout"] = cmds.scrollLayout(w = 400, h = 700, hst = 0, parent = layout)
#self.formsToHide.append(self.widgets[name + "_listViewMainLayout"] )
#create the tree view widget
self.widgets[name + "_treeViewWidget"] = cmds.treeView(parent = self.widgets[name + "_listViewMainLayout"], numberOfButtons = 2, abr = True, w = 380, h = 3000, selectionChangedCommand = partial(self.listViewSelectItem, name), pressCommand = [[1, partial(self.listView_selAllBelow, name)], [2, partial(self.toggleVisibilityOnSelectedControlGroups, name)]])
#TOP LEVEL CONTROLS
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("RIG CONTROLS", ""), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["RIG CONTROLS", 1], expandItem = ["RIG CONTROLS", False],tc = ["RIG CONTROLS", self.orange[0], self.orange[1], self.orange[2]], bti = [["RIG CONTROLS", 1, "S"], ["RIG CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("Rig_Settings", "RIG CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("master_anim", "RIG CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("offset_anim", "RIG CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("root_anim", "RIG CONTROLS"), hb = True)
#TORSO
torsoControls = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("TORSO CONTROLS", "RIG CONTROLS"), hb = False)
torsoControls = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["TORSO CONTROLS", 1], expandItem = ["TORSO CONTROLS", False],tc = ["TORSO CONTROLS", self.orange[0], self.orange[1], self.orange[2]], labelBackgroundColor = ["TORSO CONTROLS", .3, .3, .3], bti = [["TORSO CONTROLS", 1, "S"], ["TORSO CONTROLS", 2, "V"]])
bodyAnim = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("body_anim", "TORSO CONTROLS"), hb = True)
hipAnim = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("hip_anim", "TORSO CONTROLS"), hb = True)
fkControls = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("SPINE FK CONTROLS", "TORSO CONTROLS"), hb = False)
torsoControls = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["SPINE FK CONTROLS", 2], tc = ["SPINE FK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [["SPINE FK CONTROLS", 1, "S"], ["SPINE FK CONTROLS", 2, "V"]])
for control in ["spine_01_anim", "spine_02_anim", "spine_03_anim", "spine_04_anim", "spine_05_anim"]:
if cmds.objExists(name + ":" + control):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (control, "SPINE FK CONTROLS"), hb = True)
if cmds.objExists(name + ":" + "chest_ik_anim"):
ikControls = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("SPINE IK CONTROLS", "TORSO CONTROLS"), hb = False)
torsoControls = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["SPINE IK CONTROLS", 2], tc = ["SPINE IK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [["SPINE IK CONTROLS", 1, "S"], ["SPINE IK CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("chest_ik_anim", "SPINE IK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("mid_ik_anim", "SPINE IK CONTROLS"), hb = True)
#HEAD
headControls = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("HEAD CONTROLS", "RIG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["HEAD CONTROLS", 1], expandItem = ["HEAD CONTROLS", False],tc = ["HEAD CONTROLS", self.orange[0], self.orange[1], self.orange[2]], labelBackgroundColor = ["HEAD CONTROLS", .4, .4, .4], bti = [["HEAD CONTROLS", 1, "S"], ["HEAD CONTROLS", 2, "V"]])
for control in ["neck_01_fk_anim", "neck_02_fk_anim", "neck_03_fk_anim"]:
if cmds.objExists(name + ":" + control):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (control, "HEAD CONTROLS"), hb = True)
headAnim = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("head_fk_anim", "HEAD CONTROLS"), hb = True)
#LEFT ARM
lArmControls = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("LEFT ARM CONTROLS", "RIG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["LEFT ARM CONTROLS", 1], expandItem = ["LEFT ARM CONTROLS", False],tc = ["LEFT ARM CONTROLS", self.orange[0], self.orange[1], self.orange[2]], labelBackgroundColor = ["LEFT ARM CONTROLS", .3, .3, .3], bti = [["LEFT ARM CONTROLS", 1, "S"], ["LEFT ARM CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("clavicle_l_anim", "LEFT ARM CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_clavicle_l_anim", "LEFT ARM CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("L ARM FK CONTROLS", "LEFT ARM CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["L ARM FK CONTROLS", 2], tc = ["L ARM FK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [["L ARM FK CONTROLS", 1, "S"], ["L ARM FK CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_arm_l_anim", "L ARM FK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_elbow_l_anim", "L ARM FK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_wrist_l_anim", "L ARM FK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("L ARM IK CONTROLS", "LEFT ARM CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["L ARM IK CONTROLS", 2], tc = ["L ARM IK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [["L ARM IK CONTROLS", 1, "S"], ["L ARM IK CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("ik_elbow_l_anim", "L ARM IK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("ik_wrist_l_anim", "L ARM IK CONTROLS"), hb = True)
#RIGHT ARM
lArmControls = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("RIGHT ARM CONTROLS", "RIG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["RIGHT ARM CONTROLS", 1], expandItem = ["RIGHT ARM CONTROLS", False],tc = ["RIGHT ARM CONTROLS", self.orange[0], self.orange[1], self.orange[2]], labelBackgroundColor = ["RIGHT ARM CONTROLS", .4, .4, .4], bti = [["RIGHT ARM CONTROLS", 1, "S"], ["RIGHT ARM CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("clavicle_r_anim", "RIGHT ARM CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_clavicle_r_anim", "RIGHT ARM CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("R ARM FK CONTROLS", "RIGHT ARM CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["R ARM FK CONTROLS", 2], tc = ["R ARM FK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [["R ARM FK CONTROLS", 1, "S"], ["R ARM FK CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_arm_r_anim", "R ARM FK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_elbow_r_anim", "R ARM FK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_wrist_r_anim", "R ARM FK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("R ARM IK CONTROLS", "RIGHT ARM CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["R ARM IK CONTROLS", 2], tc = ["R ARM IK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [["R ARM FK CONTROLS", 1, "S"], ["R ARM FK CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("ik_elbow_r_anim", "R ARM IK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("ik_wrist_r_anim", "R ARM IK CONTROLS"), hb = True)
#LEFT FINGERS
lFingerControls = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("LEFT FINGER CONTROLS", "RIG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["LEFT FINGER CONTROLS", 1], expandItem = ["LEFT FINGER CONTROLS", False],tc = ["LEFT FINGER CONTROLS", self.orange[0], self.orange[1], self.orange[2]], labelBackgroundColor = ["LEFT FINGER CONTROLS", .3, .3, .3], bti = [["LEFT FINGER CONTROLS", 1, "S"], ["LEFT FINGER CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("L FINGER FK CONTROLS", "LEFT FINGER CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["L FINGER FK CONTROLS", 2], tc = ["L FINGER FK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], expandItem = ["L FINGER FK CONTROLS", False], bti = [["L FINGER FK CONTROLS", 1, "S"], ["L FINGER FK CONTROLS", 2, "V"]])
for finger in ["index", "middle", "ring", "pinky", "thumb"]:
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("L_" + finger + "_FK", "L FINGER FK CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["L_" + finger + "_FK", 2], tc = ["L_" + finger + "_FK", self.blue[0], self.blue[1], self.blue[2]], bti = [["L_" + finger + "_FK", 1, "S"], ["L_" + finger + "_FK", 2, "V"]])
if cmds.objExists(name + ":" + finger + "_metacarpal_ctrl_l"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (finger + "_metacarpal_ctrl_l", "L_" + finger + "_FK"), hb = True)
if cmds.objExists(name + ":" + finger + "_finger_fk_ctrl_1_l"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (finger + "_finger_fk_ctrl_1_l", "L_" + finger + "_FK"), hb = True)
if cmds.objExists(name + ":" + finger + "_finger_fk_ctrl_2_l"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (finger + "_finger_fk_ctrl_2_l", "L_" + finger + "_FK"), hb = True)
if cmds.objExists(name + ":" + finger + "_finger_fk_ctrl_3_l"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (finger + "_finger_fk_ctrl_3_l", "L_" + finger + "_FK"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("L FINGER IK CONTROLS", "LEFT FINGER CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["L FINGER IK CONTROLS", 2], tc = ["L FINGER IK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], expandItem = ["L FINGER IK CONTROLS", False], bti = [["L FINGER IK CONTROLS", 1, "S"], ["L FINGER IK CONTROLS", 2, "V"]])
for finger in ["index", "middle", "ring", "pinky", "thumb"]:
if cmds.objExists(name + ":" + finger + "_l_ik_anim"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (finger + "_l_ik_anim", "L FINGER IK CONTROLS"), hb = True)
for finger in ["index", "middle", "ring", "pinky", "thumb"]:
if cmds.objExists(name + ":" + finger + "_l_ik_anim"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (finger + "_l_poleVector", "L FINGER IK CONTROLS"), hb = True)
if cmds.objExists(name + ":" + "l_global_ik_anim"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("l_global_ik_anim", "L FINGER IK CONTROLS"), hb = True)
#RIGHT FINGERS
lFingerControls = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("RIGHT FINGER CONTROLS", "RIG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["RIGHT FINGER CONTROLS", 1], expandItem = ["RIGHT FINGER CONTROLS", False],tc = ["RIGHT FINGER CONTROLS", self.orange[0], self.orange[1], self.orange[2]], labelBackgroundColor = ["RIGHT FINGER CONTROLS", .4, .4, .4], bti = [["RIGHT FINGER CONTROLS", 1, "S"], ["RIGHT FINGER CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("R FINGER FK CONTROLS", "RIGHT FINGER CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["R FINGER FK CONTROLS", 2], tc = ["R FINGER FK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], expandItem = ["R FINGER FK CONTROLS", False], bti = [["R FINGER FK CONTROLS", 1, "S"], ["R FINGER FK CONTROLS", 2, "V"]])
for finger in ["index", "middle", "ring", "pinky", "thumb"]:
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("R_" + finger + "_FK", "R FINGER FK CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["R_" + finger + "_FK", 2], tc = ["R_" + finger + "_FK", self.blue[0], self.blue[1], self.blue[2]], bti = [["R_" + finger + "_FK", 1, "S"], ["R_" + finger + "_FK", 2, "V"]])
if cmds.objExists(name + ":" + finger + "_metacarpal_ctrl_r"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (finger + "_metacarpal_ctrl_r", "R_" + finger + "_FK"), hb = True)
if cmds.objExists(name + ":" + finger + "_finger_fk_ctrl_1_r"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (finger + "_finger_fk_ctrl_1_r", "R_" + finger + "_FK"), hb = True)
if cmds.objExists(name + ":" + finger + "_finger_fk_ctrl_2_r"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (finger + "_finger_fk_ctrl_2_r", "R_" + finger + "_FK"), hb = True)
if cmds.objExists(name + ":" + finger + "_finger_fk_ctrl_3_r"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (finger + "_finger_fk_ctrl_3_r", "R_" + finger + "_FK"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("R FINGER IK CONTROLS", "RIGHT FINGER CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["R FINGER IK CONTROLS", 2], tc = ["R FINGER IK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], expandItem = ["R FINGER IK CONTROLS", False], bti = [["R FINGER IK CONTROLS", 1, "S"], ["R FINGER IK CONTROLS", 2, "V"]])
for finger in ["index", "middle", "ring", "pinky", "thumb"]:
if cmds.objExists(name + ":" + finger + "_r_ik_anim"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (finger + "_r_ik_anim", "R FINGER IK CONTROLS"), hb = True)
for finger in ["index", "middle", "ring", "pinky", "thumb"]:
if cmds.objExists(name + ":" + finger + "_r_ik_anim"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (finger + "_r_poleVector", "R FINGER IK CONTROLS"), hb = True)
if cmds.objExists(name + ":" + "r_global_ik_anim"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("r_global_ik_anim", "R FINGER IK CONTROLS"), hb = True)
#LEFT LEG
lLegControls = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("LEFT LEG CONTROLS", "RIG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["LEFT LEG CONTROLS", 1], expandItem = ["LEFT LEG CONTROLS", False],tc = ["LEFT LEG CONTROLS", self.orange[0], self.orange[1], self.orange[2]], labelBackgroundColor = ["LEFT LEG CONTROLS", .3, .3, .3], bti = [["LEFT LEG CONTROLS", 1, "S"], ["LEFT LEG CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("L LEG FK CONTROLS", "LEFT LEG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["L LEG FK CONTROLS", 2], tc = ["L LEG FK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [["L LEG FK CONTROLS", 1, "S"], ["L LEG FK CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_thigh_l_anim", "L LEG FK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_calf_l_anim", "L LEG FK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_foot_l_anim", "L LEG FK CONTROLS"), hb = True)
if cmds.objExists(name + ":" + "fk_ball_l_anim"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_ball_l_anim", "L LEG FK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("L LEG IK CONTROLS", "LEFT LEG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["L LEG IK CONTROLS", 2], tc = ["L LEG IK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [["L LEG IK CONTROLS", 1, "S"], ["L LEG IK CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("ik_foot_anim_l", "L LEG IK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("heel_ctrl_l", "L LEG IK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("toe_wiggle_ctrl_l", "L LEG IK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("toe_tip_ctrl_l", "L LEG IK CONTROLS"), hb = True)
#RIGHT LEG
rLegControls = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("RIGHT LEG CONTROLS", "RIG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["RIGHT LEG CONTROLS", 1], expandItem = ["RIGHT LEG CONTROLS", False],tc = ["RIGHT LEG CONTROLS", self.orange[0], self.orange[1], self.orange[2]], labelBackgroundColor = ["RIGHT LEG CONTROLS", .4, .4, .4], bti = [["RIGHT LEG CONTROLS", 1, "S"], ["RIGHT LEG CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("R LEG FK CONTROLS", "RIGHT LEG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["R LEG FK CONTROLS", 2], tc = ["R LEG FK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [["R LEG FK CONTROLS", 1, "S"], ["R LEG FK CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_thigh_r_anim", "R LEG FK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_calf_r_anim", "R LEG FK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_foot_r_anim", "R LEG FK CONTROLS"), hb = True)
if cmds.objExists(name + ":" + "fk_ball_r_anim"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_ball_r_anim", "R LEG FK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("R LEG IK CONTROLS", "RIGHT LEG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["R LEG IK CONTROLS", 2], tc = ["R LEG IK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [["R LEG IK CONTROLS", 1, "S"], ["R LEG IK CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("ik_foot_anim_r", "R LEG IK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("heel_ctrl_r", "R LEG IK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("toe_wiggle_ctrl_r", "R LEG IK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("toe_tip_ctrl_r", "R LEG IK CONTROLS"), hb = True)
#LEFT TOES
lToeControls = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("LEFT TOE CONTROLS", "RIG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["LEFT TOE CONTROLS", 1], expandItem = ["LEFT TOE CONTROLS", False],tc = ["LEFT TOE CONTROLS", self.orange[0], self.orange[1], self.orange[2]], labelBackgroundColor = ["LEFT TOE CONTROLS", .3, .3, .3], bti = [["LEFT TOE CONTROLS", 1, "S"], ["LEFT TOE CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("L TOE FK CONTROLS", "LEFT TOE CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["L TOE FK CONTROLS", 2], tc = ["L TOE FK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [["L TOE FK CONTROLS", 1, "S"], ["L TOE FK CONTROLS", 2, "V"]])
for toe in ["index", "middle", "ring", "pinky", "bigtoe"]:
if cmds.objExists(name + ":" + toe + "_metatarsal_ctrl_l"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (toe + "_metatarsal_ctrl_l", "L TOE FK CONTROLS"), hb = True)
if cmds.objExists(name + ":" + toe + "toe_fk_ctrl_1_l"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (toe + "toe_fk_ctrl_1_l", "L TOE FK CONTROLS"), hb = True)
if cmds.objExists(name + ":" + toe + "toe_fk_ctrl_2_l"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (toe + "toe_fk_ctrl_2_l", "L TOE FK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("L TOE IK CONTROLS", "LEFT TOE CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["L TOE IK CONTROLS", 2], tc = ["L TOE IK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [["L TOE IK CONTROLS", 1, "S"], ["L TOE IK CONTROLS", 2, "V"]])
for toe in ["index", "middle", "ring", "pinky", "bigtoe"]:
if cmds.objExists(name + ":" + toe + "_ik_ctrl_l"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (toe + "_ik_ctrl_l", "L TOE IK CONTROLS"), hb = True)
if cmds.objExists(name + ":" + "ik_global_ctrl_l"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("ik_global_ctrl_l", "L TOE IK CONTROLS"), hb = True)
#RIGHT TOES
lToeControls = cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("RIGHT TOE CONTROLS", "RIG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["RIGHT TOE CONTROLS", 1], expandItem = ["RIGHT TOE CONTROLS", False],tc = ["RIGHT TOE CONTROLS", self.orange[0], self.orange[1], self.orange[2]], labelBackgroundColor = ["RIGHT TOE CONTROLS", .4, .4, .4], bti = [["RIGHT TOE CONTROLS", 1, "S"], ["RIGHT TOE CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("R TOE FK CONTROLS", "RIGHT TOE CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["R TOE FK CONTROLS", 2], tc = ["R TOE FK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [["R TOE FK CONTROLS", 1, "S"], ["R TOE FK CONTROLS", 2, "V"]])
for toe in ["index", "middle", "ring", "pinky", "bigtoe"]:
if cmds.objExists(name + ":" + toe + "_metatarsal_ctrl_r"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (toe + "_metatarsal_ctrl_r", "R TOE FK CONTROLS"), hb = True)
if cmds.objExists(name + ":" + toe + "toe_fk_ctrl_1_r"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (toe + "toe_fk_ctrl_1_r", "R TOE FK CONTROLS"), hb = True)
if cmds.objExists(name + ":" + toe + "toe_fk_ctrl_2_r"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (toe + "toe_fk_ctrl_2_r", "R TOE FK CONTROLS"), hb = True)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("R TOE IK CONTROLS", "RIGHT TOE CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["R TOE IK CONTROLS", 2], tc = ["R TOE IK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [["R TOE IK CONTROLS", 1, "S"], ["R TOE IK CONTROLS", 2, "V"]])
for toe in ["index", "middle", "ring", "pinky", "bigtoe"]:
if cmds.objExists(name + ":" + toe + "_ik_ctrl_r"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (toe + "_ik_ctrl_r", "R TOE IK CONTROLS"), hb = True)
if cmds.objExists(name + ":" + "ik_global_ctrl_r"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("ik_global_ctrl_r", "R TOE IK CONTROLS"), hb = True)
#CUSTOM JOINTS
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("CUSTOM LEAF CONTROLS", "RIG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["CUSTOM LEAF CONTROLS", 1], expandItem = ["CUSTOM LEAF CONTROLS", False],tc = ["CUSTOM LEAF CONTROLS", self.orange[0], self.orange[1], self.orange[2]], labelBackgroundColor = ["CUSTOM LEAF CONTROLS", .3, .3, .3], bti = [["CUSTOM LEAF CONTROLS", 1, "S"], ["CUSTOM LEAF CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("CUSTOM JIGGLE CONTROLS", "RIG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["CUSTOM JIGGLE CONTROLS", 1], expandItem = ["CUSTOM JIGGLE CONTROLS", False],tc = ["CUSTOM JIGGLE CONTROLS", self.orange[0], self.orange[1], self.orange[2]], labelBackgroundColor = ["CUSTOM JIGGLE CONTROLS", .4, .4, .4], bti = [["CUSTOM JIGGLE CONTROLS", 1, "S"], ["CUSTOM JIGGLE CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("CUSTOM CHAIN CONTROLS", "RIG CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = ["CUSTOM CHAIN CONTROLS", 1], expandItem = ["CUSTOM CHAIN CONTROLS", False],tc = ["CUSTOM CHAIN CONTROLS", self.orange[0], self.orange[1], self.orange[2]], labelBackgroundColor = ["CUSTOM CHAIN CONTROLS", .3, .3, .3], bti = [["CUSTOM CHAIN CONTROLS", 1, "S"], ["CUSTOM CHAIN CONTROLS", 2, "V"]])
customJoints = []
attrs = cmds.listAttr(name + ":" + "Skeleton_Settings")
for attr in attrs:
if attr.find("extraJoint") == 0:
customJoints.append(attr)
customJoints = sorted(customJoints)
for joint in customJoints:
attribute = cmds.getAttr(name + ":" + "Skeleton_Settings." + joint, asString = True)
jointType = attribute.partition("/")[2].partition("/")[0]
label = attribute.rpartition("/")[2]
if jointType == "leaf":
label = label.partition(" (")[0]
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (label + "_anim", "CUSTOM LEAF CONTROLS"), hb = True)
if jointType == "jiggle":
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (label + "_anim", "CUSTOM JIGGLE CONTROLS"), hb = True)
if jointType == "chain" or jointType == "dynamic":
numJointsInChain = label.partition("(")[2].partition(")")[0]
label = label.partition(" (")[0]
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (label + " CONTROLS", "CUSTOM CHAIN CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = [label + " CONTROLS", 1], expandItem = [label + " CONTROLS", False],tc = [label + " CONTROLS", self.orange[0], self.orange[1], self.orange[2]], labelBackgroundColor = [label + " CONTROLS", .3, .3, .3])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (label + " FK CONTROLS", label + " CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = [label + " FK CONTROLS", 2], tc = [label + " FK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [[label + " FK CONTROLS", 1, "S"], [label + " FK CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (label + " IK CONTROLS", label + " CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = [label + " IK CONTROLS", 2], tc = [label + " IK CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [[label + " IK CONTROLS", 1, "S"], [label + " IK CONTROLS", 2, "V"]])
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (label + " DYNAMIC CONTROLS", label + " CONTROLS"), hb = False)
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, ff = [label + " DYNAMIC CONTROLS", 2], tc = [label + " DYNAMIC CONTROLS", self.blue[0], self.blue[1], self.blue[2]], bti = [[label + " DYNAMIC CONTROLS", 1, "S"], [label + " DYNAMIC CONTROLS", 2, "V"]])
for i in range(int(numJointsInChain) + 1):
if cmds.objExists(name + ":" + "fk_" + label + "_0" + str(i) + "_anim"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = ("fk_" + label + "_0" + str(i) + "_anim", label + " FK CONTROLS"), hb = True)
#ik controls
cmds.select("*:" + label + "_ik_*_anim")
selection = cmds.ls(sl = True)
for each in selection:
niceName = each.partition(":")[2]
try:
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (niceName, label + " IK CONTROLS"), hb = True)
except:
pass
for i in range(int(numJointsInChain)):
if cmds.objExists(name + ":" + label + "_cv_" + str(i) + "_anim"):
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (label + "_cv_" + str(i) + "_anim", label + " IK CONTROLS"), hb = True)
#dynamic controls
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, addItem = (label + "_dyn_anim", label + " DYNAMIC CONTROLS"), hb = True)
#expand
cmds.treeView(self.widgets[name + "_treeViewWidget"], e=True, expandItem = ["RIG CONTROLS", True])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def createRigSettings(self, name, layout, *args):
#create a columnLayout that is a child of the passed in layout
self.widgets[name + "_rigSettingsMainColumn"] = cmds.columnLayout(parent = layout)
self.formsToHide.append(self.widgets[name + "_rigSettingsMainColumn"])
#First create left and right arm frame layouts with all of the settings for the arms
#rig mode, fk orientation space, ik stretch settings, arm roll settings
#LEFT ARM
self.widgets[name + "_rigSettings_leftArmFrame"] = cmds.frameLayout(label = "Left Arm", w = 400, h = 30, parent = self.widgets[name + "_rigSettingsMainColumn"], collapse = True, collapsable = True, borderStyle = "in", cc = partial(self.collapseCommand, name, "LeftArmSettings"), ec = partial(self.expandCommand, name, "LeftArmSettings"))
self.widgets[name + "_rigSettings_leftArmForm"] = cmds.formLayout(w = 400, h = 300, parent = self.widgets[name + "_rigSettings_leftArmFrame"])
#rig mode
text1 = cmds.text(label = "Rig Mode:", font = "boldLabelFont")
self.widgets[name + "rigSettings_leftArmMode_Collection"] = cmds.iconTextRadioCollection()
self.widgets[name + "_rigSettings_leftArmFkModeButton"] = cmds.iconTextRadioButton(cl = self.widgets[name + "rigSettings_leftArmMode_Collection"], image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_on.bmp", w = 180, h = 50, onc = partial(self.switchArmMode, name, "FK", "l"))
self.widgets[name + "_rigSettings_leftArmIkModeButton"] = cmds.iconTextRadioButton(cl = self.widgets[name + "rigSettings_leftArmMode_Collection"], image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_on.bmp",w = 180, h = 50, onc = partial(self.switchArmMode, name, "IK", "l"))
mode = cmds.getAttr(name + ":Rig_Settings.lArmMode")
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(text1, "top", 5),(text1, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftArmFkModeButton"], "top", 25),(self.widgets[name + "_rigSettings_leftArmFkModeButton"], "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftArmIkModeButton"], "top", 25),(self.widgets[name + "_rigSettings_leftArmIkModeButton"], "right", 20)])
if mode == 0:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_leftArmFkModeButton"], edit = True, select = True)
if mode == 1:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_leftArmIkModeButton"], edit = True, select = True)
#fk orientation space
text2 = cmds.text(label = "FK Arm Orientation Space:", font = "boldLabelFont")
self.widgets[name + "rigSettings_leftArmOrient_Collection"] = cmds.iconTextRadioCollection()
self.widgets[name + "_rigSettings_leftArmFkOrientModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsClav_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsClav_on.bmp", w = 115, h = 30, onc = partial(self.switchArmOrientMode, name, 0, "l"))
self.widgets[name + "_rigSettings_leftArmBodyOrientModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsBody_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsBody_on.bmp",w = 115, h = 30, onc = partial(self.switchArmOrientMode, name, 1, "l"))
self.widgets[name + "_rigSettings_leftArmWorldOrientModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsWorld_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsWorld_on.bmp",w = 115, h = 30, onc = partial(self.switchArmOrientMode, name, 2, "l"))
mode = cmds.getAttr(name + ":Rig_Settings.lFkArmOrient")
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(text2, "top", 85),(text2, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftArmFkOrientModeButton"], "top", 105),(self.widgets[name + "_rigSettings_leftArmFkOrientModeButton"], "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftArmBodyOrientModeButton"], "top", 105),(self.widgets[name + "_rigSettings_leftArmBodyOrientModeButton"], "left", 138)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftArmWorldOrientModeButton"], "top", 105),(self.widgets[name + "_rigSettings_leftArmWorldOrientModeButton"], "right", 20)])
if mode == 0:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_leftArmFkOrientModeButton"], edit = True, select = True)
if mode == 1:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_leftArmBodyOrientModeButton"], edit = True, select = True)
if mode == 2:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_leftArmWorldOrientModeButton"], edit = True, select = True)
#ik stretch settings
text3 = cmds.text(label = "IK Arm Stretch Settings", font = "boldLabelFont")
text4 = cmds.text(label = "Stretch")
text5 = cmds.text(label = "Squash")
stetchVal = cmds.getAttr(name + ":ik_wrist_l_anim.stretch")
try:
squashVal = cmds.getAttr(name + ":ik_wrist_l_anim.squash")
except:
pass
try:
self.widgets[name + "_rigSettings_leftArmIkStretchField"] = cmds.floatField( minValue=0, maxValue=1, value=stetchVal, w = 100)
except:
cmds.warning("Left Arm IK stretch above or below the allowed range.")
try:
self.widgets[name + "_rigSettings_leftArmIkStretchBiasField"] = cmds.floatField( minValue=0, maxValue=1, value=squashVal, w = 100 )
except:
cmds.warning("Left Arm IK Squash above or below the allowed range.")
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(text3, "top", 145),(text3, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(text4, "top", 173),(text4, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(text5, "top", 173),(text5, "left", 220)])
try:
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftArmIkStretchField"], "top", 170),(self.widgets[name + "_rigSettings_leftArmIkStretchField"], "left", 55)])
except:
pass
try:
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftArmIkStretchBiasField"], "top", 170),(self.widgets[name + "_rigSettings_leftArmIkStretchBiasField"], "left", 250)])
except:
pass
try:
cmds.connectControl( self.widgets[name + "_rigSettings_leftArmIkStretchField"], name + ":ik_wrist_l_anim.stretch" )
except:
pass
try:
cmds.connectControl( self.widgets[name + "_rigSettings_leftArmIkStretchBiasField"], name + ":ik_wrist_l_anim.squash")
except:
pass
#arm roll settings
text6 = cmds.text(label = "Arm Roll Settings", font = "boldLabelFont")
text7 = cmds.text(label = "Upper Arm Twist Amount:")
text8 = cmds.text(label = "Lower Arm Twist Amount:")
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(text6, "top", 200),(text6, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(text7, "top", 223),(text7, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(text8, "top", 223),(text8, "left", 220)])
if cmds.objExists(name + ":Rig_Settings.lUpperarmTwistAmount"):
upArmVal = cmds.getAttr(name + ":Rig_Settings.lUpperarmTwistAmount")
self.widgets[name + "_rigSettings_leftArmUpArmTwistField"] = cmds.floatField( minValue=-1, maxValue=2, value=upArmVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftArmUpArmTwistField"], "top", 240),(self.widgets[name + "_rigSettings_leftArmUpArmTwistField"], "left", 55)])
cmds.connectControl( self.widgets[name + "_rigSettings_leftArmUpArmTwistField"], name + ":Rig_Settings.lUpperarmTwistAmount" )
text = cmds.text(label = "Twist 1:")
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(text, "top", 242),(text, "left", 10)])
if cmds.objExists(name + ":Rig_Settings.lUpperarmTwist2Amount"):
upArmVal = cmds.getAttr(name + ":Rig_Settings.lUpperarmTwist2Amount")
self.widgets[name + "_rigSettings_leftArmUpArmTwist2Field"] = cmds.floatField( minValue=-1, maxValue=2, value=upArmVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftArmUpArmTwist2Field"], "top", 260),(self.widgets[name + "_rigSettings_leftArmUpArmTwist2Field"], "left", 55)])
cmds.connectControl( self.widgets[name + "_rigSettings_leftArmUpArmTwist2Field"], name + ":Rig_Settings.lUpperarmTwist2Amount" )
text = cmds.text(label = "Twist 2:")
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(text, "top", 262),(text, "left", 10)])
if cmds.objExists(name + ":Rig_Settings.lUpperarmTwist3Amount"):
upArmVal = cmds.getAttr(name + ":Rig_Settings.lUpperarmTwist3Amount")
self.widgets[name + "_rigSettings_leftArmUpArmTwist3Field"] = cmds.floatField( minValue=-1, maxValue=2, value=upArmVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftArmUpArmTwist3Field"], "top", 280),(self.widgets[name + "_rigSettings_leftArmUpArmTwist3Field"], "left", 55)])
cmds.connectControl( self.widgets[name + "_rigSettings_leftArmUpArmTwist3Field"], name + ":Rig_Settings.lUpperarmTwist3Amount" )
text = cmds.text(label = "Twist 3:")
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(text, "top", 282),(text, "left", 10)])
#lower arm
if cmds.objExists(name + ":Rig_Settings.lForearmTwistAmount"):
lowArmVal = cmds.getAttr(name + ":Rig_Settings.lForearmTwistAmount")
self.widgets[name + "_rigSettings_leftArmLowArmTwistField"] = cmds.floatField( minValue=-1, maxValue=2, value=lowArmVal, w = 100 )
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftArmLowArmTwistField"], "top", 240),(self.widgets[name + "_rigSettings_leftArmLowArmTwistField"], "left", 250)])
cmds.connectControl( self.widgets[name + "_rigSettings_leftArmLowArmTwistField"], name + ":Rig_Settings.lForearmTwistAmount")
text = cmds.text(label = "Twist 1:")
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(text, "top", 242),(text, "left", 200)])
if cmds.objExists(name + ":Rig_Settings.lForearmTwist2Amount"):
lowArmVal = cmds.getAttr(name + ":Rig_Settings.lForearmTwist2Amount")
self.widgets[name + "_rigSettings_leftArmLowArmTwist2Field"] = cmds.floatField( minValue=-1, maxValue=2, value=lowArmVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftArmLowArmTwist2Field"], "top", 260),(self.widgets[name + "_rigSettings_leftArmLowArmTwist2Field"], "left", 250)])
cmds.connectControl( self.widgets[name + "_rigSettings_leftArmLowArmTwist2Field"], name + ":Rig_Settings.lForearmTwist2Amount" )
text = cmds.text(label = "Twist 2:")
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(text, "top", 262),(text, "left", 200)])
if cmds.objExists(name + ":Rig_Settings.lForearmTwist3Amount"):
lowArmVal = cmds.getAttr(name + ":Rig_Settings.lForearmTwist3Amount")
self.widgets[name + "_rigSettings_leftArmLowArmTwist3Field"] = cmds.floatField( minValue=-1, maxValue=2, value=lowArmVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftArmLowArmTwist3Field"], "top", 280),(self.widgets[name + "_rigSettings_leftArmLowArmTwist3Field"], "left", 250)])
cmds.connectControl( self.widgets[name + "_rigSettings_leftArmLowArmTwist3Field"], name + ":Rig_Settings.lForearmTwist3Amount" )
text = cmds.text(label = "Twist 3:")
cmds.formLayout(self.widgets[name + "_rigSettings_leftArmForm"], edit = True, af = [(text, "top", 282),(text, "left", 200)])
#create the right click menu for selecting the settings for the left arm
popupMenu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rigSettings_leftArmForm"])
menu = cmds.menuItem(parent = popupMenu, label = "Select Left Arm Rig Settings", c = partial(self.selectRigSettings_Specific, "leftArm"))
#RIGHT ARM
self.widgets[name + "_rigSettings_rightArmFrame"] = cmds.frameLayout(label = "Right Arm", w = 400, h = 30, parent = self.widgets[name + "_rigSettingsMainColumn"], collapse = True, collapsable = True, borderStyle = "in", cc = partial(self.collapseCommand, name, "RightArmSettings"), ec = partial(self.expandCommand, name, "RightArmSettings"))
self.widgets[name + "_rigSettings_rightArmForm"] = cmds.formLayout(w = 400, h = 300, parent = self.widgets[name + "_rigSettings_rightArmFrame"])
#rig mode
text1 = cmds.text(label = "Rig Mode:", font = "boldLabelFont")
self.widgets[name + "rigSettings_rightArmMode_Collection"] = cmds.iconTextRadioCollection()
self.widgets[name + "_rigSettings_rightArmFkModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_on.bmp", w = 180, h = 50, onc = partial(self.switchArmMode, name, "FK", "r"))
self.widgets[name + "_rigSettings_rightArmIkModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_on.bmp",w = 180, h = 50, onc = partial(self.switchArmMode, name, "IK", "r"))
mode = cmds.getAttr(name + ":Rig_Settings.rArmMode")
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(text1, "top", 5),(text1, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightArmFkModeButton"], "top", 25),(self.widgets[name + "_rigSettings_rightArmFkModeButton"], "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightArmIkModeButton"], "top", 25),(self.widgets[name + "_rigSettings_rightArmIkModeButton"], "right", 20)])
if mode == 0:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_rightArmFkModeButton"], edit = True, select = True)
if mode == 1:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_rightArmIkModeButton"], edit = True, select = True)
#fk orientation space
text2 = cmds.text(label = "FK Arm Orientation Space:", font = "boldLabelFont")
self.widgets[name + "rigSettings_rightArmOrient_Collection"] = cmds.iconTextRadioCollection()
self.widgets[name + "_rigSettings_rightArmFkOrientModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsClav_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsClav_on.bmp", w = 115, h = 30, onc = partial(self.switchArmOrientMode, name, 0, "r"))
self.widgets[name + "_rigSettings_rightArmBodyOrientModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsBody_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsBody_on.bmp",w = 115, h = 30, onc = partial(self.switchArmOrientMode, name, 1, "r"))
self.widgets[name + "_rigSettings_rightArmWorldOrientModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsWorld_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsWorld_on.bmp",w = 115, h = 30, onc = partial(self.switchArmOrientMode, name, 2, "r"))
mode = cmds.getAttr(name + ":Rig_Settings.rFkArmOrient")
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(text2, "top", 85),(text2, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightArmFkOrientModeButton"], "top", 105),(self.widgets[name + "_rigSettings_rightArmFkOrientModeButton"], "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightArmBodyOrientModeButton"], "top", 105),(self.widgets[name + "_rigSettings_rightArmBodyOrientModeButton"], "left", 138)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightArmWorldOrientModeButton"], "top", 105),(self.widgets[name + "_rigSettings_rightArmWorldOrientModeButton"], "right", 20)])
if mode == 0:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_rightArmFkOrientModeButton"], edit = True, select = True)
if mode == 1:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_rightArmBodyOrientModeButton"], edit = True, select = True)
if mode == 2:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_rightArmWorldOrientModeButton"], edit = True, select = True)
#ik stretch settings
text3 = cmds.text(label = "IK Arm Stretch Settings", font = "boldLabelFont")
text4 = cmds.text(label = "Stretch")
text5 = cmds.text(label = "Squash")
stetchVal = cmds.getAttr(name + ":ik_wrist_r_anim.stretch")
try:
squashVal = cmds.getAttr(name + ":ik_wrist_r_anim.squash")
except:
pass
try:
self.widgets[name + "_rigSettings_rightArmIkStretchField"] = cmds.floatField( minValue=0, maxValue=1, value=stetchVal, w = 100)
except:
cmds.warning("Right Arm IK Stretch above or below the allowed range.")
try:
self.widgets[name + "_rigSettings_rightArmIkStretchBiasField"] = cmds.floatField( minValue=0, maxValue=1, value=squashVal, w = 100 )
except:
cmds.warning("Right Arm IK squash above or below the allowed range.")
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(text3, "top", 145),(text3, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(text4, "top", 173),(text4, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(text5, "top", 173),(text5, "left", 220)])
try:
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightArmIkStretchField"], "top", 170),(self.widgets[name + "_rigSettings_rightArmIkStretchField"], "left", 55)])
except:
pass
try:
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightArmIkStretchBiasField"], "top", 170),(self.widgets[name + "_rigSettings_rightArmIkStretchBiasField"], "left", 250)])
except:
pass
try:
cmds.connectControl( self.widgets[name + "_rigSettings_rightArmIkStretchField"], name + ":ik_wrist_r_anim.stretch" )
except:
pass
try:
cmds.connectControl( self.widgets[name + "_rigSettings_rightArmIkStretchBiasField"], name + ":ik_wrist_r_anim.squash")
except:
pass
#arm roll settings
text6 = cmds.text(label = "Arm Roll Settings", font = "boldLabelFont")
text7 = cmds.text(label = "Upper Arm Twist Amount:")
text8 = cmds.text(label = "Lower Arm Twist Amount:")
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(text6, "top", 200),(text6, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(text7, "top", 223),(text7, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(text8, "top", 223),(text8, "left", 220)])
if cmds.objExists(name + ":Rig_Settings.rUpperarmTwistAmount"):
upArmVal = cmds.getAttr(name + ":Rig_Settings.rUpperarmTwistAmount")
self.widgets[name + "_rigSettings_rightArmUpArmTwistField"] = cmds.floatField( minValue=-1, maxValue=2, value=upArmVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightArmUpArmTwistField"], "top", 240),(self.widgets[name + "_rigSettings_rightArmUpArmTwistField"], "left", 55)])
cmds.connectControl( self.widgets[name + "_rigSettings_rightArmUpArmTwistField"], name + ":Rig_Settings.rUpperarmTwistAmount" )
text = cmds.text(label = "Twist 1:")
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(text, "top", 242),(text, "left", 10)])
if cmds.objExists(name + ":Rig_Settings.rUpperarmTwist2Amount"):
upArmVal = cmds.getAttr(name + ":Rig_Settings.rUpperarmTwist2Amount")
self.widgets[name + "_rigSettings_rightArmUpArmTwist2Field"] = cmds.floatField( minValue=-1, maxValue=2, value=upArmVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightArmUpArmTwist2Field"], "top", 260),(self.widgets[name + "_rigSettings_rightArmUpArmTwist2Field"], "left", 55)])
cmds.connectControl( self.widgets[name + "_rigSettings_rightArmUpArmTwist2Field"], name + ":Rig_Settings.rUpperarmTwist2Amount" )
text = cmds.text(label = "Twist 2:")
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(text, "top", 262),(text, "left", 10)])
if cmds.objExists(name + ":Rig_Settings.rUpperarmTwist3Amount"):
upArmVal = cmds.getAttr(name + ":Rig_Settings.rUpperarmTwist3Amount")
self.widgets[name + "_rigSettings_rightArmUpArmTwist3Field"] = cmds.floatField( minValue=-1, maxValue=2, value=upArmVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightArmUpArmTwist3Field"], "top", 280),(self.widgets[name + "_rigSettings_rightArmUpArmTwist3Field"], "left", 55)])
cmds.connectControl( self.widgets[name + "_rigSettings_rightArmUpArmTwist3Field"], name + ":Rig_Settings.rUpperarmTwist3Amount" )
text = cmds.text(label = "Twist 3:")
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(text, "top", 282),(text, "left", 10)])
#lower arm
if cmds.objExists(name + ":Rig_Settings.rForearmTwistAmount"):
lowArmVal = cmds.getAttr(name + ":Rig_Settings.rForearmTwistAmount")
self.widgets[name + "_rigSettings_rightArmLowArmTwistField"] = cmds.floatField( minValue=-1, maxValue=2, value=lowArmVal, w = 100 )
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightArmLowArmTwistField"], "top", 240),(self.widgets[name + "_rigSettings_rightArmLowArmTwistField"], "left", 250)])
cmds.connectControl( self.widgets[name + "_rigSettings_rightArmLowArmTwistField"], name + ":Rig_Settings.rForearmTwistAmount")
text = cmds.text(label = "Twist 1:")
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(text, "top", 242),(text, "left", 200)])
if cmds.objExists(name + ":Rig_Settings.rForearmTwist2Amount"):
lowArmVal = cmds.getAttr(name + ":Rig_Settings.rForearmTwist2Amount")
self.widgets[name + "_rigSettings_rightArmLowArmTwist2Field"] = cmds.floatField( minValue=-1, maxValue=2, value=lowArmVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightArmLowArmTwist2Field"], "top", 260),(self.widgets[name + "_rigSettings_rightArmLowArmTwist2Field"], "left", 250)])
cmds.connectControl( self.widgets[name + "_rigSettings_rightArmLowArmTwist2Field"], name + ":Rig_Settings.rForearmTwist2Amount" )
text = cmds.text(label = "Twist 2:")
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(text, "top", 262),(text, "left", 200)])
if cmds.objExists(name + ":Rig_Settings.rForearmTwist3Amount"):
lowArmVal = cmds.getAttr(name + ":Rig_Settings.rForearmTwist3Amount")
self.widgets[name + "_rigSettings_rightArmLowArmTwist3Field"] = cmds.floatField( minValue=-1, maxValue=2, value=lowArmVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightArmLowArmTwist3Field"], "top", 280),(self.widgets[name + "_rigSettings_rightArmLowArmTwist3Field"], "left", 250)])
cmds.connectControl( self.widgets[name + "_rigSettings_rightArmLowArmTwist3Field"], name + ":Rig_Settings.rForearmTwist3Amount" )
text = cmds.text(label = "Twist 3:")
cmds.formLayout(self.widgets[name + "_rigSettings_rightArmForm"], edit = True, af = [(text, "top", 282),(text, "left", 200)])
#create the right click menu for selecting the settings for the right arm
popupMenu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rigSettings_rightArmForm"])
menu = cmds.menuItem(parent = popupMenu, label = "Select Right Arm Rig Settings", c = partial(self.selectRigSettings_Specific, "rightArm"))
#LEFT LEG
self.widgets[name + "_rigSettings_leftLegFrame"] = cmds.frameLayout(label = "Left Leg", w = 400, h = 30, parent = self.widgets[name + "_rigSettingsMainColumn"], collapse = True, collapsable = True, borderStyle = "in", cc = partial(self.collapseCommand, name, "LeftLegSettings"), ec = partial(self.expandCommand, name, "LeftLegSettings"))
self.widgets[name + "_rigSettings_leftLegForm"] = cmds.formLayout(w = 400, h = 300, parent = self.widgets[name + "_rigSettings_leftLegFrame"])
#rig mode
text1 = cmds.text(label = "Rig Mode:", font = "boldLabelFont")
self.widgets[name + "rigSettings_leftLegMode_Collection"] = cmds.iconTextRadioCollection()
self.widgets[name + "_rigSettings_leftLegFkModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_on.bmp", w = 180, h = 50, onc = partial(self.switchLegMode, name, "FK", "l"))
self.widgets[name + "_rigSettings_leftLegIkModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_on.bmp",w = 180, h = 50, onc = partial(self.switchLegMode, name, "IK", "l"))
mode = cmds.getAttr(name + ":Rig_Settings.lLegMode")
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(text1, "top", 5),(text1, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftLegFkModeButton"], "top", 25),(self.widgets[name + "_rigSettings_leftLegFkModeButton"], "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftLegIkModeButton"], "top", 25),(self.widgets[name + "_rigSettings_leftLegIkModeButton"], "right", 20)])
if mode == 0:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_leftLegFkModeButton"], edit = True, select = True)
if mode == 1:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_leftLegIkModeButton"], edit = True, select = True)
#ik stretch settings
text3 = cmds.text(label = "IK Leg Stretch Settings", font = "boldLabelFont")
text4 = cmds.text(label = "Stretch")
text5 = cmds.text(label = "Squash")
stetchVal = cmds.getAttr(name + ":ik_foot_anim_l.stretch")
try:
squashVal = cmds.getAttr(name + ":ik_foot_anim_l.squash")
except:
pass
try:
self.widgets[name + "_rigSettings_leftLegIkStretchField"] = cmds.floatField( minValue=0, maxValue=1, value=stetchVal, w = 100)
except:
pass
try:
self.widgets[name + "_rigSettings_leftLegIkStretchBiasField"] = cmds.floatField( minValue=0, maxValue=1, value=squashVal, w = 100 )
except:
pass
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(text3, "top", 85),(text3, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(text4, "top", 108),(text4, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(text5, "top", 108),(text5, "left", 220)])
try:
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftLegIkStretchField"], "top", 105),(self.widgets[name + "_rigSettings_leftLegIkStretchField"], "left", 55)])
except:
pass
try:
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftLegIkStretchBiasField"], "top", 105),(self.widgets[name + "_rigSettings_leftLegIkStretchBiasField"], "left", 250)])
except:
pass
try:
cmds.connectControl( self.widgets[name + "_rigSettings_leftLegIkStretchField"], name + ":ik_foot_anim_l.stretch" )
except:
pass
try:
cmds.connectControl( self.widgets[name + "_rigSettings_leftLegIkStretchBiasField"], name + ":ik_foot_anim_l.squash")
except:
pass
#leg roll settings
text6 = cmds.text(label = "Leg Roll Settings", font = "boldLabelFont")
text7 = cmds.text(label = "Upper Leg Twist Amount:")
text8 = cmds.text(label = "Lower Leg Twist Amount:")
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(text6, "top", 145),(text6, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(text7, "top", 173),(text7, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(text8, "top", 173),(text8, "left", 220)])
if cmds.objExists(name + ":Rig_Settings.lThighTwistAmount"):
upLegVal = cmds.getAttr(name + ":Rig_Settings.lThighTwistAmount")
self.widgets[name + "_rigSettings_leftLegThighTwistField"] = cmds.floatField( minValue=-1, maxValue=2, value=upLegVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftLegThighTwistField"], "top", 190),(self.widgets[name + "_rigSettings_leftLegThighTwistField"], "left", 55)])
cmds.connectControl( self.widgets[name + "_rigSettings_leftLegThighTwistField"], name + ":Rig_Settings.lThighTwistAmount" )
text = cmds.text(label = "Twist 1:")
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(text, "top", 192),(text, "left", 10)])
if cmds.objExists(name + ":Rig_Settings.lThighTwist2Amount"):
upLegVal = cmds.getAttr(name + ":Rig_Settings.lThighTwist2Amount")
self.widgets[name + "_rigSettings_leftLegThighTwist2Field"] = cmds.floatField( minValue=-1, maxValue=2, value=upLegVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftLegThighTwist2Field"], "top", 210),(self.widgets[name + "_rigSettings_leftLegThighTwist2Field"], "left", 55)])
cmds.connectControl( self.widgets[name + "_rigSettings_leftLegThighTwist2Field"], name + ":Rig_Settings.lThighTwist2Amount" )
text = cmds.text(label = "Twist 2:")
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(text, "top", 212),(text, "left", 10)])
if cmds.objExists(name + ":Rig_Settings.lThighTwist3Amount"):
upLegVal = cmds.getAttr(name + ":Rig_Settings.lThighTwist3Amount")
self.widgets[name + "_rigSettings_leftLegThighTwist3Field"] = cmds.floatField( minValue=-1, maxValue=2, value=upLegVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftLegThighTwist3Field"], "top", 230),(self.widgets[name + "_rigSettings_leftLegThighTwist3Field"], "left", 55)])
cmds.connectControl( self.widgets[name + "_rigSettings_leftLegThighTwist3Field"], name + ":Rig_Settings.lThighTwist3Amount" )
text = cmds.text(label = "Twist 3:")
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(text, "top", 232),(text, "left", 10)])
#lower leg
if cmds.objExists(name + ":Rig_Settings.lCalfTwistAmount"):
lowLegVal = cmds.getAttr(name + ":Rig_Settings.lCalfTwistAmount")
self.widgets[name + "_rigSettings_leftLegCalfTwistField"] = cmds.floatField( minValue=-1, maxValue=2, value=lowLegVal, w = 100 )
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftLegCalfTwistField"], "top", 190),(self.widgets[name + "_rigSettings_leftLegCalfTwistField"], "left", 250)])
cmds.connectControl( self.widgets[name + "_rigSettings_leftLegCalfTwistField"], name + ":Rig_Settings.lCalfTwistAmount")
text = cmds.text(label = "Twist 1:")
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(text, "top", 192),(text, "left", 200)])
if cmds.objExists(name + ":Rig_Settings.lCalfTwist2Amount"):
lowLegVal = cmds.getAttr(name + ":Rig_Settings.lCalfTwist2Amount")
self.widgets[name + "_rigSettings_leftLegCalfTwist2Field"] = cmds.floatField( minValue=-1, maxValue=2, value=lowLegVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftLegCalfTwist2Field"], "top", 210),(self.widgets[name + "_rigSettings_leftLegCalfTwist2Field"], "left", 250)])
cmds.connectControl( self.widgets[name + "_rigSettings_leftLegCalfTwist2Field"], name + ":Rig_Settings.lCalfTwist2Amount" )
text = cmds.text(label = "Twist 2:")
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(text, "top", 212),(text, "left", 200)])
if cmds.objExists(name + ":Rig_Settings.lCalfTwist3Amount"):
lowLegVal = cmds.getAttr(name + ":Rig_Settings.lCalfTwist3Amount")
self.widgets[name + "_rigSettings_leftLegCalfTwist3Field"] = cmds.floatField( minValue=-1, maxValue=2, value=lowLegVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_leftLegCalfTwist3Field"], "top", 230),(self.widgets[name + "_rigSettings_leftLegCalfTwist3Field"], "left", 250)])
cmds.connectControl( self.widgets[name + "_rigSettings_leftLegCalfTwist3Field"], name + ":Rig_Settings.lCalfTwist3Amount" )
text = cmds.text(label = "Twist 3:")
cmds.formLayout(self.widgets[name + "_rigSettings_leftLegForm"], edit = True, af = [(text, "top", 232),(text, "left", 200)])
#create the right click menu for selecting the settings for the left leg
popupMenu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rigSettings_leftLegForm"])
menu = cmds.menuItem(parent = popupMenu, label = "Select Left Leg Rig Settings", c = partial(self.selectRigSettings_Specific, "leftLeg"))
#RIGHT LEG
self.widgets[name + "_rigSettings_rightLegFrame"] = cmds.frameLayout(label = "Right Leg", w = 400, h = 30, parent = self.widgets[name + "_rigSettingsMainColumn"], collapse = True, collapsable = True, borderStyle = "in", cc = partial(self.collapseCommand, name, "RightLegSettings"), ec = partial(self.expandCommand, name, "RightLegSettings"))
self.widgets[name + "_rigSettings_rightLegForm"] = cmds.formLayout(w = 400, h = 300, parent = self.widgets[name + "_rigSettings_rightLegFrame"])
#rig mode
text1 = cmds.text(label = "Rig Mode:", font = "boldLabelFont")
self.widgets[name + "rigSettings_rightLegMode_Collection"] = cmds.iconTextRadioCollection()
self.widgets[name + "_rigSettings_rightLegFkModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_on.bmp", w = 180, h = 50, onc = partial(self.switchLegMode, name, "FK", "r"))
self.widgets[name + "_rigSettings_rightLegIkModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_on.bmp",w = 180, h = 50, onc = partial(self.switchLegMode, name, "IK", "r"))
mode = cmds.getAttr(name + ":Rig_Settings.rLegMode")
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(text1, "top", 5),(text1, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightLegFkModeButton"], "top", 25),(self.widgets[name + "_rigSettings_rightLegFkModeButton"], "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightLegIkModeButton"], "top", 25),(self.widgets[name + "_rigSettings_rightLegIkModeButton"], "right", 20)])
if mode == 0:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_rightLegFkModeButton"], edit = True, select = True)
if mode == 1:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_rightLegIkModeButton"], edit = True, select = True)
#ik stretch settings
text3 = cmds.text(label = "IK Leg Stretch Settings", font = "boldLabelFont")
text4 = cmds.text(label = "Stretch")
text5 = cmds.text(label = "Squash")
stetchVal = cmds.getAttr(name + ":ik_foot_anim_r.stretch")
try:
squashVal = cmds.getAttr(name + ":ik_foot_anim_r.squash")
except:
pass
try:
self.widgets[name + "_rigSettings_rightLegIkStretchField"] = cmds.floatField( minValue=0, maxValue=1, value=stetchVal, w = 100)
except:
pass
try:
self.widgets[name + "_rigSettings_rightLegIkStretchBiasField"] = cmds.floatField( minValue=0, maxValue=1, value=squashVal, w = 100 )
except:
pass
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(text3, "top", 85),(text3, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(text4, "top", 108),(text4, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(text5, "top", 108),(text5, "left", 220)])
try:
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightLegIkStretchField"], "top", 105),(self.widgets[name + "_rigSettings_rightLegIkStretchField"], "left", 55)])
except:
pass
try:
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightLegIkStretchBiasField"], "top", 105),(self.widgets[name + "_rigSettings_rightLegIkStretchBiasField"], "left", 250)])
except:
pass
try:
cmds.connectControl( self.widgets[name + "_rigSettings_rightLegIkStretchField"], name + ":ik_foot_anim_r.stretch" )
except:
pass
try:
cmds.connectControl( self.widgets[name + "_rigSettings_rightLegIkStretchBiasField"], name + ":ik_foot_anim_r.squash")
except:
pass
#leg roll settings
text6 = cmds.text(label = "Leg Roll Settings", font = "boldLabelFont")
text7 = cmds.text(label = "Upper Leg Twist Amount:")
text8 = cmds.text(label = "Lower Leg Twist Amount:")
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(text6, "top", 145),(text6, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(text7, "top", 173),(text7, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(text8, "top", 173),(text8, "left", 220)])
if cmds.objExists(name + ":Rig_Settings.rThighTwistAmount"):
upLegVal = cmds.getAttr(name + ":Rig_Settings.rThighTwistAmount")
self.widgets[name + "_rigSettings_rightLegThighTwistField"] = cmds.floatField( minValue=-1, maxValue=2, value=upLegVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightLegThighTwistField"], "top", 190),(self.widgets[name + "_rigSettings_rightLegThighTwistField"], "left", 55)])
cmds.connectControl( self.widgets[name + "_rigSettings_rightLegThighTwistField"], name + ":Rig_Settings.rThighTwistAmount" )
text = cmds.text(label = "Twist 1:")
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(text, "top", 192),(text, "left", 10)])
if cmds.objExists(name + ":Rig_Settings.rThighTwist2Amount"):
upLegVal = cmds.getAttr(name + ":Rig_Settings.rThighTwist2Amount")
self.widgets[name + "_rigSettings_rightLegThighTwist2Field"] = cmds.floatField( minValue=-1, maxValue=2, value=upLegVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightLegThighTwist2Field"], "top", 210),(self.widgets[name + "_rigSettings_rightLegThighTwist2Field"], "left", 55)])
cmds.connectControl( self.widgets[name + "_rigSettings_rightLegThighTwist2Field"], name + ":Rig_Settings.rThighTwist2Amount" )
text = cmds.text(label = "Twist 2:")
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(text, "top", 212),(text, "left", 10)])
if cmds.objExists(name + ":Rig_Settings.rThighTwist3Amount"):
upLegVal = cmds.getAttr(name + ":Rig_Settings.rThighTwist3Amount")
self.widgets[name + "_rigSettings_rightLegThighTwist3Field"] = cmds.floatField( minValue=-1, maxValue=2, value=upLegVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightLegThighTwist3Field"], "top", 230),(self.widgets[name + "_rigSettings_rightLegThighTwist3Field"], "left", 55)])
cmds.connectControl( self.widgets[name + "_rigSettings_rightLegThighTwist3Field"], name + ":Rig_Settings.rThighTwist3Amount" )
text = cmds.text(label = "Twist 3:")
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(text, "top", 232),(text, "left", 10)])
#lower leg
if cmds.objExists(name + ":Rig_Settings.rCalfTwistAmount"):
lowLegVal = cmds.getAttr(name + ":Rig_Settings.rCalfTwistAmount")
self.widgets[name + "_rigSettings_rightLegCalfTwistField"] = cmds.floatField( minValue=-1, maxValue=2, value=lowLegVal, w = 100 )
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightLegCalfTwistField"], "top", 190),(self.widgets[name + "_rigSettings_rightLegCalfTwistField"], "left", 250)])
cmds.connectControl( self.widgets[name + "_rigSettings_rightLegCalfTwistField"], name + ":Rig_Settings.rCalfTwistAmount")
text = cmds.text(label = "Twist 1:")
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(text, "top", 192),(text, "left", 200)])
if cmds.objExists(name + ":Rig_Settings.rCalfTwist2Amount"):
lowLegVal = cmds.getAttr(name + ":Rig_Settings.rCalfTwist2Amount")
self.widgets[name + "_rigSettings_rightLegCalfTwist2Field"] = cmds.floatField( minValue=-1, maxValue=2, value=lowLegVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightLegCalfTwist2Field"], "top", 210),(self.widgets[name + "_rigSettings_rightLegCalfTwist2Field"], "left", 250)])
cmds.connectControl( self.widgets[name + "_rigSettings_rightLegCalfTwist2Field"], name + ":Rig_Settings.rCalfTwist2Amount" )
text = cmds.text(label = "Twist 2:")
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(text, "top", 212),(text, "left", 200)])
if cmds.objExists(name + ":Rig_Settings.rCalfTwist3Amount"):
lowLegVal = cmds.getAttr(name + ":Rig_Settings.rCalfTwist3Amount")
self.widgets[name + "_rigSettings_rightLegCalfTwist3Field"] = cmds.floatField( minValue=-1, maxValue=2, value=lowLegVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(self.widgets[name + "_rigSettings_rightLegCalfTwist3Field"], "top", 230),(self.widgets[name + "_rigSettings_rightLegCalfTwist3Field"], "left", 250)])
cmds.connectControl( self.widgets[name + "_rigSettings_rightLegCalfTwist3Field"], name + ":Rig_Settings.rCalfTwist3Amount" )
text = cmds.text(label = "Twist 3:")
cmds.formLayout(self.widgets[name + "_rigSettings_rightLegForm"], edit = True, af = [(text, "top", 232),(text, "left", 200)])
#create the right click menu for selecting the settings for the right leg
popupMenu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rigSettings_rightLegForm"])
menu = cmds.menuItem(parent = popupMenu, label = "Select Right Leg Rig Settings", c = partial(self.selectRigSettings_Specific, "rightLeg"))
#SPINE SETTINGS
if cmds.objExists(name + ":chest_ik_anim"):
self.widgets[name + "_rigSettings_spineFrame"] = cmds.frameLayout(label = "Spine", w = 400, h = 30, parent = self.widgets[name + "_rigSettingsMainColumn"], collapse = True, collapsable = True, borderStyle = "in", cc = partial(self.collapseCommand, name, "SpineSettings"), ec = partial(self.expandCommand, name, "SpineSettings"))
self.widgets[name + "_rigSettings_spineForm"] = cmds.formLayout(w = 400, h = 300, parent = self.widgets[name + "_rigSettings_spineFrame"])
#rig mode
text1 = cmds.text(label = "Rig Mode:", font = "boldLabelFont")
self.widgets[name + "rigSettings_spine_Collection"] = cmds.iconTextRadioCollection()
self.widgets[name + "_rigSettings_spineFkModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_on.bmp", w = 180, h = 50, onc = partial(self.switchSpineMode, name, "FK"))
self.widgets[name + "_rigSettings_spineIkModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_on.bmp",w = 180, h = 50, onc = partial(self.switchSpineMode, name, "IK"))
mode1 = cmds.getAttr(name + ":Rig_Settings.spine_fk")
mode2 = cmds.getAttr(name + ":Rig_Settings.spine_ik")
cmds.formLayout(self.widgets[name + "_rigSettings_spineForm"], edit = True, af = [(text1, "top", 5),(text1, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_spineForm"], edit = True, af = [(self.widgets[name + "_rigSettings_spineFkModeButton"], "top", 25),(self.widgets[name + "_rigSettings_spineFkModeButton"], "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_spineForm"], edit = True, af = [(self.widgets[name + "_rigSettings_spineIkModeButton"], "top", 25),(self.widgets[name + "_rigSettings_spineIkModeButton"], "right", 20)])
if mode1 > mode2:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_spineFkModeButton"], edit = True, select = True)
else:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_spineIkModeButton"], edit = True, select = True)
if cmds.objExists(name + ":chest_ik_anim"):
#ik stretch settings
text3 = cmds.text(label = "IK Spine Settings", font = "boldLabelFont")
text4 = cmds.text(label = "Stretch")
text5 = cmds.text(label = "Twist Amount")
stetchVal = cmds.getAttr(name + ":chest_ik_anim.stretch")
twistVal = cmds.getAttr(name + ":chest_ik_anim.twist_amount")
self.widgets[name + "_rigSettings_chestIkStretchField"] = cmds.floatField( minValue=-1, maxValue=2, value=stetchVal, w = 100)
self.widgets[name + "_rigSettings_chestIkTwistField"] = cmds.floatField( minValue=-1, maxValue=5, value=twistVal, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_spineForm"], edit = True, af = [(text3, "top", 85),(text3, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_spineForm"], edit = True, af = [(text4, "top", 108),(text4, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_spineForm"], edit = True, af = [(text5, "top", 108),(text5, "left", 210)])
cmds.formLayout(self.widgets[name + "_rigSettings_spineForm"], edit = True, af = [(self.widgets[name + "_rigSettings_chestIkStretchField"], "top", 105),(self.widgets[name + "_rigSettings_chestIkStretchField"], "left", 55)])
cmds.formLayout(self.widgets[name + "_rigSettings_spineForm"], edit = True, af = [(self.widgets[name + "_rigSettings_chestIkTwistField"], "top", 105),(self.widgets[name + "_rigSettings_chestIkTwistField"], "left", 280)])
cmds.connectControl( self.widgets[name + "_rigSettings_chestIkStretchField"], name + ":chest_ik_anim.stretch" )
cmds.connectControl( self.widgets[name + "_rigSettings_chestIkTwistField"], name + ":chest_ik_anim.twist_amount" )
#create the right click menu for selecting the settings for the spine
popupMenu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rigSettings_spineForm"])
menu = cmds.menuItem(parent = popupMenu, label = "Select Spine Settings", c = partial(self.selectRigSettings_Specific, "spine"))
#HEAD SETTINGS
self.widgets[name + "_rigSettings_headFrame"] = cmds.frameLayout(label = "Head", w = 400, h = 30, parent = self.widgets[name + "_rigSettingsMainColumn"], collapse = True, collapsable = True, borderStyle = "in", cc = partial(self.collapseCommand, name, "HeadSettings"), ec = partial(self.expandCommand, name, "HeadSettings"))
self.widgets[name + "_rigSettings_headForm"] = cmds.formLayout(w = 400, h = 300, parent = self.widgets[name + "_rigSettings_headFrame"])
#fk orientation space
text1 = cmds.text(label = "Head Orientation Space:", font = "boldLabelFont")
self.widgets[name + "rigSettings_headOrient_Collection"] = cmds.iconTextRadioCollection()
self.widgets[name + "_rigSettings_headFkOrientModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/headSettingsNeck_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/headSettingsNeck_on.bmp", w = 70, h = 30, onc = partial(self.switchHeadOrientMode, name, 0))
self.widgets[name + "_rigSettings_headShoulderOrientModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/headSettingsChest_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/headSettingsChest_on.bmp",w = 70, h = 30, onc = partial(self.switchHeadOrientMode, name, 1))
self.widgets[name + "_rigSettings_headBodyOrientModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/headSettingsBody_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/headSettingsBody_on.bmp",w = 70, h = 30, onc = partial(self.switchHeadOrientMode, name, 2))
self.widgets[name + "_rigSettings_headWorldOrientModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/headSettingsWorld_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/headSettingsWorld_on.bmp",w = 70, h = 30, onc = partial(self.switchHeadOrientMode, name, 3))
mode = cmds.getAttr(name + ":head_fk_anim.fkOrientation")
cmds.formLayout(self.widgets[name + "_rigSettings_headForm"], edit = True, af = [(text1, "top", 5),(text1, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_headForm"], edit = True, af = [(self.widgets[name + "_rigSettings_headFkOrientModeButton"], "top", 25),(self.widgets[name + "_rigSettings_headFkOrientModeButton"], "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_headForm"], edit = True, af = [(self.widgets[name + "_rigSettings_headShoulderOrientModeButton"], "top", 25),(self.widgets[name + "_rigSettings_headShoulderOrientModeButton"], "left", 95)])
cmds.formLayout(self.widgets[name + "_rigSettings_headForm"], edit = True, af = [(self.widgets[name + "_rigSettings_headBodyOrientModeButton"], "top", 25),(self.widgets[name + "_rigSettings_headBodyOrientModeButton"], "left", 180)])
cmds.formLayout(self.widgets[name + "_rigSettings_headForm"], edit = True, af = [(self.widgets[name + "_rigSettings_headWorldOrientModeButton"], "top", 25),(self.widgets[name + "_rigSettings_headWorldOrientModeButton"], "left", 265)])
if mode == 0:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_headFkOrientModeButton"], edit = True, select = True)
if mode == 1:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_headShoulderOrientModeButton"], edit = True, select = True)
if mode == 2:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_headBodyOrientModeButton"], edit = True, select = True)
if mode == 3:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_headWorldOrientModeButton"], edit = True, select = True)
#create the right click menu for selecting the settings for the head
popupMenu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rigSettings_headForm"])
menu = cmds.menuItem(parent = popupMenu, label = "Select Head Settings", c = partial(self.selectRigSettings_Specific, "head"))
try:
#NECK SETTINGS
self.widgets[name + "_rigSettings_neckFrame"] = cmds.frameLayout(label = "Neck", w = 400, h = 30, parent = self.widgets[name + "_rigSettingsMainColumn"], collapse = True, collapsable = True, borderStyle = "in", cc = partial(self.collapseCommand, name, "NeckSettings"), ec = partial(self.expandCommand, name, "NeckSettings"))
self.widgets[name + "_rigSettings_neckForm"] = cmds.formLayout(w = 400, h = 300, parent = self.widgets[name + "_rigSettings_neckFrame"])
#fk orientation space
text1 = cmds.text(label = "Neck Orientation Space:", font = "boldLabelFont")
self.widgets[name + "rigSettings_neckOrient_Collection"] = cmds.iconTextRadioCollection()
self.widgets[name + "_rigSettings_neckShoulderOrientModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/headSettingsChest_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/headSettingsChest_on.bmp",w = 70, h = 30, onc = partial(self.switchNeckOrientMode, name, 0))
self.widgets[name + "_rigSettings_neckBodyOrientModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/headSettingsBody_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/headSettingsBody_on.bmp",w = 70, h = 30, onc = partial(self.switchNeckOrientMode, name, 1))
self.widgets[name + "_rigSettings_neckWorldOrientModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/headSettingsWorld_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/headSettingsWorld_on.bmp",w = 70, h = 30, onc = partial(self.switchNeckOrientMode, name, 2))
mode = cmds.getAttr(name + ":neck_01_fk_anim.fkOrientation")
cmds.formLayout(self.widgets[name + "_rigSettings_neckForm"], edit = True, af = [(text1, "top", 5),(text1, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_neckForm"], edit = True, af = [(self.widgets[name + "_rigSettings_neckShoulderOrientModeButton"], "top", 25),(self.widgets[name + "_rigSettings_neckShoulderOrientModeButton"], "left", 95)])
cmds.formLayout(self.widgets[name + "_rigSettings_neckForm"], edit = True, af = [(self.widgets[name + "_rigSettings_neckBodyOrientModeButton"], "top", 25),(self.widgets[name + "_rigSettings_neckBodyOrientModeButton"], "left", 180)])
cmds.formLayout(self.widgets[name + "_rigSettings_neckForm"], edit = True, af = [(self.widgets[name + "_rigSettings_neckWorldOrientModeButton"], "top", 25),(self.widgets[name + "_rigSettings_neckWorldOrientModeButton"], "left", 265)])
if mode == 0:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_neckShoulderOrientModeButton"], edit = True, select = True)
if mode == 1:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_neckBodyOrientModeButton"], edit = True, select = True)
if mode == 2:
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_neckWorldOrientModeButton"], edit = True, select = True)
#create the right click menu for selecting the settings for the neck
popupMenu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rigSettings_neckForm"])
menu = cmds.menuItem(parent = popupMenu, label = "Select Neck Settings", c = partial(self.selectRigSettings_Specific, "neck"))
except:
pass
#AUTO CONTROL SETTINGS
self.widgets[name + "_rigSettings_autoControlsFrame"] = cmds.frameLayout(label = "Auto Controls", w = 400, h = 30, parent = self.widgets[name + "_rigSettingsMainColumn"], collapse = True, collapsable = True, borderStyle = "in", cc = partial(self.collapseCommand, name, "AutoControlsSettings"), ec = partial(self.expandCommand, name, "AutoControlsSettings"))
self.widgets[name + "_rigSettings_autoControlsForm"] = cmds.formLayout(w = 400, h = 300, parent = self.widgets[name + "_rigSettings_autoControlsFrame"])
text1 = cmds.text(label ="Auto Hips:", font = "boldLabelFont")
text2 = cmds.text(label ="Auto Spine:", font = "boldLabelFont")
text3 = cmds.text(label ="Auto Left Clavicle:", font = "boldLabelFont")
text4 = cmds.text(label ="Auto Right Clavicle:", font = "boldLabelFont")
self.widgets[name + "_rigSettings_autoHipsField"] = cmds.floatField( minValue=0, maxValue=1, w = 100)
self.widgets[name + "_rigSettings_autoSpineField"] = cmds.floatField( minValue=0, maxValue=1, w = 100)
self.widgets[name + "_rigSettings_autoClavLeftField"] = cmds.floatField( minValue=0, maxValue=1, w = 100)
self.widgets[name + "_rigSettings_autoClavRightField"] = cmds.floatField( minValue=0, maxValue=1, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_autoControlsForm"], edit = True, af = [(text1, "top", 8),(text1, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_autoControlsForm"], edit = True, af = [(text2, "top", 38),(text2, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_autoControlsForm"], edit = True, af = [(text3, "top", 68),(text3, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_autoControlsForm"], edit = True, af = [(text4, "top", 98),(text4, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_autoControlsForm"], edit = True, af = [(self.widgets[name + "_rigSettings_autoHipsField"], "top", 5),(self.widgets[name + "_rigSettings_autoHipsField"], "left", 150)])
cmds.formLayout(self.widgets[name + "_rigSettings_autoControlsForm"], edit = True, af = [(self.widgets[name + "_rigSettings_autoSpineField"], "top", 35),(self.widgets[name + "_rigSettings_autoSpineField"], "left", 150)])
cmds.formLayout(self.widgets[name + "_rigSettings_autoControlsForm"], edit = True, af = [(self.widgets[name + "_rigSettings_autoClavLeftField"], "top", 65),(self.widgets[name + "_rigSettings_autoClavLeftField"], "left", 150)])
cmds.formLayout(self.widgets[name + "_rigSettings_autoControlsForm"], edit = True, af = [(self.widgets[name + "_rigSettings_autoClavRightField"], "top", 95),(self.widgets[name + "_rigSettings_autoClavRightField"], "left", 150)])
cmds.connectControl( self.widgets[name + "_rigSettings_autoHipsField"], name + ":hip_anim.autoHips" )
cmds.connectControl( self.widgets[name + "_rigSettings_autoSpineField"], name + ":chest_ik_anim.autoSpine" )
cmds.connectControl( self.widgets[name + "_rigSettings_autoClavLeftField"], name + ":clavicle_l_anim.autoShoulders" )
cmds.connectControl( self.widgets[name + "_rigSettings_autoClavRightField"], name + ":clavicle_r_anim.autoShoulders" )
#create the right click menu for selecting the auto control settings
popupMenu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rigSettings_autoControlsForm"])
menu = cmds.menuItem(parent = popupMenu, label = "Select Auto Control Settings", c = partial(self.selectRigSettings_Specific, "auto"))
#LEFT FINGER SETTINGS
self.widgets[name + "_rigSettings_leftFingersFrame"] = cmds.frameLayout(label = "Left Fingers", w = 400, parent = self.widgets[name + "_rigSettingsMainColumn"], collapse = True, collapsable = True, borderStyle = "in")
self.widgets[name + "_rigSettings_leftFingersColumn"] = cmds.columnLayout(parent = self.widgets[name + "_rigSettings_leftFingersFrame"])
for finger in ["index", "middle", "ring", "pinky", "thumb"]:
self.widgets[name + "_rigSettings_leftFingersFrame_" + finger] = cmds.frameLayout(label = " " + finger, w = 400, h = 60, parent = self.widgets[name + "_rigSettings_leftFingersColumn"], collapse = False, collapsable = False, borderStyle = "in")
self.widgets[name + "_rigSettings_leftFingersForm_" + finger] = cmds.formLayout(parent = self.widgets[name + "_rigSettings_leftFingersFrame_" + finger])
text = cmds.text(label = "FK Sticky:", parent = self.widgets[name + "_rigSettings_leftFingersForm_" + finger])
self.widgets[name + "_" + finger + "_fkStickyFloatFieldL"] = cmds.floatField( minValue=0, maxValue=1, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_leftFingersForm_" + finger], edit = True, af = [(text, "top", 8),(text, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftFingersForm_" + finger], edit = True, af = [(self.widgets[name + "_" + finger + "_fkStickyFloatFieldL"], "top", 5),(self.widgets[name + "_" + finger + "_fkStickyFloatFieldL"], "left", 100)])
cmds.connectControl(self.widgets[name + "_" + finger + "_fkStickyFloatFieldL"], name + ":" + finger + "_finger_fk_ctrl_1_l.sticky" )
if cmds.objExists(name + ":" + finger + "_l_ik_anim"):
#rig mode
cmds.frameLayout(self.widgets[name + "_rigSettings_leftFingersFrame_" + finger], edit = True, h = 120)
text1 = cmds.text(label = "Rig Mode:", font = "boldLabelFont")
self.widgets[name + "rigSettings_LeftFinger_" + finger + "_Mode_Collection"] = cmds.iconTextRadioCollection()
self.widgets[name + "rigSettings_LeftFinger_" + finger + "_FkModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_on.bmp", w = 180, h = 50, onc = partial(self.switchFingerMode, name, finger, 0, "l"))
self.widgets[name + "rigSettings_LeftFinger_" + finger + "_IkModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_on.bmp",w = 180, h = 50, onc = partial(self.switchFingerMode, name, finger, 1, "l"))
mode = cmds.getAttr(name + ":" + finger + "_finger_l_mode_anim.FK_IK")
cmds.formLayout(self.widgets[name + "_rigSettings_leftFingersForm_" + finger], edit = True, af = [(text1, "top", 30),(text1, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftFingersForm_" + finger], edit = True, af = [(self.widgets[name + "rigSettings_LeftFinger_" + finger + "_FkModeButton"], "top", 45),(self.widgets[name + "rigSettings_LeftFinger_" + finger + "_FkModeButton"], "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_leftFingersForm_" + finger], edit = True, af = [(self.widgets[name + "rigSettings_LeftFinger_" + finger + "_IkModeButton"], "top", 45),(self.widgets[name + "rigSettings_LeftFinger_" + finger + "_IkModeButton"], "right", 20)])
if mode == 0:
cmds.iconTextRadioButton(self.widgets[name + "rigSettings_LeftFinger_" + finger + "_FkModeButton"], edit = True, select = True)
if mode == 1:
cmds.iconTextRadioButton(self.widgets[name + "rigSettings_LeftFinger_" + finger + "_IkModeButton"], edit = True, select = True)
#create the right click menu for selecting the settings for the left fingers
popupMenu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rigSettings_leftFingersFrame"])
menu = cmds.menuItem(parent = popupMenu, label = "Select Left Finger Settings", c = partial(self.selectRigSettings_Specific, "leftFingers"))
#RIGHT FINGER SETTINGS
spacer = cmds.text(label = "", parent = self.widgets[name + "_rigSettingsMainColumn"], h = 10)
self.widgets[name + "_rigSettings_rightFingersFrame"] = cmds.frameLayout(label = "Right Fingers", w = 400, parent = self.widgets[name + "_rigSettingsMainColumn"], collapse = True, collapsable = True, borderStyle = "in")
self.widgets[name + "_rigSettings_rightFingersColumn"] = cmds.columnLayout(parent = self.widgets[name + "_rigSettings_rightFingersFrame"])
for finger in ["index", "middle", "ring", "pinky", "thumb"]:
self.widgets[name + "_rigSettings_rightFingersFrame_" + finger] = cmds.frameLayout(label = " " + finger, w = 400, h = 60, parent = self.widgets[name + "_rigSettings_rightFingersColumn"], collapse = False, collapsable = False, borderStyle = "in")
self.widgets[name + "_rigSettings_rightFingersForm_" + finger] = cmds.formLayout(parent = self.widgets[name + "_rigSettings_rightFingersFrame_" + finger])
text = cmds.text(label = "FK Sticky:", parent = self.widgets[name + "_rigSettings_rightFingersForm_" + finger])
self.widgets[name + "_" + finger + "_fkStickyFloatFieldR"] = cmds.floatField( minValue=0, maxValue=1, w = 100)
cmds.formLayout(self.widgets[name + "_rigSettings_rightFingersForm_" + finger], edit = True, af = [(text, "top", 8),(text, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightFingersForm_" + finger], edit = True, af = [(self.widgets[name + "_" + finger + "_fkStickyFloatFieldR"], "top", 5),(self.widgets[name + "_" + finger + "_fkStickyFloatFieldR"], "left", 100)])
cmds.connectControl(self.widgets[name + "_" + finger + "_fkStickyFloatFieldR"], name + ":" + finger + "_finger_fk_ctrl_1_r.sticky" )
if cmds.objExists(name + ":" + finger + "_r_ik_anim"):
#rig mode
cmds.frameLayout(self.widgets[name + "_rigSettings_rightFingersFrame_" + finger], edit = True, h = 120)
text1 = cmds.text(label = "Rig Mode:", font = "boldLabelFont")
self.widgets[name + "rigSettings_RightFinger_" + finger + "_Mode_Collection"] = cmds.iconTextRadioCollection()
self.widgets[name + "rigSettings_RightFinger_" + finger + "_FkModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_on.bmp", w = 180, h = 50, onc = partial(self.switchFingerMode, name, finger, 0, "r"))
self.widgets[name + "rigSettings_RightFinger_" + finger + "_IkModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_on.bmp",w = 180, h = 50, onc = partial(self.switchFingerMode, name, finger, 1, "r"))
mode = cmds.getAttr(name + ":" + finger + "_finger_r_mode_anim.FK_IK")
cmds.formLayout(self.widgets[name + "_rigSettings_rightFingersForm_" + finger], edit = True, af = [(text1, "top", 30),(text1, "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightFingersForm_" + finger], edit = True, af = [(self.widgets[name + "rigSettings_RightFinger_" + finger + "_FkModeButton"], "top", 45),(self.widgets[name + "rigSettings_RightFinger_" + finger + "_FkModeButton"], "left", 10)])
cmds.formLayout(self.widgets[name + "_rigSettings_rightFingersForm_" + finger], edit = True, af = [(self.widgets[name + "rigSettings_RightFinger_" + finger + "_IkModeButton"], "top", 45),(self.widgets[name + "rigSettings_RightFinger_" + finger + "_IkModeButton"], "right", 20)])
if mode == 0:
cmds.iconTextRadioButton(self.widgets[name + "rigSettings_RightFinger_" + finger + "_FkModeButton"], edit = True, select = True)
if mode == 1:
cmds.iconTextRadioButton(self.widgets[name + "rigSettings_RightFinger_" + finger + "_IkModeButton"], edit = True, select = True)
#create the right click menu for selecting the settings for the left arm
popupMenu = cmds.popupMenu(b = 3, parent = self.widgets[name + "_rigSettings_rightFingersFrame"])
menu = cmds.menuItem(parent = popupMenu, label = "Select Right Finger Settings", c = partial(self.selectRigSettings_Specific, "rightFingers"))
#CUSTOM JOINT CHAINS
spacer = cmds.text(label = "", parent = self.widgets[name + "_rigSettingsMainColumn"], h = 10)
self.widgets[name + "_rigSettings_customJointChainsFrame"] = cmds.frameLayout(label = "Custom Joint Chains", w = 400, parent = self.widgets[name + "_rigSettingsMainColumn"], collapse = True, collapsable = True, borderStyle = "in")
self.widgets[name + "_rigSettings_customJointChainsColumn"] = cmds.rowColumnLayout(nc = 2, cat = [(1, "both", 5), (2, "both", 5)],parent = self.widgets[name + "_rigSettings_customJointChainsFrame"])
customJoints = []
attrs = cmds.listAttr(name + ":" + "Skeleton_Settings")
for attr in attrs:
if attr.find("extraJoint") == 0:
customJoints.append(attr)
for joint in customJoints:
attribute = cmds.getAttr(name + ":" + "Skeleton_Settings." + joint, asString = True)
jointType = attribute.partition("/")[2].partition("/")[0]
label = attribute.rpartition("/")[2]
if jointType == "chain" or jointType == "dynamic":
numJointsInChain = label.partition("(")[2].partition(")")[0]
label = label.partition(" (")[0]
#rig mode
cmds.text(label = "")
cmds.text(label = "")
text = cmds.text(label = label + " rig settings:", font = "boldLabelFont")
cmds.text(label = "")
cmds.text(label = "")
cmds.text(label = "")
self.widgets[name + "rigSettings_customJoints_" + label + "_Collection"] = cmds.iconTextRadioCollection()
self.widgets[name + "_rigSettings_customJoints_" + label + "fkModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsFkMode_on.bmp", w = 180, h = 50, onc = partial(self.switchCustomChainMode, name, label, "FK"))
self.widgets[name + "_rigSettings_customJoints_" + label + "ikModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsIkMode_on.bmp", w = 180, h = 50, onc = partial(self.switchCustomChainMode, name, label, "IK"))
self.widgets[name + "_rigSettings_customJoints_" + label + "dynModeButton"] = cmds.iconTextRadioButton(image = self.mayaToolsDir + "/General/Icons/ART/rigSettingsDynMode_off.bmp",selectionImage = self.mayaToolsDir + "/General/Icons/ART/rigSettingsDynMode_on.bmp", w = 180, h = 50, onc = partial(self.switchCustomChainMode, name, label, "DYNAMIC"))
mode1 = cmds.getAttr(name + ":Rig_Settings." + label + "_fk")
mode2 = cmds.getAttr(name + ":Rig_Settings." + label + "_ik")
mode3 = cmds.getAttr(name + ":Rig_Settings." + label + "_dynamic")
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_customJoints_" + label + "fkModeButton"], edit = True, select = mode1)
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_customJoints_" + label + "ikModeButton"], edit = True, select = mode2)
cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_customJoints_" + label + "dynModeButton"], edit = True, select = mode3)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def selectionScriptJob(self):
    """Selection-changed script job callback for the picker UI.

    Reverts every picker button to its default color, then highlights (in
    white) the button bound to each currently selected node via that node's
    ``buttonName`` string attribute. Finally refreshes the list view.
    """
    # restore defaults first so buttons from the previous selection revert
    self.resetButtonColors()

    # highlight the picker button attached to each selected scene node
    for node in cmds.ls(sl = True):
        if "." in node:
            # skip attribute/component selections -- only whole nodes
            # carry the buttonName attribute
            continue
        attr = node + ".buttonName"
        if cmds.objExists(attr):
            cmds.button(cmds.getAttr(attr), edit = True, bgc = self.white)

    self.listView_ScriptJob()
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def resetButtonColors(self):
    """Reset every character picker button back to its default color.

    For each character returned by ``getCharacters()``:
      * buttons for controls that always exist are recolored unconditionally;
      * buttons for optional controls (extra neck/spine joints, ik spine,
        fk balls, twist controls, fingers, ik fingers, global ik) are only
        recolored when the corresponding control object exists in the scene.

    Default color scheme: blue = fk controls, orange = ik controls,
    purple = twist/root controls, green = group/row/column selectors.
    Refactored from one flat run of ~250 near-identical cmds.button calls
    into data tables; the widget keys, control names, conditions, and call
    order are preserved exactly.
    """
    characters = self.getCharacters()
    for character in characters:
        blue = self.blue
        orange = self.orange
        purple = self.purple
        green = self.green

        def paint(suffix, color):
            # recolor one picker button for this character
            cmds.button(self.widgets[character + suffix], edit = True, bgc = color)

        def paintIfExists(control, suffix, color):
            # recolor the button only when its control exists in the scene
            if cmds.objExists(character + ":" + control):
                paint(suffix, color)

        paint("_headPickerButton", blue)

        # optional fk neck and spine controls
        for i in range(1, 4):
            paintIfExists("neck_0%d_fk_anim" % i, "_neck%d_PickerButton" % i, blue)
        for i in range(1, 6):
            paintIfExists("spine_0%d_anim" % i, "_spine%d_PickerButton" % i, blue)
        paintIfExists("mid_ik_anim", "_ikSpineMidPickerButton", orange)
        paintIfExists("chest_ik_anim", "_ikSpineTopPickerButton", orange)

        # core body buttons that always exist
        for suffix, color in [
                ("_bodyPickerButton", orange), ("_pelvisPickerButton", blue),
                ("_leftClavPickerButton", blue), ("_rightClavPickerButton", blue),
                ("_leftShoulderPickerButton", blue), ("_rightShoulderPickerButton", blue),
                ("_leftElbowPickerButton", blue), ("_rightElbowPickerButton", blue),
                ("_leftHandPickerButton", blue), ("_rightHandPickerButton", blue),
                ("_leftIkElbowPickerButton", orange), ("_rightIkElbowPickerButton", orange),
                ("_leftIkHandPickerButton", orange), ("_rightIkHandPickerButton", orange),
                ("_leftThighPickerButton", blue), ("_rightThighPickerButton", blue),
                ("_leftFkKneePickerButton", blue), ("_rightFkKneePickerButton", blue),
                ("_leftFkAnklePickerButton", blue), ("_rightFkAnklePickerButton", blue)]:
            paint(suffix, color)

        # optional fk ball controls
        paintIfExists("fk_ball_l_anim", "_leftFkBallPickerButton", blue)
        paintIfExists("fk_ball_r_anim", "_rightFkBallPickerButton", blue)

        # ik foot controls, master/offset/root, and group selectors
        for suffix, color in [
                ("_leftIkFootPickerButton", orange), ("_rightIkFootPickerButton", orange),
                ("_leftIkHeelPickerButton", orange), ("_rightIkHeelPickerButton", orange),
                ("_leftIkToeWigglePickerButton", orange), ("_rightIkToeWigglePickerButton", orange),
                ("_leftIkToePickerButton", orange), ("_rightIkToePickerButton", orange),
                ("_masterPickerButton", orange), ("_offsetPickerButton", blue),
                ("_rootPickerButton", purple), ("_headGroupPickerButton", green),
                ("_leftArmGroupPickerButton", green), ("_rightArmGroupPickerButton", green),
                ("_spineGroupPickerButton", green), ("_leftLegGroupPickerButton", green),
                ("_rightLegGroupPickerButton", green)]:
            paint(suffix, color)

        # optional twist controls (note: upperarm uses "twist_2"/"twist_3"
        # while lowerarm/calf use "twist2"/"twist3" -- kept verbatim)
        for control, suffix in [
                ("upperarm_l_twist_anim", "_leftArmRollPickerButton"),
                ("upperarm_l_twist_2_anim", "_leftArmRoll2PickerButton"),
                ("upperarm_l_twist_3_anim", "_leftArmRoll3PickerButton"),
                ("upperarm_r_twist_anim", "_rightArmRollPickerButton"),
                ("upperarm_r_twist_2_anim", "_rightArmRoll2PickerButton"),
                ("upperarm_r_twist_3_anim", "_rightArmRoll3PickerButton"),
                ("lowerarm_l_twist_anim", "_leftForeTwistPickerButton"),
                ("lowerarm_l_twist2_anim", "_leftForeTwist2PickerButton"),
                ("lowerarm_l_twist3_anim", "_leftForeTwist3PickerButton"),
                ("lowerarm_r_twist_anim", "_rightForeTwistPickerButton"),
                ("lowerarm_r_twist2_anim", "_rightForeTwist2PickerButton"),
                ("lowerarm_r_twist3_anim", "_rightForeTwist3PickerButton"),
                ("l_thigh_twist_01_anim", "_leftThighTwistPickerButton"),
                ("l_thigh_twist_02_anim", "_leftThighTwist2PickerButton"),
                ("l_thigh_twist_03_anim", "_leftThighTwist3PickerButton"),
                ("r_thigh_twist_01_anim", "_rightThighTwistPickerButton"),
                ("r_thigh_twist_02_anim", "_rightThighTwist2PickerButton"),
                ("r_thigh_twist_03_anim", "_rightThighTwist3PickerButton"),
                ("calf_l_twist_anim", "_leftCalfTwistPickerButton"),
                ("calf_l_twist2_anim", "_leftCalfTwist2PickerButton"),
                ("calf_l_twist3_anim", "_leftCalfTwist3PickerButton"),
                ("calf_r_twist_anim", "_rightCalfTwistPickerButton"),
                ("calf_r_twist2_anim", "_rightCalfTwist2PickerButton"),
                ("calf_r_twist3_anim", "_rightCalfTwist3PickerButton")]:
            paintIfExists(control, suffix, purple)

        # per-side finger buttons (left side first, matching original order)
        for side, sideName in [("l", "left"), ("r", "right")]:
            # fk fingers: pinky/ring/middle/index have a metacarpal + 3
            # knuckles; thumb has no metacarpal
            for finger, fingerLabel in [("pinky", "Pinky"), ("ring", "Ring"),
                                        ("middle", "Middle"), ("index", "Index")]:
                paintIfExists(finger + "_metacarpal_ctrl_" + side,
                              "_" + sideName + fingerLabel + "MetacarpalPickerButton", blue)
                for i in range(1, 4):
                    paintIfExists("%s_finger_fk_ctrl_%d_%s" % (finger, i, side),
                                  "_%s%s%dPickerButton" % (sideName, fingerLabel, i), blue)
            for i in range(1, 4):
                paintIfExists("thumb_finger_fk_ctrl_%d_%s" % (i, side),
                              "_%sThumb%dPickerButton" % (sideName, i), blue)

            # row/column mass-select buttons always exist
            for rowCol in ["MetaRow", "Knuckle1Row", "Knuckle2Row", "Knuckle3Row",
                           "IndexColumn", "MiddleColumn", "RingColumn",
                           "PinkyColumn", "ThumbColumn"]:
                paint("_" + sideName + rowCol + "PickerButton", green)

            # optional ik finger controls; remember whether any existed so the
            # ik row/pv mass-select buttons get recolored too
            createIKRow = False
            for finger, fingerLabel in [("index", "Index"), ("middle", "Middle"),
                                        ("ring", "Ring"), ("pinky", "Pinky"),
                                        ("thumb", "Thumb")]:
                if cmds.objExists(character + ":" + finger + "_" + side + "_ik_anim"):
                    paint("_" + sideName + fingerLabel + "FingerIKPickerButton", orange)
                    paint("_" + sideName + fingerLabel + "IkPvPickerButton", orange)
                    createIKRow = True
            if createIKRow:
                paint("_" + sideName + "IkFingersRowPickerButton", green)
                paint("_" + sideName + "IkFingersPvsPickerButton", green)

            # optional per-hand global ik control
            paintIfExists(side + "_global_ik_anim",
                          "_" + sideName + "IkGlobalCtrlPickerButton", orange)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def collapseCommand(self, name, layout, *args):
    """Collapse the given picker/rig-settings frame layout.

    Shrinks the frame matching *layout* to a fixed 30px header height for
    the character *name*. Unknown layout names are ignored.
    """
    # map each layout keyword to the widget-key suffix of its frameLayout
    frameKeys = {
        "Body": "_bodyFrame",
        "Fingers": "_fingersFrame",
        "Toes": "_toesFrame",
        "Layers": "_layersFrame",
        "LeftArmSettings": "_rigSettings_leftArmFrame",
        "RightArmSettings": "_rigSettings_rightArmFrame",
        "LeftLegSettings": "_rigSettings_leftLegFrame",
        "RightLegSettings": "_rigSettings_rightLegFrame",
        "SpineSettings": "_rigSettings_spineFrame",
        "HeadSettings": "_rigSettings_headFrame",
        "NeckSettings": "_rigSettings_neckFrame",
        "AutoControlsSettings": "_rigSettings_autoControlsFrame",
        }
    if layout in frameKeys:
        cmds.frameLayout(self.widgets[name + frameKeys[layout]], edit = True, h = 30)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def expandCommand(self, name, layout, height = None, *args):
    """Expand the given picker/rig-settings frame layout to its full height.

    Each layout expands to a hard-coded per-layout height for the character
    *name*. Unknown layout names are ignored.

    NOTE(review): the ``height`` argument is accepted but never used by this
    method -- TODO confirm no caller relies on passing a custom height.
    """
    # map each layout keyword to (widget-key suffix, expanded height)
    expandInfo = {
        "Body": ("_bodyFrame", 470),
        "Fingers": ("_fingersFrame", 205),
        "Toes": ("_toesFrame", 220),
        "Layers": ("_layersFrame", 300),
        "LeftArmSettings": ("_rigSettings_leftArmFrame", 330),
        "RightArmSettings": ("_rigSettings_rightArmFrame", 330),
        "LeftLegSettings": ("_rigSettings_leftLegFrame", 280),
        "RightLegSettings": ("_rigSettings_rightLegFrame", 280),
        "SpineSettings": ("_rigSettings_spineFrame", 150),
        "HeadSettings": ("_rigSettings_headFrame", 100),
        "NeckSettings": ("_rigSettings_neckFrame", 100),
        "AutoControlsSettings": ("_rigSettings_autoControlsFrame", 150),
        }
    if layout in expandInfo:
        frameKey, frameHeight = expandInfo[layout]
        cmds.frameLayout(self.widgets[name + frameKey], edit = True, h = frameHeight)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def selectRigSettings_Specific(self, limb, *args):
    """Select the scene controls relevant to one limb's rig settings.

    Reads the active character from the character-thumb button's annotation,
    then selects that character's Rig_Settings node and/or the limb's key
    animation controls, depending on *limb*.
    """
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)

    # arm/leg: Rig_Settings plus the limb's ik end control
    limbIkControls = {
        "leftArm": "ik_wrist_l_anim",
        "rightArm": "ik_wrist_r_anim",
        "leftLeg": "ik_foot_anim_l",
        "rightLeg": "ik_foot_anim_r",
        }
    if limb in limbIkControls:
        cmds.select(character + ":Rig_Settings")
        cmds.select(character + ":" + limbIkControls[limb], add = True)

    if limb == "spine":
        cmds.select(character + ":Rig_Settings")
        # chest ik control only exists on rigs with an ik spine
        if cmds.objExists(character + ":chest_ik_anim"):
            cmds.select(character + ":chest_ik_anim", add = True)

    if limb == "head":
        cmds.select(character + ":head_fk_anim")

    if limb == "neck":
        cmds.select(character + ":neck_01_fk_anim")

    if limb == "auto":
        cmds.select(character + ":clavicle_l_anim")
        for control in ["clavicle_r_anim", "hip_anim", "chest_ik_anim"]:
            cmds.select(character + ":" + control, add = True)

    # fingers: gather whichever fk base knuckles and mode controls exist
    if limb in ("leftFingers", "rightFingers"):
        side = "l" if limb == "leftFingers" else "r"
        cmds.select(clear = True)
        for finger in ["index", "middle", "ring", "pinky", "thumb"]:
            fkControl = character + ":" + finger + "_finger_fk_ctrl_1_" + side
            if cmds.objExists(fkControl):
                cmds.select(fkControl, add = True)
            modeControl = character + ":" + finger + "_finger_" + side + "_mode_anim"
            if cmds.objExists(modeControl):
                cmds.select(modeControl, add = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def selectRigSettings(self, *args):
    """Select the active character's Rig_Settings node.

    Shift-clicking (modifier bit 1) toggles the node in the current
    selection instead of replacing it.
    """
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    settingsNode = character + ":Rig_Settings"
    if cmds.getModifiers() & 1:
        cmds.select(settingsNode, tgl = True)
    else:
        cmds.select(settingsNode)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def findCustomSelectionSets(self, *args):
    """Rebuild the custom selection-set menu items from the saved sets file.

    Reads the pickled list of sets from ART/system/selectionSets.txt (each
    entry is [label, control1, control2, ...]), deletes any menu items this
    method previously created (tagged "customSelectionSet"), then recreates
    one menu item plus an option box per set under both the 'select' sub
    menu and the selection-sets popup menu. Does nothing if the file is
    missing.
    """
    setsFile = self.mayaToolsDir + "/General/ART/system/selectionSets.txt"
    if not os.path.exists(setsFile):
        return
    f = open(setsFile, 'r')
    try:
        sets = cPickle.load(f)
    finally:
        f.close()
    #make sure menuItem doesn't already exist
    children = cmds.lsUI(mi = True)
    #lsUI can return None when no menu items exist -- guard before iterating
    if children:
        for child in children:
            if cmds.menuItem(child, q = True, docTag = True) == "customSelectionSet":
                cmds.deleteUI(child)
    for i, data in enumerate(sets):
        label = data[0]
        controls = data[1:]
        #add them to both the select sub menu and the selection sets menu
        cmds.menuItem(parent = self.widgets["selectionSetsCustom"], label = label, docTag = "customSelectionSet", ann = str(data), c = partial(self.selectFromCustomSet, controls))
        cmds.menuItem(optionBox = True, parent = self.widgets["selectionSetsCustom"], c = partial(self.printSelectionInfo, controls, i))
        cmds.menuItem(parent = self.widgets["selectionSetMenuPopUp"], label = label, docTag = "customSelectionSet", ann = str(data), c = partial(self.selectFromCustomSet, controls))
        cmds.menuItem(optionBox = True, parent = self.widgets["selectionSetMenuPopUp"], c = partial(self.printSelectionInfo, controls, i))
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def printSelectionInfo(self, controls, removeIndex, *args):
    """Show the contents of a saved selection set and offer rename/remove.

    controls    -- list of control names stored in the set
    removeIndex -- index of this set inside the pickled sets file, used
                   when the user removes or renames the set
    """
    setsFile = self.mayaToolsDir + "/General/ART/system/selectionSets.txt"
    #avoid shadowing the builtin 'string' (original used that name)
    message = "Controls in this selection set:\n\n"
    for control in controls:
        message += control + " | "
    result = cmds.confirmDialog(title = "Selection Details", message = message, button = ["Close", "Rename Set", "Remove This Set"], defaultButton = "Rename Set", cancelButton = "Close", dismissString = "Close", icon = "information")
    if result == "Remove This Set":
        if os.path.exists(setsFile):
            f = open(setsFile, 'r')
            sets = cPickle.load(f)
            f.close()
            #remove the desired set and write the list back out
            sets.pop(removeIndex)
            f = open(setsFile, 'w')
            cPickle.dump(sets, f)
            f.close()
    if result == "Rename Set":
        if os.path.exists(setsFile):
            f = open(setsFile, 'r')
            sets = cPickle.load(f)
            f.close()
            #rename index 0 of the desired set; only commit when the user
            #confirmed the dialog (the original renamed even on dismiss)
            status = cmds.promptDialog(title = "Rename", message = "New Name:")
            if status == "Confirm":
                newName = cmds.promptDialog(q = True, text = True)
                sets[removeIndex][0] = newName
                f = open(setsFile, 'w')
                cPickle.dump(sets, f)
                f.close()
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def createSelectionSet(self, *args):
    """Save the current selection as a named selection set.

    Only selected objects carrying a namespace are stored (the namespace is
    stripped so the set applies to any character); namespace-less objects
    are skipped on purpose. The new set is appended to the pickled list in
    ART/system/selectionSets.txt and the menus are rebuilt. Warns and does
    nothing when the selection is empty.
    """
    selection = cmds.ls(sl = True)
    if len(selection) > 0:
        #strip off the namespace of each selected item(if it has one, if not, don't include that item)
        controls = ["Selection Set"]
        for each in selection:
            if each.find(":") != -1:
                controls.append(each.rpartition(":")[2])
        #store this data to file in ART/system; load existing sets if any
        setsFile = self.mayaToolsDir + "/General/ART/system/selectionSets.txt"
        if os.path.exists(setsFile):
            f = open(setsFile, 'r')
            try:
                sets = cPickle.load(f)
            finally:
                f.close()
        else:
            sets = []
        sets.append(controls)
        f = open(setsFile, 'w')
        try:
            cPickle.dump(sets, f)
        finally:
            #always release the write handle (the original could leak it
            #on the append-to-existing-file path)
            f.close()
        #reload
        self.findCustomSelectionSets()
    else:
        cmds.warning("Nothing selected to create a selection set from.")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def selectFromCustomSet(self, controls, *args):
    """Select every control from a saved selection set on the active character.

    Controls that do not exist on the current character are silently skipped.
    """
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    cmds.select(clear = True)
    existing = [character + ":" + name for name in controls if cmds.objExists(character + ":" + name)]
    for node in existing:
        cmds.select(node, add = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def selectEverything(self, *args):
    """Select every rig control, FK orient locator, visible space switch
    node and the Rig_Settings node for the active character."""
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    controls = list(self.controls)
    controls.append("Rig_Settings")
    #need to find all space switch nodes for the current character
    cmds.select(character + ":*_space_switcher_follow")
    followNodes = cmds.ls(sl = True)
    #keep only the visible switchers and strip the "_follow" suffix
    switchNodes = [node.rpartition("_follow")[0] for node in followNodes if node.find("invis") == -1]
    cmds.select(clear = True)
    for name in controls:
        if cmds.objExists(character + ":" + name):
            cmds.select(character + ":" + name, add = True)
    orientNodes = ["fk_orient_world_loc_l", "fk_orient_world_loc_r", "fk_orient_body_loc_l", "fk_orient_body_loc_r", "head_fk_orient_neck", "head_fk_orient_shoulder", "head_fk_orient_body", "head_fk_orient_world"]
    for name in orientNodes:
        if cmds.objExists(character + ":" + name):
            cmds.select(character + ":" + name, add = True)
    for node in switchNodes:
        cmds.select(node, add = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def selectAll(self, *args):
    """Add every known rig control (plus Rig_Settings) to the current selection."""
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    for name in list(self.controls) + ["Rig_Settings"]:
        fullName = character + ":" + name
        if cmds.objExists(fullName):
            cmds.select(fullName, add = True)
    cmds.warning("Custom Controls and Toes not implemented yet into select all")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def resetAll(self, *args):
    """Zero out every keyable, unlocked attribute on all rig controls.

    Attributes on the avoid list are left untouched, except scale-type
    attributes which are reset to 1 instead of 0. Both FK arm orientation
    modes on Rig_Settings are also reset to 0.
    """
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    avoidAttrs = ["global_scale", "scaleX", "scaleY", "scaleZ", "stretch", "twist_amount", "bias", "spine_01_twistAmount", "spine_02_twistAmount", "spine_03_twistAmount", "spine_04_twistAmount", "spine_05_twistAmount", "sticky", "fkOrientation"]
    scaleAttrs = ["global_scale", "scaleX", "scaleY", "scaleZ"]
    #reset fk arm orientation
    cmds.setAttr(character + ":Rig_Settings.lFkArmOrient", 0)
    cmds.setAttr(character + ":Rig_Settings.rFkArmOrient", 0)
    for control in self.controls:
        node = character + ":" + control
        if not cmds.objExists(node):
            continue
        attrs = cmds.listAttr(node, keyable = True, unlocked = True)
        if attrs is None:
            continue
        for attr in attrs:
            #scale attrs rest at 1; every other non-avoided attr rests at 0
            if attr in scaleAttrs:
                cmds.setAttr(node + "." + attr, 1)
            elif attr not in avoidAttrs:
                cmds.setAttr(node + "." + attr, 0)
    cmds.warning("Custom Controls and Toes not implemented yet into reset all")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def resetSelection(self, *args):
    """Zero out keyable, unlocked attributes on the selected controls.

    Only objects in the active character's namespace are touched. Attributes
    on the avoid list are skipped; scale-type attributes are set to 1
    instead of 0.
    """
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    selection = cmds.ls(sl = True)
    #loop-invariant: build these once instead of once per selected object
    avoidAttrs = ["global_scale", "scaleX", "scaleY", "scaleZ", "stretch", "twist_amount", "bias", "spine_01_twistAmount", "spine_02_twistAmount", "spine_03_twistAmount", "spine_04_twistAmount", "spine_05_twistAmount", "sticky", "fkOrientation"]
    scaleAttrs = ["global_scale", "scaleX", "scaleY", "scaleZ"]
    for each in selection:
        if each.find(character + ":") == 0:
            attrs = cmds.listAttr(each, keyable = True, unlocked = True)
            if attrs != None:
                for attr in attrs:
                    if attr not in avoidAttrs:
                        cmds.setAttr(each + "." + attr, 0)
                    #scale attrs are on the avoid list, so they only get set here
                    if attr in scaleAttrs:
                        cmds.setAttr(each + "." + attr, 1)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def importMotion(self, *args):
    """Launch the import-motion tool, reloading its module to pick up edits."""
    import ART_importMotion as importTool
    reload(importTool)
    importTool.ImportMotionUI()
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def exportMotion(self, *args):
    """Launch the export-motion tool, reloading its module to pick up edits."""
    import ART_exportMotion as exportTool
    reload(exportTool)
    exportTool.ExportMotionUI()
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def setHeadSpace(self, mode, *args):
    """Set the head control's FK orientation space and sync the radio menu.

    mode -- 0 = neck, 1 = shoulder, 2 = body, 3 = world. Any other value
            is ignored, matching the original if-chain.
    """
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    radioButtons = {0: "neckSpaceRB", 1: "shoulderSpaceRB", 2: "bodySpaceRB", 3: "worldSpaceRB"}
    if mode in radioButtons:
        cmds.menuItem(self.widgets[radioButtons[mode]], edit = True, rb = True)
        cmds.setAttr(character + ":" + "head_fk_anim.fkOrientation", mode)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def getHeadSpace(self, *args):
    """Read head_fk_anim.fkOrientation and check the matching radio menu item."""
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    space = cmds.getAttr(character + ":" + "head_fk_anim.fkOrientation")
    radioButtons = {0: "neckSpaceRB", 1: "shoulderSpaceRB", 2: "bodySpaceRB", 3: "worldSpaceRB"}
    if space in radioButtons:
        cmds.menuItem(self.widgets[radioButtons[space]], edit = True, rb = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def getNeckSpace(self, *args):
    """Read neck_01_fk_anim.fkOrientation and check the matching radio menu item."""
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    space = cmds.getAttr(character + ":" + "neck_01_fk_anim.fkOrientation")
    radioButtons = {0: "neckOrientShoulderSpaceRB", 1: "neckOrientBodySpaceRB", 2: "neckOrientWorldSpaceRB"}
    if space in radioButtons:
        cmds.menuItem(self.widgets[radioButtons[space]], edit = True, rb = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def setControlSpace(self, spaceSwitchNode, attr, *args):
    """Switch which space a control follows, optionally matching its transform.

    spaceSwitchNode -- name (without namespace) of the control's space
                       switch node
    attr            -- the "space_*" attribute to enable, or None to return
                       to the default (parent) space

    Reads the match and match-method checkboxes from the UI. With matching
    on: keys the node/control one frame back, captures the current world
    transform with a temporary locator, zeroes every space attribute,
    enables the requested space (if any), then snaps the node or control
    back via a temporary constraint and keys the result. With matching off,
    only the space attribute values are keyed.

    NOTE(review): both "matching off" branches reference a local 'control'
    that is never assigned on those paths, and the first of them iterates
    'for attr in attrs' while the body reads 'attribute' -- both look like
    NameErrors / stale-variable bugs carried over from the matching
    branches. Flagged below at the exact lines; confirm before relying on
    the non-matching code paths.
    """
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    #get match and match method settings
    matching = cmds.menuItem(self.widgets["spaceSwitch_MatchToggleCB"], q = True, cb = True)
    matchToControl = cmds.menuItem(self.widgets["spaceSwitch_MatchMethodCB"], q = True, cb = True)
    #---- switching back to the DEFAULT space ----
    if attr == None:
        if matching:
            #create temp locator to snap the space switch node to
            control = character + ":" + spaceSwitchNode.rpartition("_space")[0]
            currentTime = cmds.currentTime(q = True)
            #set pre-frame key
            if matchToControl == False:
                cmds.setKeyframe(character + ":" + spaceSwitchNode, t = (currentTime - 1))
                cmds.setKeyframe(control, t = (currentTime - 1))
            else:
                cmds.setKeyframe(character + ":" + spaceSwitchNode, t = (currentTime - 1))
                cmds.setKeyframe(control, t = (currentTime - 1))
            cmds.currentTime(currentTime - 1)
            #snapshot the control's transform at the pre-frame, then key it
            loc = cmds.spaceLocator()[0]
            constraint = cmds.parentConstraint(control, loc)[0]
            cmds.delete(constraint)
            constraints = []
            try:
                constraint = cmds.pointConstraint(loc, control)[0]
                constraints.append(constraint)
            except:
                pass
            try:
                constraint = cmds.orientConstraint(loc, control)[0]
                constraints.append(constraint)
            except:
                pass
            #constraint = cmds.parentConstraint(loc, control)[0]
            cmds.setKeyframe(control, t = (cmds.currentTime(q = True)))
            cmds.delete(constraints)
            cmds.delete(loc)
            #create our temp loc
            tempLoc = cmds.spaceLocator()
            cmds.currentTime(currentTime)
            #constrain temp loc to whichever object we will snap back later
            if matchToControl == False:
                constraint = cmds.parentConstraint(character + ":" + spaceSwitchNode, tempLoc[0])[0]
            else:
                constraint = cmds.parentConstraint(control, tempLoc[0])[0]
            cmds.delete(constraint)
            #match and switch space: collect every "space_*" attribute
            attrs = []
            try:
                attrs.extend(cmds.listAttr(character + ":" + spaceSwitchNode, string = "space_*"))
            except:
                pass
            try:
                attrs.extend(cmds.listAttr(control, string = "space_*"))
            except:
                pass
            #zero every space attribute (default space = all off)
            for attribute in attrs:
                if cmds.objExists(character + ":" + spaceSwitchNode+ "." + attribute):
                    cmds.setAttr(character + ":" + spaceSwitchNode+ "." + attribute, 0)
                if cmds.objExists(control + "." + attribute):
                    cmds.setAttr(control + "." + attribute, 0)
            #snap back to the captured transform and key
            if matchToControl == False:
                constraint = cmds.parentConstraint(tempLoc[0], character + ":" + spaceSwitchNode)[0]
                cmds.setKeyframe(character + ":" + spaceSwitchNode, t = currentTime)
                cmds.setKeyframe(control, t = currentTime)
                cmds.delete(constraint)
                cmds.delete(tempLoc)
                cmds.select(clear = True)
            else:
                try:
                    constraint = cmds.parentConstraint(tempLoc[0], control)[0]
                except:
                    constraint = cmds.pointConstraint(tempLoc[0], control)[0]
                #zero out space node
                for attr in [".tx", ".ty", ".tz", ".rx", ".ry", ".rz"]:
                    cmds.setAttr(character + ":" + spaceSwitchNode + attr, 0)
                cmds.setKeyframe(control, t = currentTime)
                cmds.delete(constraint)
                cmds.delete(tempLoc)
                cmds.select(clear = True)
        #if we are not matching, just set attrs
        else:
            currentTime = cmds.currentTime(q = True)
            cmds.setKeyframe(character + ":" + spaceSwitchNode, t = (currentTime - 1))
            attrs = []
            try:
                attrs.extend(cmds.listAttr(character + ":" + spaceSwitchNode, string = "space_*"))
            except:
                pass
            try:
                #NOTE(review): 'control' is never assigned on this branch --
                #this listAttr likely raises NameError (silently swallowed here)
                attrs.extend(cmds.listAttr(control, string = "space_*"))
            except:
                pass
            for attr in attrs:
                #NOTE(review): loop variable is 'attr' but the body reads
                #'attribute' -- NameError / stale value; probably meant 'attr'
                if cmds.objExists(character + ":" + spaceSwitchNode+ "." + attribute):
                    cmds.setAttr(character + ":" + spaceSwitchNode+ "." + attribute, 0)
                if cmds.objExists(control + "." + attribute):
                    cmds.setAttr(control + "." + attribute, 0)
            cmds.setKeyframe(character + ":" + spaceSwitchNode, t = currentTime)
            #NOTE(review): 'control' unbound here as well -- TODO confirm
            cmds.setKeyframe(control, t = currentTime)
    #---- switching to a space OTHER than default ----
    else:
        if matching:
            #create temp locator to snap the space switch node to
            control = character + ":" + spaceSwitchNode.rpartition("_space")[0]
            currentTime = cmds.currentTime(q = True)
            #set pre-frame key
            if matchToControl == False:
                cmds.currentTime(currentTime - 1)
                cmds.setKeyframe(character + ":" + spaceSwitchNode)
                cmds.setKeyframe(control)
            else:
                cmds.currentTime(currentTime - 1)
                cmds.setKeyframe(character + ":" + spaceSwitchNode)
                cmds.setKeyframe(control)
            cmds.currentTime(currentTime - 1)
            #snapshot the control's transform at the pre-frame, then key it
            loc = cmds.spaceLocator()[0]
            try:
                constraint = cmds.parentConstraint(control, loc)[0]
            except:
                constraint = cmds.pointConstraint(control, loc)[0]
            cmds.delete(constraint)
            try:
                constraint = cmds.parentConstraint(loc, control)[0]
            except:
                constraint = cmds.pointConstraint(loc, control)[0]
            cmds.setKeyframe(control, t = (cmds.currentTime(q = True)))
            cmds.delete(constraint)
            cmds.delete(loc)
            #create our temp loc
            tempLoc = cmds.spaceLocator()[0]
            cmds.currentTime(currentTime)
            #constrain temp loc
            if matchToControl == False:
                constraint = cmds.parentConstraint(character + ":" + spaceSwitchNode, tempLoc)[0]
            else:
                constraint = cmds.parentConstraint(control, tempLoc)[0]
            cmds.delete(constraint)
            #match and switch space: collect every "space_*" attribute
            attrs = []
            try:
                attrs.extend(cmds.listAttr(character + ":" + spaceSwitchNode, string = "space_*"))
            except:
                pass
            try:
                attrs.extend(cmds.listAttr(control, string = "space_*"))
            except:
                pass
            #zero every space attribute and key it
            for attribute in attrs:
                cmds.currentTime(currentTime)
                if cmds.objExists(character + ":" + spaceSwitchNode + "." + attribute):
                    cmds.setAttr(character + ":" + spaceSwitchNode + "." + attribute, 0)
                    cmds.setKeyframe(character + ":" + spaceSwitchNode, t = currentTime)
                if cmds.objExists(control + "." + attribute):
                    cmds.setAttr(control + "." + attribute, 0)
                    cmds.setKeyframe(control, t = currentTime)
            #enable only the requested space attribute
            if cmds.objExists(character + ":" + spaceSwitchNode + "." + attr):
                cmds.setAttr(character + ":" + spaceSwitchNode + "." + attr, 1)
                cmds.setKeyframe(character + ":" + spaceSwitchNode, attribute = attr, t = currentTime)
            if cmds.objExists(control + "." + attr):
                cmds.setAttr(control + "." + attr, 1)
                cmds.setKeyframe(control, attribute = attr, t = currentTime)
            #snap back to the captured transform and key
            if matchToControl == False:
                constraint = cmds.parentConstraint(tempLoc, character + ":" + spaceSwitchNode)[0]
                cmds.setKeyframe(character + ":" + spaceSwitchNode, t = currentTime)
                cmds.delete(constraint)
                cmds.delete(tempLoc)
                cmds.select(clear = True)
            else:
                try:
                    constraint = cmds.parentConstraint(tempLoc, control)[0]
                except:
                    constraint = cmds.pointConstraint(tempLoc, control)[0]
                #zero out space node
                for attribute in [".tx", ".ty", ".tz", ".rx", ".ry", ".rz"]:
                    cmds.setAttr(character + ":" + spaceSwitchNode + attribute, 0)
                cmds.setKeyframe(control, t = currentTime)
                cmds.delete(constraint)
                cmds.delete(tempLoc)
                cmds.select(clear = True)
        #if we are not matching, just set attrs
        else:
            currentTime = cmds.currentTime(q = True)
            cmds.setKeyframe(character + ":" + spaceSwitchNode, t = (currentTime - 1))
            #NOTE(review): 'control' is never assigned on this branch either --
            #the references below likely raise NameError; TODO confirm
            cmds.setKeyframe(control, t = (currentTime - 1))
            attrs = []
            try:
                attrs.extend(cmds.listAttr(character + ":" + spaceSwitchNode, string = "space_*"))
            except:
                pass
            try:
                attrs.extend(cmds.listAttr(control, string = "space_*"))
            except:
                pass
            for attribute in attrs:
                if cmds.objExists(character + ":" + spaceSwitchNode+ "." + attribute):
                    cmds.setAttr(character + ":" + spaceSwitchNode+ "." + attribute, 0)
                if cmds.objExists(control + "." + attribute):
                    cmds.setAttr(control + "." + attribute, 0)
            if cmds.objExists(character + ":" + spaceSwitchNode+ "." + attr):
                cmds.setAttr(character + ":" + spaceSwitchNode + "." + attr, 1)
                cmds.setKeyframe(character + ":" + spaceSwitchNode, attribute = attr, t = currentTime)
            #NOTE(review): checks 'attribute' (last loop value) but sets 'attr'
            #-- probably both should be 'attr'
            if cmds.objExists(control + "." + attribute):
                cmds.setAttr(control + "." + attr, 1)
                cmds.setKeyframe(control, attribute = attr, t = currentTime)
    #set the current time back
    cmds.currentTime(currentTime)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def getControlSpaces(self, popupMenu, radioCollection, spaceSwitchNode, *args):
    """Rebuild the 'Space Switching' radio menu for a control's space switch node.

    popupMenu       -- popup menu whose 'Space Switching' submenu is rebuilt
    radioCollection -- radio menu item collection the space items belong to
    spaceSwitchNode -- name (without namespace) of the space switch node

    Deletes the previously generated items (tagged with the collection
    name), adds a 'default [parent]' item, then one radio item per
    "space_*" attribute found on the node or its control. If no space is
    currently active, the default item is checked.
    """
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    #delete any existing children of the radioCollection
    menuItems = cmds.lsUI(mi = True)
    #lsUI can return None when no menu items exist -- guard before iterating
    if menuItems:
        for item in menuItems:
            if cmds.menuItem(item, q = True, docTag = True) == str(radioCollection):
                cmds.deleteUI(item)
    #add spaces to radio menu item collection
    if cmds.objExists(character + ":" + spaceSwitchNode):
        children = cmds.popupMenu(popupMenu, q = True, itemArray = True)
        for child in children:
            label = cmds.menuItem(child, q = True, label = True)
            if label == "Space Switching":
                parentSpace = cmds.listRelatives(character + ":" + spaceSwitchNode + "_follow", parent = True)[0]
                parentSpace = parentSpace.partition(":")[2]
                #add the default space
                defMenu = cmds.menuItem(label = "default [" + parentSpace + "]", parent = child, cl = radioCollection, rb = False, docTag = str(radioCollection), c = partial(self.setControlSpace, spaceSwitchNode, None))
                #add the rest of the spaces found on the space switch node
                control = spaceSwitchNode.partition("_space")[0]
                attrs = []
                try:
                    attrs.extend(cmds.listAttr(character + ":" + spaceSwitchNode, string = "space_*"))
                except:
                    pass
                try:
                    attrs.extend(cmds.listAttr(character + ":" + control, string = "space_*"))
                except:
                    pass
                found = False
                for attr in attrs:
                    label = attr.partition("space_")[2]
                    #initialize so an attribute that exists on neither node
                    #cannot leave 'value' unbound (NameError in the original)
                    value = False
                    if cmds.objExists(character + ":" + spaceSwitchNode + "." + attr):
                        value = cmds.getAttr(character + ":" + spaceSwitchNode + "." + attr)
                    if cmds.objExists(character + ":" + control + "." + attr):
                        value = cmds.getAttr(character + ":" + control + "." + attr)
                    if value == True:
                        found = True
                    cmds.menuItem(label = label, parent = child, cl = radioCollection, rb = value, docTag = str(radioCollection), c = partial(self.setControlSpace, spaceSwitchNode, attr))
                #no space active anywhere: the default space is the active one
                if found == False:
                    cmds.menuItem(defMenu, edit = True, rb = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def spaceSwitcher(self, *args):
    """Open the space switcher tool for the active character, or delete the
    switcher UI when no character thumbnail button exists."""
    if not cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, exists = True):
        cmds.deleteUI("spaceSwitcherUI")
        return
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    import ART_spaceSwitcher
    reload(ART_spaceSwitcher)
    ART_spaceSwitcher.SpaceSwitcher(character, self)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def createSpace(self, *args):
    """Create a new space via the space switcher tool; warns if the
    animation UI is not present."""
    if not cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, exists = True):
        cmds.warning("No Animation UI detected")
        return
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    import ART_spaceSwitcher
    reload(ART_spaceSwitcher)
    switcher = ART_spaceSwitcher.SpaceSwitcher(character, self)
    cmds.deleteUI("spaceSwitcherUI")
    switcher.createSpaceSwitcherSpace()
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def poseEditor(self, *args):
    """Launch the pose editor for the active character."""
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    import ART_poseEditor as poseEditorModule
    reload(poseEditorModule)
    poseEditorModule.PoseEditor_UI(character, self)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def copyPose(self, *args):
    """Copy the pose of the selected controls to the pose clipboard file.

    For each selected object, stores [controlName, value1, value2, ...]
    (namespace stripped; keyable attribute values in listAttr order) and
    pickles the list to poseCache.txt. Also updates the pose-tools button
    annotation to list the clipboard contents.
    """
    #grab selection
    selection = cmds.ls(sl = True)
    #get attributes from each object in selection
    poseData = []
    for each in selection:
        control = each.partition(":")[2]
        controlInfo = [control]
        attrs = cmds.listAttr(each, keyable = True)
        for attr in attrs:
            value = cmds.getAttr(each + "." + attr)
            controlInfo.append(value)
        poseData.append(controlInfo)
    #write pose data to file
    f = open(self.mayaToolsDir + "/poseCache.txt", 'w')
    try:
        cPickle.dump(poseData, f)
    finally:
        #the original wrote 'f.close' with no parentheses, leaking the handle
        f.close()
    #change the annotation of the button to have the clipboard contents
    #(renamed from 'string' to avoid shadowing the builtin)
    annotation = "Pose Clipboard Contents:\n\n"
    for pose in poseData:
        annotation += pose[0] + "\n"
    cmds.symbolButton(self.widgets["pickerPoseTools"], edit = True, ann = annotation)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def pastePose(self, *args):
    """
    Apply the clipboard pose (poseCache.txt) onto the active character.

    For each control recorded by copyPose, the stored values are assigned
    positionally onto that control's keyable, unlocked attributes. Does
    nothing if no pose cache file exists.
    """
    #load pose from poseCache file
    poseCacheFile = self.mayaToolsDir + "/poseCache.txt"
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    if os.path.exists(poseCacheFile):
        f = open(poseCacheFile, 'r')
        poseData = cPickle.load(f)
        f.close()
        #each entry is [controlName, value0, value1, ...]
        for entry in poseData:
            control = entry[0]
            values = entry[1:]
            #NOTE(review): copyPose records all keyable attrs, but here only
            #keyable+unlocked attrs are listed — values align positionally,
            #which assumes no keyable attr is locked; verify against rigs.
            targetAttrs = cmds.listAttr(character + ":" + control, keyable = True, unlocked = True)
            for idx in range(len(targetAttrs)):
                attrPath = character + ":" + control + "." + targetAttrs[idx]
                cmds.setAttr(attrPath, values[idx])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def pastePreview(self, *args):
    """
    Select every control recorded in the pose clipboard on the active
    character, so the user can preview which controls a paste would affect.
    Does nothing if no pose cache file exists.
    """
    #load pose from poseCache file
    poseCacheFile = self.mayaToolsDir + "/poseCache.txt"
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    if not os.path.exists(poseCacheFile):
        return
    f = open(poseCacheFile, 'r')
    poseData = cPickle.load(f)
    f.close()
    #build up the selection from the stored control names
    cmds.select(clear = True)
    for entry in poseData:
        cmds.select(character + ":" + entry[0], add = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def pasteOppositePreview(self, *args):
    """
    Select the mirror-side counterpart of every control recorded in the pose
    clipboard, previewing what pastePoseOpposite would affect.

    Control names are mirrored by swapping "_l"/"_r" tokens, both as a
    suffix (rpartition) and as an infix (partition). The checks run in
    sequence and each later match overwrites the earlier result, so a name
    containing both tokens resolves to whichever rule fires last — this
    mirrors the original cascade exactly.
    """
    #load pose from poseCache file
    poseCacheFile = self.mayaToolsDir + "/poseCache.txt"
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    if not os.path.exists(poseCacheFile):
        return
    f = open(poseCacheFile, 'r')
    poseData = cPickle.load(f)
    f.close()
    cmds.select(clear = True)
    for entry in poseData:
        control = entry[0]
        ctrl = control
        if "_l" in control:
            ctrl = control.rpartition("_l")[0] + "_r"
        if "_l_" in control:
            head, _, tail = control.partition("_l_")
            ctrl = head + "_r_" + tail
        if "_r" in control:
            ctrl = control.rpartition("_r")[0] + "_l"
        if "_r_" in control:
            head, _, tail = control.partition("_r_")
            ctrl = head + "_l_" + tail
        cmds.select(character + ":" + ctrl, add = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def pastePoseOpposite(self, *args):
    """
    Paste the clipboard pose onto the mirror-side controls of the active
    character.

    For each stored control, the opposite-side control name is derived by
    swapping "_l"/"_r" (suffix form) or "_l_"/"_r_" (infix form), and the
    stored attribute values are applied there. For controls in the mirror
    lists below, selected translate/rotate channels are negated so the pose
    reads correctly on the opposite side.
    """
    #load pose from poseCache file
    poseCacheFile = self.mayaToolsDir + "/poseCache.txt"
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    #NOTE(review): this list is empty, so every "in mirrorAllTransControls"
    #branch below is currently dead code — presumably a placeholder for
    #controls needing all three translates negated; confirm intent.
    mirrorAllTransControls = []
    #controls that need only translateX negated when mirrored
    mirrorxTransControls = ["ik_elbow_l_anim", "ik_elbow_r_anim", "clavicle_l_anim", "clavicle_r_anim","ik_foot_anim_l", "ik_foot_anim_r", "ik_wrist_l_anim", "ik_wrist_r_anim"]
    #controls that additionally need rotateY and rotateZ negated
    mirrorRotateZandY = ["ik_foot_anim_l", "ik_foot_anim_r", "ik_wrist_l_anim", "ik_wrist_r_anim"]
    #mirrorRotateY = ["ik_wrist_l_anim", "ik_wrist_r_anim"]
    if os.path.exists(poseCacheFile):
        f = open(poseCacheFile, 'r')
        poseData = cPickle.load(f)
        f.close()
        #sort through pose data, finding control, and values
        for data in poseData:
            control = data[0]
            #data[1:] are the stored attribute values, in copyPose order
            newData = []
            for i in range(1, int(len(data))):
                newData.append(data[i])
            #NOTE(review): values align positionally with keyable+unlocked
            #attrs, but copyPose stored all keyable attrs — assumes none are
            #locked on the target; verify against production rigs.
            attrs = cmds.listAttr(character + ":" + control, keyable = True, unlocked = True)
            #---- left -> right: "_l" suffix form ----
            if control.find("_l") != -1:
                if control.rpartition("_l")[2] == "":
                    ctrl = control.rpartition("_l")[0] + "_r"
                    if ctrl in mirrorAllTransControls:
                        for attr in attrs:
                            if attr.find("translateX") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                            if attr.find("translateY") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                            if attr.find("translateZ") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                    if ctrl in mirrorxTransControls:
                        for attr in attrs:
                            if attr.find("translateX") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                    if ctrl in mirrorRotateZandY:
                        for attr in attrs:
                            if attr.find("rotateY") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                            if attr.find("rotateZ") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                    for i in range(int(len(attrs))):
                        cmds.setAttr(character + ":" + ctrl + "." + attrs[i], newData[i])
                #---- left -> right: "_l_" infix form ----
                if control.find("_l_") != -1:
                    prefix = control.partition("_l_")[0]
                    suffix = control.partition("_l_")[2]
                    ctrl = prefix + "_r_" + suffix
                    if ctrl in mirrorAllTransControls:
                        for attr in attrs:
                            if attr.find("translateX") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                            if attr.find("translateY") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                            if attr.find("translateZ") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                    if ctrl in mirrorxTransControls:
                        for attr in attrs:
                            if attr.find("translateX") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                    if ctrl in mirrorRotateZandY:
                        for attr in attrs:
                            if attr.find("rotateY") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                            if attr.find("rotateZ") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                    for i in range(int(len(attrs))):
                        cmds.setAttr(character + ":" + ctrl + "." + attrs[i], newData[i])
            #---- right -> left: "_r" suffix form ----
            if control.find("_r") != -1:
                if control.rpartition("_r")[2] == "":
                    ctrl = control.rpartition("_r")[0] + "_l"
                    if ctrl in mirrorAllTransControls:
                        for attr in attrs:
                            if attr.find("translateX") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                            if attr.find("translateY") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                            if attr.find("translateZ") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                    if ctrl in mirrorxTransControls:
                        for attr in attrs:
                            if attr.find("translateX") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                    if ctrl in mirrorRotateZandY:
                        for attr in attrs:
                            if attr.find("rotateY") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                            if attr.find("rotateZ") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                    for i in range(int(len(attrs))):
                        cmds.setAttr(character + ":" + ctrl + "." + attrs[i], newData[i])
                #---- right -> left: "_r_" infix form ----
                if control.find("_r_") != -1:
                    prefix = control.partition("_r_")[0]
                    suffix = control.partition("_r_")[2]
                    ctrl = prefix + "_l_" + suffix
                    if ctrl in mirrorAllTransControls:
                        for attr in attrs:
                            if attr.find("translateX") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                            if attr.find("translateY") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                            if attr.find("translateZ") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                    if ctrl in mirrorxTransControls:
                        for attr in attrs:
                            if attr.find("translateX") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                    if ctrl in mirrorRotateZandY:
                        for attr in attrs:
                            if attr.find("rotateY") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                            if attr.find("rotateZ") == 0:
                                index = attrs.index(attr)
                                newData[index] = newData[index] * -1
                    for i in range(int(len(attrs))):
                        cmds.setAttr(character + ":" + ctrl + "." + attrs[i], newData[i])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def toggleControlVis(self, visibility, *args):
    """
    Show or hide every rig control shape on the active character.

    visibility : truthy shows the control shapes, falsy hides them.
    Covers the full standard biped control set plus any custom (leaf,
    jiggle, chain, dynamic) joint controls declared on Skeleton_Settings.

    NOTE(review): for chain/dynamic joints this calls cmds.select to
    enumerate the ik controls, which clobbers the user's current selection —
    presumably acceptable for this tool; confirm.
    """
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    #standard biped control set (assigned directly — the original copied the
    #same literal into a new list element by element for no benefit)
    controls = ["head_fk_anim", "neck_01_fk_anim", "neck_02_fk_anim", "neck_03_fk_anim", "spine_01_anim", "spine_02_anim", "spine_03_anim", "spine_04_anim", "spine_05_anim", "mid_ik_anim", "chest_ik_anim",
                "body_anim", "hip_anim", "clavicle_l_anim", "clavicle_r_anim", "fk_arm_l_anim", "fk_arm_r_anim", "fk_elbow_l_anim", "fk_elbow_r_anim", "fk_wrist_l_anim", "fk_wrist_r_anim",
                "ik_elbow_l_anim", "ik_elbow_r_anim", "ik_wrist_l_anim", "ik_wrist_r_anim", "fk_thigh_l_anim", "fk_thigh_r_anim", "fk_calf_l_anim", "fk_calf_r_anim", "fk_foot_l_anim", "fk_foot_r_anim",
                "fk_ball_l_anim", "fk_ball_r_anim", "ik_foot_anim_l", "ik_foot_anim_r", "heel_ctrl_l", "heel_ctrl_r", "toe_wiggle_ctrl_l", "toe_wiggle_ctrl_r",
                "toe_tip_ctrl_l", "toe_tip_ctrl_r", "master_anim", "offset_anim", "root_anim", "upperarm_l_twist_anim", "upperarm_l_twist_2_anim", "upperarm_l_twist_3_anim", "upperarm_r_twist_anim", "upperarm_r_twist_2_anim", "upperarm_r_twist_3_anim", "l_thigh_twist_01_anim", "r_thigh_twist_01_anim",
                "pinky_metacarpal_ctrl_l", "pinky_metacarpal_ctrl_r", "pinky_finger_fk_ctrl_1_l", "pinky_finger_fk_ctrl_1_r", "pinky_finger_fk_ctrl_2_l", "pinky_finger_fk_ctrl_2_r", "pinky_finger_fk_ctrl_3_l", "pinky_finger_fk_ctrl_3_r",
                "ring_metacarpal_ctrl_l", "ring_metacarpal_ctrl_r", "ring_finger_fk_ctrl_1_l", "ring_finger_fk_ctrl_1_r", "ring_finger_fk_ctrl_2_l", "ring_finger_fk_ctrl_2_r", "ring_finger_fk_ctrl_3_l", "ring_finger_fk_ctrl_3_r",
                "middle_metacarpal_ctrl_l", "middle_metacarpal_ctrl_r", "middle_finger_fk_ctrl_1_l", "middle_finger_fk_ctrl_1_r", "middle_finger_fk_ctrl_2_l", "middle_finger_fk_ctrl_2_r", "middle_finger_fk_ctrl_3_l", "middle_finger_fk_ctrl_3_r",
                "index_metacarpal_ctrl_l", "index_metacarpal_ctrl_r", "index_finger_fk_ctrl_1_l", "index_finger_fk_ctrl_1_r", "index_finger_fk_ctrl_2_l", "index_finger_fk_ctrl_2_r", "index_finger_fk_ctrl_3_l", "index_finger_fk_ctrl_3_r",
                "thumb_finger_fk_ctrl_1_l", "thumb_finger_fk_ctrl_1_r", "thumb_finger_fk_ctrl_2_l", "thumb_finger_fk_ctrl_2_r", "thumb_finger_fk_ctrl_3_l", "thumb_finger_fk_ctrl_3_r",
                "index_l_ik_anim", "index_r_ik_anim", "middle_l_ik_anim", "middle_r_ik_anim", "ring_l_ik_anim", "ring_r_ik_anim", "pinky_l_ik_anim", "pinky_r_ik_anim", "thumb_l_ik_anim", "thumb_r_ik_anim",
                "index_l_poleVector", "index_r_poleVector", "middle_l_poleVector", "middle_r_poleVector", "ring_l_poleVector", "ring_r_poleVector", "pinky_l_poleVector", "pinky_r_poleVector", "thumb_l_poleVector", "thumb_r_poleVector",
                "l_global_ik_anim", "r_global_ik_anim", "lowerarm_l_twist_anim", "lowerarm_l_twist2_anim", "lowerarm_l_twist3_anim", "lowerarm_r_twist_anim", "lowerarm_r_twist2_anim", "lowerarm_r_twist3_anim", "calf_r_twist_anim", "calf_r_twist2_anim", "calf_r_twist3_anim",
                "calf_l_twist_anim", "calf_l_twist2_anim", "calf_l_twist3_anim", "thigh_l_twist_2_anim", "thigh_l_twist_3_anim", "thigh_r_twist_2_anim", "thigh_r_twist_3_anim"]
    #find custom joints declared as extraJoint* attrs on Skeleton_Settings
    customJoints = []
    attrs = cmds.listAttr(character + ":" + "Skeleton_Settings")
    for attr in attrs:
        if attr.find("extraJoint") == 0:
            customJoints.append(attr)
    for joint in customJoints:
        attribute = cmds.getAttr(character + ":" + "Skeleton_Settings." + joint, asString = True)
        #attribute layout is "<parent>/<jointType>/<label>" — parsed positionally
        jointType = attribute.partition("/")[2].partition("/")[0]
        label = attribute.rpartition("/")[2]
        if jointType == "leaf":
            label = label.partition(" (")[0]
            controls.append(label + "_anim")
        if jointType == "jiggle":
            controls.append(label + "_anim")
        if jointType == "chain" or jointType == "dynamic":
            #label carries the chain length as "name (N)"
            numJointsInChain = label.partition("(")[2].partition(")")[0]
            label = label.partition(" (")[0]
            for i in range(int(numJointsInChain)):
                controls.append("fk_" + label + "_0" + str(i + 1) + "_anim")
            controls.append(label + "_cv_0_anim")
            controls.append(label + "_dyn_anim")
            #enumerate the ik controls by wildcard selection (side effect:
            #changes the scene selection)
            cmds.select("*:" + label + "_ik_*_anim")
            for each in cmds.ls(sl = True):
                controls.append(each.partition(":")[2])
    #apply the visibility to the first shape of every existing control
    value = 1 if visibility else 0
    for control in controls:
        if cmds.objExists(character + ":" + control):
            shape = cmds.listRelatives(character + ":" + control, shapes = True)[0]
            cmds.setAttr(shape + ".v", value)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def updateUI_scriptJob(self, *args):
    """
    Install a scriptJob that re-syncs the rig-settings UI (via updateUI)
    whenever the current time changes. The job is parented to the tool
    window and killed with it (kws = True); its id is kept on
    self.mainScriptJob.
    """
    self.mainScriptJob = cmds.scriptJob(event = ["timeChanged", self.updateUI], parent = self.widgets["window"], kws = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def updateUI(self, *args):
    """
    Sync the rig-settings widgets (FK/IK mode radio buttons, orient-space
    menu items) with the current attribute values on the active character.
    Invoked on every timeChanged event by updateUI_scriptJob.
    """
    name = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
    #gather the extraJoint* attributes describing custom joints
    customJoints = []
    attrs = cmds.listAttr(name + ":" + "Skeleton_Settings")
    for attr in attrs:
        if attr.find("extraJoint") == 0:
            customJoints.append(attr)
    #custom chain/dynamic joints each expose fk/ik/dynamic mode attrs
    for joint in customJoints:
        attribute = cmds.getAttr(name + ":" + "Skeleton_Settings." + joint, asString = True)
        #attribute layout is "<parent>/<jointType>/<label>" — parsed positionally
        jointType = attribute.partition("/")[2].partition("/")[0]
        label = attribute.rpartition("/")[2]
        if jointType == "chain" or jointType == "dynamic":
            label = label.partition(" (")[0]
            mode1 = cmds.getAttr(name + ":Rig_Settings." + label + "_fk")
            mode2 = cmds.getAttr(name + ":Rig_Settings." + label + "_ik")
            mode3 = cmds.getAttr(name + ":Rig_Settings." + label + "_dynamic")
            cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_customJoints_" + label + "fkModeButton"], edit = True, select = mode1)
            cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_customJoints_" + label + "ikModeButton"], edit = True, select = mode2)
            cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_customJoints_" + label + "dynModeButton"], edit = True, select = mode3)
    #limb FK/IK mode buttons (0 = FK, 1 = IK)
    mode = cmds.getAttr(name + ":Rig_Settings.rArmMode")
    if mode == 0:
        cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_rightArmFkModeButton"], edit = True, select = True)
    if mode == 1:
        cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_rightArmIkModeButton"], edit = True, select = True)
    mode = cmds.getAttr(name + ":Rig_Settings.lArmMode")
    if mode == 0:
        cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_leftArmFkModeButton"], edit = True, select = True)
    if mode == 1:
        cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_leftArmIkModeButton"], edit = True, select = True)
    mode = cmds.getAttr(name + ":Rig_Settings.lLegMode")
    if mode == 0:
        cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_leftLegFkModeButton"], edit = True, select = True)
    if mode == 1:
        cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_leftLegIkModeButton"], edit = True, select = True)
    mode = cmds.getAttr(name + ":Rig_Settings.rLegMode")
    if mode == 0:
        cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_rightLegFkModeButton"], edit = True, select = True)
    if mode == 1:
        cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_rightLegIkModeButton"], edit = True, select = True)
    #spine mode: fk/ik weights compared directly; ties favor IK
    mode1 = cmds.getAttr(name + ":Rig_Settings.spine_fk")
    mode2 = cmds.getAttr(name + ":Rig_Settings.spine_ik")
    if mode1 > mode2:
        cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_spineFkModeButton"], edit = True, select = True)
    else:
        cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_spineIkModeButton"], edit = True, select = True)
    #head orient space: 0 = fk, 1 = shoulder, 2 = body, 3 = world
    mode = cmds.getAttr(name + ":head_fk_anim.fkOrientation")
    if mode == 0:
        cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_headFkOrientModeButton"], edit = True, select = True)
    if mode == 1:
        cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_headShoulderOrientModeButton"], edit = True, select = True)
    if mode == 2:
        cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_headBodyOrientModeButton"], edit = True, select = True)
    if mode == 3:
        cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_headWorldOrientModeButton"], edit = True, select = True)
    #neck orient space is optional on older rigs — best effort
    try:
        mode = cmds.getAttr(name + ":neck_01_fk_anim.fkOrientation")
        if mode == 0:
            cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_neckShoulderOrientModeButton"], edit = True, select = True)
        if mode == 1:
            cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_neckBodyOrientModeButton"], edit = True, select = True)
        if mode == 2:
            cmds.iconTextRadioButton(self.widgets[name + "_rigSettings_neckWorldOrientModeButton"], edit = True, select = True)
    except:
        pass
    #left-hand finger FK/IK buttons
    #(widget keys have no underscore between name and "rigSettings" —
    #presumably matching the keys used at creation; verify before changing)
    for finger in ["index", "middle", "ring", "pinky", "thumb"]:
        if cmds.objExists(name + ":" + finger + "_l_ik_anim"):
            mode = cmds.getAttr(name + ":" + finger + "_finger_l_mode_anim.FK_IK")
            if mode == 0:
                cmds.iconTextRadioButton(self.widgets[name + "rigSettings_LeftFinger_" + finger + "_FkModeButton"], edit = True, select = True)
            if mode == 1:
                cmds.iconTextRadioButton(self.widgets[name + "rigSettings_LeftFinger_" + finger + "_IkModeButton"], edit = True, select = True)
    #right-hand finger FK/IK buttons
    for finger in ["index", "middle", "ring", "pinky", "thumb"]:
        #BUGFIX: this loop previously probed the LEFT control
        #("_l_ik_anim") while reading the RIGHT mode attribute below —
        #an apparent copy/paste slip; probe the right-hand control instead
        if cmds.objExists(name + ":" + finger + "_r_ik_anim"):
            mode = cmds.getAttr(name + ":" + finger + "_finger_r_mode_anim.FK_IK")
            if mode == 0:
                cmds.iconTextRadioButton(self.widgets[name + "rigSettings_RightFinger_" + finger + "_FkModeButton"], edit = True, select = True)
            if mode == 1:
                cmds.iconTextRadioButton(self.widgets[name + "rigSettings_RightFinger_" + finger + "_IkModeButton"], edit = True, select = True)
    #right arm FK orient space radio menu items (0 = clav, 1 = body, 2 = world)
    #NOTE(review): "button" here is a widget VALUE, yet it is spliced into a
    #widgets key below ("name_button_rightArm_ClavSpace"); this only works if
    #the menu items were registered under those composed keys — any KeyError
    #is swallowed by the bare except. Left as-is; verify against creation code.
    try:
        mode = cmds.getAttr(name + ":Rig_Settings.rFkArmOrient")
        for button in[self.widgets[name + "_rightClavPickerButton"], self.widgets[name + "_rightShoulderPickerButton"], self.widgets[name + "_rightElbowPickerButton"], self.widgets[name + "_rightHandPickerButton"], self.widgets[name + "_rightIkElbowPickerButton"], self.widgets[name + "_rightIkHandPickerButton"]]:
            if mode == 0:
                clavVal = True
                bodyVal = False
                worldVal = False
            if mode == 1:
                clavVal = False
                bodyVal = True
                worldVal = False
            if mode == 2:
                clavVal = False
                bodyVal = False
                worldVal = True
            cmds.menuItem(self.widgets[name + "_" + button + "_rightArm_ClavSpace"], edit = True, rb = clavVal)
            cmds.menuItem(self.widgets[name + "_" + button + "_rightArm_BodySpace"], edit = True, rb = bodyVal)
            cmds.menuItem(self.widgets[name + "_" + button + "_rightArm_WrldSpace"], edit = True, rb = worldVal)
    except:
        pass
    #left arm FK orient space radio menu items (same caveat as above)
    try:
        mode = cmds.getAttr(name + ":Rig_Settings.lFkArmOrient")
        for button in[self.widgets[name + "_leftClavPickerButton"], self.widgets[name + "_leftShoulderPickerButton"], self.widgets[name + "_leftElbowPickerButton"], self.widgets[name + "_leftHandPickerButton"], self.widgets[name + "_leftIkElbowPickerButton"], self.widgets[name + "_leftIkHandPickerButton"]]:
            if mode == 0:
                clavVal = True
                bodyVal = False
                worldVal = False
            if mode == 1:
                clavVal = False
                bodyVal = True
                worldVal = False
            if mode == 2:
                clavVal = False
                bodyVal = False
                worldVal = True
            cmds.menuItem(self.widgets[name + "_" + button + "_leftArm_ClavSpace"], edit = True, rb = clavVal)
            cmds.menuItem(self.widgets[name + "_" + button + "_leftArm_BodySpace"], edit = True, rb = bodyVal)
            cmds.menuItem(self.widgets[name + "_" + button + "_leftArm_WrldSpace"], edit = True, rb = worldVal)
    except:
        pass
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def ikKneeSolve(self, character, side, angle, object1, object2, *args):
    """
    Nudge the IK foot's knee_twist by +0.25; if that increases the angle
    between object1 and object2, step back by 0.5 (net -0.25) instead.
    Keys knee_twist either way. Called iteratively by match_singleFrame
    until the angle converges.
    """
    twistAttr = character + ":ik_foot_anim_" + side + ".knee_twist"
    startValue = cmds.getAttr(twistAttr)
    #try a small positive twist first
    cmds.setAttr(twistAttr, startValue + .25)
    #if the angle got worse, go the other way (net -0.25 from the start)
    if self.getAngleBetween(object1, object2) > angle:
        cmds.setAttr(twistAttr, startValue - .5)
    cmds.setKeyframe(twistAttr)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def ikHeelSolve(self, character, side, *args):
    """
    Copy the duplicated ball joint's rotateZ onto the character's toe wiggle
    control and key it, so an FK->IK foot match preserves the toe bend.
    No-op when no duplicated ball joint exists for this side.
    """
    ballJoint = "ball_" + side
    if not cmds.objExists(ballJoint):
        return
    print("ikHeelSolve")
    wiggleAttr = character + ":toe_wiggle_ctrl_" + side + ".rotateZ"
    cmds.setAttr(wiggleAttr, cmds.getAttr(ballJoint + ".rz"))
    cmds.setKeyframe(wiggleAttr)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def getAngleBetween(self, object1, object2):
    """
    Return the angle, in degrees, between the world-space position vectors
    (from the origin) of object1 and object2.

    Positions are queried with cmds.xform(ws = True); the vectors are
    normalized and the angle recovered via acos of their dot product.
    """
    import math
    import maya.api.OpenMaya as om
    point1 = cmds.xform(object1, t = True, q = True, ws = True)
    vector1 = om.MVector(point1)
    point2 = cmds.xform(object2, t = True, q = True, ws = True)
    vector2 = om.MVector(point2)
    dotProduct = vector1.normal() * vector2.normal()
    #BUGFIX: floating-point error can push the dot product fractionally
    #outside [-1, 1] for (anti)parallel vectors, making math.acos raise
    #ValueError — clamp into acos's domain first
    dotProduct = max(-1.0, min(1.0, dotProduct))
    angle = math.acos(dotProduct) * 180 / math.pi
    return angle
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def match_singleFrame(self, limb, side, matchFrom, matchTo, *args):
autoKeyOn = False
if cmds.autoKeyframe(q = True, state = True):
cmds.autoKeyframe(state = False)
autoKeyOn = True
#get the passed in limb, and duplicate the skeleton for that limb's current mode
#for example, if limb is left arm, and matchFrom is IK, then dupe the driver joints (in IK pose) for the left arm and parent to world
character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q = True, ann = True)
armBones = ["upperarm_", "lowerarm_", "hand_"]
spineBones = ["driver_spine_01", "driver_spine_02", "driver_spine_03", "driver_spine_04", "driver_spine_05"]
legBones = ["thigh_", "calf_", "foot_", "ball_"]
#grab current selection
currentSelection = cmds.ls(sl = True)
#arm matching
if limb == "arm":
#setup constraints based on mode info
if matchTo == "IK":
constraint1 = cmds.orientConstraint(character + ":ik_upperarm_fk_matcher_" + side, character + ":fk_arm_" + side + "_anim")[0]
constraint2 = cmds.orientConstraint(character + ":ik_lowerarm_fk_matcher_" + side, character + ":fk_elbow_" + side + "_anim")[0]
constraint3 = cmds.orientConstraint(character + ":hand_match_loc_" + side, character + ":fk_wrist_" + side + "_anim")[0]
cmds.setKeyframe(character + ":fk_arm_" + side + "_anim")
cmds.setKeyframe(character + ":fk_elbow_" + side + "_anim")
cmds.setKeyframe(character + ":fk_wrist_" + side + "_anim")
cmds.delete(constraint1)
cmds.delete(constraint2)
cmds.delete(constraint3)
if matchTo == "FK":
dupeNodes = cmds.duplicate(character + ":driver_" + armBones[0] + side)
parent = cmds.listRelatives(dupeNodes[0], parent = True)
if parent != None:
cmds.parent(dupeNodes[0], world = True)
cmds.pointConstraint("driver_hand_" + side, character + ":ik_wrist_" + side + "_anim")
constraint = cmds.orientConstraint(character + ":fk_wrist_" + side + "_anim", character + ":ik_wrist_" + side + "_anim")[0]
# CRA NEW CODE - For making sure the elbow match doesnt cause any weird twisting issues.
if cmds.objExists(character + ":elbowswitch_"+side):
print "NEW CODE WORKING"
ptCnst = cmds.pointConstraint(character + ":elbowswitch_"+side, character + ":ik_elbow_" + side + "_anim")
else:
cmds.pointConstraint("driver_lowerarm_"+side, character + ":ik_elbow_" + side + "_anim")
# CRA END NEW CODE
if side == "l":
cmds.setAttr(constraint + ".offsetX", 90)
if side == "r":
cmds.setAttr(constraint + ".offsetX", -90)
cmds.setKeyframe(character + ":ik_wrist_" + side + "_anim")
cmds.setKeyframe(character + ":ik_elbow_" + side + "_anim")
# CRA NEW CODE
if cmds.objExists(character + ":elbowswitch_"+side):
cmds.delete(ptCnst)
# CRA END NEW CODE
cmds.delete(constraint)
#delete the original mode pose joints
cmds.delete(dupeNodes[0])
if limb == "clav":
#setup constraints based on mode info
if matchTo == "IK":
constraint1 = cmds.orientConstraint(character + ":ik_clavicle_" + side, character + ":fk_clavicle_" + side + "_anim")[0]
cmds.setKeyframe(character + ":fk_clavicle_" + side + "_anim")
cmds.delete(constraint1)
if matchTo == "FK":
constraint1 = cmds.pointConstraint(character + ":driver_upperarm_" + side, character + ":clavicle_" + side + "_anim")[0]
cmds.setKeyframe(character + ":clavicle_" + side + "_anim")
cmds.delete(constraint1)
#leg matching
attrList = cmds.listAttr(character + ":Skeleton_Settings", shortNames=True)
for i in attrList:
if i == "legStyle":
legStyle = cmds.getAttr(character+":Skeleton_Settings.legStyle")
if limb == "leg":
dupeNodes = cmds.duplicate(character + ":" + legBones[0] + side)
parent = cmds.listRelatives(dupeNodes[0], parent = True)
if parent != None:
cmds.parent(dupeNodes[0], world = True)
#setup constraints based on mode info
if matchTo == "IK":
cmds.orientConstraint("thigh_" + side, character + ":fk_thigh_" + side + "_anim")
cmds.orientConstraint("calf_" + side, character + ":fk_calf_" + side + "_anim")
if legStyle == 1:
cmds.orientConstraint("heel_" + side, character + ":fk_heel_" + side + "_anim")
cmds.orientConstraint("foot_" + side, character + ":fk_foot_" + side + "_anim")
if cmds.objExists("ball_" + side):
cmds.orientConstraint("ball_" + side, character + ":fk_ball_" + side + "_anim")
cmds.setKeyframe(character + ":fk_thigh_" + side + "_anim")
cmds.setKeyframe(character + ":fk_calf_" + side + "_anim")
if legStyle == 1:
cmds.setKeyframe(character + ":fk_heel_" + side + "_anim")
cmds.setKeyframe(character + ":fk_foot_" + side + "_anim")
if cmds.objExists("ball_" + side):
cmds.setKeyframe(character + ":fk_ball_" + side + "_anim")
if matchTo == "FK":
cmds.select(clear = True)
loc1 = character + ":matchLoc_knee_1_" + side
loc2 = character + ":matchLoc_knee_2_" + side
#setup constraints
for ctrl in [":heel_ctrl_" + side, ":toe_wiggle_ctrl_" + side, ":toe_tip_ctrl_" + side]:
cmds.select(character + ctrl, add = True)
self.resetSelection()
cmds.setKeyframe()
loc = cmds.spaceLocator(name = character + ":ik_foot_anim_" + side + "_locator")[0]
constraint = cmds.parentConstraint("foot_" + side, loc)[0]
cmds.delete(constraint)
cmds.pointConstraint(loc, character + ":ik_foot_anim_" + side)
constraint = cmds.orientConstraint("foot_" + side, character + ":ik_foot_anim_" + side)[0]
if side == "l":
cmds.setAttr(constraint + ".offsetY", 90)
if side == "r":
cmds.setAttr(constraint + ".offsetX", 180)
cmds.setAttr(constraint + ".offsetY", 90)
#heel solve
cmds.setKeyframe(character + ":ik_foot_anim_" + side)
self.ikHeelSolve(character, side)
cmds.refresh(force = True)
#knee solve
for x in range(1500):
angle = self.getAngleBetween(loc1, loc2)
if abs(angle) > .25:
self.ikKneeSolve(character, side, angle, loc1, loc2)
else:
break
cmds.delete(loc)
cmds.setKeyframe(character + ":ik_foot_anim_" + side)
#delete the original mode pose joints
cmds.delete(dupeNodes[0])
#spine matching
if limb == "spine":
dupeNodes = cmds.duplicate(character + ":" + spineBones[0])
parent = cmds.listRelatives(dupeNodes[0], parent = True)
if parent != None:
cmds.parent(dupeNodes[0], world = True)
#setup constraints based on mode info
# Switching to FK
if matchTo == "IK":
#check to see if user has any project specific match scripts (Fortnite custom)
if cmds.objExists(character + ":spine_02_anim.driven"):
#furthermore, if the plusMinusAvg nodes exist, then import custom matching
if os.path.exists(self.mayaToolsDir + "/General/Scripts/fortniteRotoMatch.py"):
result = cmds.confirmDialog(title = "Match Options", icon = "question", message = "Which match option would you like to use?", button = ["Standard", "Rotoscope"])
if result == "Standard":
try:
cmds.setAttr(character + ":spine_02_anim.driven", 0)
cmds.setAttr(character + ":spine_04_anim.driven", 0)
except:
pass
if cmds.objExists(character + ":spine_01_anim"):
cmds.orientConstraint("driver_spine_01", character + ":spine_01_anim")
cmds.setKeyframe(character + ":spine_01_anim")
if cmds.objExists(character + ":spine_02_anim"):
cmds.orientConstraint("driver_spine_02", character + ":spine_02_anim")
cmds.setKeyframe(character + ":spine_02_anim")
if cmds.objExists(character + ":spine_03_anim"):
cmds.orientConstraint("driver_spine_03", character + ":spine_03_anim")
cmds.setKeyframe(character + ":spine_03_anim")
if cmds.objExists(character + ":spine_04_anim"):
cmds.orientConstraint("driver_spine_04", character + ":spine_04_anim")
cmds.setKeyframe(character + ":spine_04_anim")
if cmds.objExists(character + ":spine_05_anim"):
cmds.orientConstraint("driver_spine_05", character + ":spine_05_anim")
cmds.setKeyframe(character + ":spine_05_anim")
#for each in spineBones:
#if cmds.objExists(character + ":" + each + "_anim"):
#cmds.setKeyframe(character + ":" + each + "_anim")
if result == "Rotoscope":
import fortniteRotoMatch as fnRm
reload(fnRm)
fnRm.RotoSpineMatch(character)
else:
if cmds.objExists(character + ":spine_01_anim"):
cmds.orientConstraint("driver_spine_01", character + ":spine_01_anim")
cmds.setKeyframe(character + ":spine_01_anim")
if cmds.objExists(character + ":spine_02_anim"):
cmds.orientConstraint("driver_spine_02", character + ":spine_02_anim")
cmds.setKeyframe(character + ":spine_02_anim")
if cmds.objExists(character + ":spine_03_anim"):
cmds.orientConstraint("driver_spine_03", character + ":spine_03_anim")
cmds.setKeyframe(character + ":spine_03_anim")
if cmds.objExists(character + ":spine_04_anim"):
cmds.orientConstraint("driver_spine_04", character + ":spine_04_anim")
cmds.setKeyframe(character + ":spine_04_anim")
if cmds.objExists(character + ":spine_05_anim"):
cmds.orientConstraint("driver_spine_05", character + ":spine_05_anim")
cmds.setKeyframe(character + ":spine_05_anim")
#for each in spineBones:
#if cmds.objExists(character + ":" + each + "_anim"):
#cmds.setKeyframe(character + ":" + each + "_anim")
# Switching to IK
if matchTo == "FK":
if cmds.objExists(character + ":chest_ik_anim"):
#find highest spine joint
numSpineBones = cmds.getAttr(character + ":Skeleton_Settings.numSpineBones")
if numSpineBones == 5:
endSpine = "driver_spine_05"
midSpine = ["driver_spine_03"]
if numSpineBones == 4:
endSpine = "driver_spine_04"
midSpine = ["driver_spine_02", "driver_spine_03"]
if numSpineBones == 3:
endSpine = "driver_spine_03"
midSpine = ["driver_spine_02"]
if cmds.objExists("chest_ik_anim_MATCH"):
cmds.parentConstraint("chest_ik_anim_MATCH", character + ":chest_ik_anim")
cmds.parentConstraint("mid_ik_anim_MATCH", character + ":mid_ik_anim")
else:
cmds.parentConstraint(endSpine, character + ":chest_ik_anim")
for each in midSpine:
cmds.parentConstraint(each, character + ":mid_ik_anim")
cmds.setKeyframe([character + ":chest_ik_anim", character + ":mid_ik_anim"])
#delete the original mode pose joints
cmds.delete(dupeNodes[0])
#reselect selection before entering process
if len(currentSelection) > 0:
cmds.select(currentSelection)
if autoKeyOn:
cmds.autoKeyframe(state = True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def match_frameRange_bakeMotionToRefJoints(self, character, parts, start, end, *args):
    """Duplicate the skeleton joints for each selected body part, constrain the
    duplicates to the character's live joints, and bake that motion down over
    [start, end].

    character -- the character's namespace (string, no trailing colon)
    parts     -- list of UI part labels, e.g. "<character>:Left Arm"
    start/end -- frame range to bake (inclusive)

    Returns the list of baked duplicate joints so the caller can match the rig
    to them and clean them up afterwards.
    """
    constraints = []
    bakeJoints = []
    armBones = ["upperarm_", "lowerarm_", "hand_"]
    spineBones = ["spine_01", "spine_02", "spine_03", "spine_04", "spine_05"]
    legBones = ["thigh_", "calf_", "foot_", "ball_"]

    def _setupLimb(rootJoint):
        # Duplicate the hierarchy rooted at the named source joint, unparent
        # the duplicate root to world space, and parent-constrain each
        # duplicate to its namespaced source joint.
        dupeNodes = cmds.duplicate(character + ":" + rootJoint)
        for node in dupeNodes:
            bakeJoints.append(node)
        parent = cmds.listRelatives(dupeNodes[0], parent=True)
        if parent != None:
            cmds.parent(dupeNodes[0], world=True)
        for node in dupeNodes:
            # Guard every constraint (previously only the leg branches did):
            # some duplicated children (e.g. twist/roll joints) may have no
            # matching source and would otherwise abort the whole bake.
            try:
                constraint = cmds.parentConstraint(character + ":" + node, node)[0]
                constraints.append(constraint)
            except Exception:
                pass

    # go through each part, and duplicate the appropriate part of the skeleton
    # for constraining the rig to
    for part in parts:
        if part == character + ":" + "Left Arm":
            _setupLimb(armBones[0] + "l")
        if part == character + ":" + "Right Arm":
            _setupLimb(armBones[0] + "r")
        if part == character + ":" + "Left Leg":
            _setupLimb(legBones[0] + "l")
        if part == character + ":" + "Right Leg":
            _setupLimb(legBones[0] + "r")
        if part == character + ":" + "Spine":
            # duplicate the current skeleton pose for the spine
            dupeNodes = cmds.duplicate(character + ":" + spineBones[0])
            # find the highest spine joint present among the (un-namespaced)
            # duplicates, then delete everything hanging below it
            lastSpine = "spine_02"
            if cmds.objExists("spine_03"):
                lastSpine = "spine_03"
            if cmds.objExists("spine_04"):
                lastSpine = "spine_04"
            if cmds.objExists("spine_05"):
                lastSpine = "spine_05"
            children = cmds.listRelatives(lastSpine, children=True)
            # listRelatives returns None (not []) when there are no children
            if children:
                for child in children:
                    cmds.delete(child)
            cmds.select(dupeNodes[0], hi=True)
            newNodes = cmds.ls(sl=True)
            for node in newNodes:
                bakeJoints.append(node)
            parent = cmds.listRelatives(newNodes[0], parent=True)
            if parent != None:
                cmds.parent(newNodes[0], world=True)
            # bake anim data onto dupe nodes
            for node in newNodes:
                try:
                    constraint = cmds.parentConstraint(character + ":" + node, node)[0]
                    constraints.append(constraint)
                except Exception:
                    pass

    # bake down all bakeJoints, then remove the temporary constraints
    if len(bakeJoints) > 0:
        cmds.select(clear=True)
        for each in bakeJoints:
            cmds.select(each, add=True)
        cmds.bakeResults(simulation=True, t=(start, end), preserveOutsideKeys=True)
        for each in constraints:
            cmds.delete(each)

    # return bake joints
    return bakeJoints
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def ikHeelSolve_frameRange(self, character, side, start, end, *args):
    """Step through every frame from start to end (inclusive) and run the
    single-frame IK heel solve on the given character/side."""
    frame = int(start)
    stopFrame = int(end + 1)
    while frame < stopFrame:
        cmds.currentTime(frame)
        self.ikHeelSolve(character, side)
        frame += 1
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def checkDistance(self, character, distanceNode, distanceAttr, originalValue, side):
    """Recursively adjust the foot control's knee_twist attribute to minimize
    the distance read from distanceNode.

    Increments knee_twist by 1 while the distance keeps shrinking; if a step
    overshoots (distance grows), backs off by 2 and recurses again. Recursion
    stops once the measured distance drops to 1 or below, or once an increment
    neither shrinks nor grows the distance.

    NOTE(review): the bare except silently swallows any failure (locked attr,
    missing node, recursion limit) — consider narrowing it. Also assumes
    self.progWindow is an open progressWindow; TODO confirm.
    """
    if distanceAttr > 1:
        currentAttr = cmds.getAttr(character + ":ik_foot_anim_" + side + ".knee_twist")
        try:
            # try stepping the twist forward by one degree and key it
            cmds.setAttr(character + ":ik_foot_anim_" + side + ".knee_twist", currentAttr + 1)
            cmds.setKeyframe(character + ":ik_foot_anim_" + side + ".knee_twist")
            newDist = cmds.getAttr(distanceNode + ".distance")
            if newDist < originalValue:
                # still converging: keep stepping in the same direction
                self.checkDistance(character, distanceNode, newDist, newDist, side)
                cmds.progressWindow(self.progWindow, edit=True, progress= (cmds.progressWindow(q = True, progress = True) + 3), status= "Solving IK Pole Vectors" )
            if newDist > originalValue:
                # overshot: undo this step plus one more (net -1) and recurse
                cmds.setAttr(character + ":ik_foot_anim_" + side + ".knee_twist", currentAttr - 2)
                cmds.setKeyframe(character + ":ik_foot_anim_" + side + ".knee_twist")
                newDist = cmds.getAttr(distanceNode + ".distance")
                self.checkDistance(character, distanceNode, newDist, newDist, side)
                cmds.progressWindow(self.progWindow, edit=True, progress= (cmds.progressWindow(q = True, progress = True) + 3), status= "Solving IK Pole Vectors" )
        except:
            pass
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def ikKneeSolve_frameRange(self, character, side, start, end, *args):
    """Intended to run the IK knee solve over a frame range.

    NOTE(review): this is broken as written — `angle`, `object1`, and
    `object2` are undefined in this scope, so any call raises NameError. It
    also never iterates start..end (compare ikHeelSolve_frameRange above).
    ikKneeSolve's callers elsewhere compute the angle from two locators per
    frame; reconstructing that here requires the locator setup — TODO confirm
    intended behavior before fixing.
    """
    self.ikKneeSolve(character, side, angle, object1, object2)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def match_frameRange_UI_Process(self, *args):
    """Callback for the 'Process' button of the match-over-frame-range UI.

    Reads the selected body parts, the match direction (fk or ik), and the
    frame range from the UI widgets, then runs the single-frame match on each
    selected part for every frame in the range.
    """
    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q=True, ann=True)

    # get the body parts to match from the text scroll list
    parts = cmds.textScrollList(self.widgets["matchFrameRange_RigList"], q=True, si=True)
    if parts is None:
        cmds.warning("Nothing selected in the parts list.")
        return

    # get match method (annotation on the selected radio button: "fk" or "ik")
    button = cmds.iconTextRadioCollection(self.widgets["matchFrameRange_RadioCollection"], q=True, sl=True)
    method = cmds.iconTextRadioButton(button, q=True, ann=True)

    # get frame range
    start = cmds.intFieldGrp(self.widgets["matchFrameRange_FrameRange"], q=True, value1=True)
    end = cmds.intFieldGrp(self.widgets["matchFrameRange_FrameRange"], q=True, value2=True)

    # map each UI part label to the (limb, side) args of match_singleFrame
    limbForPart = {
        character + ":" + "Right Leg": ("leg", "r"),
        character + ":" + "Left Leg": ("leg", "l"),
        character + ":" + "Left Arm": ("arm", "l"),
        character + ":" + "Right Arm": ("arm", "r"),
        character + ":" + "Spine": ("spine", None),
    }

    # for every frame in the range, match each selected part
    for frame in range(start, end + 1):
        cmds.currentTime(frame)
        for part in parts:
            entry = limbForPart.get(part)
            if entry is None:
                continue
            limb, side = entry
            if method == "fk":
                self.match_singleFrame(limb, side, "FK", "IK")
            if method == "ik":
                self.match_singleFrame(limb, side, "IK", "FK")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def match_frameRange_UI_Cancel(self, *args):
    """Callback for the 'Cancel' button: close the match-over-frame-range window."""
    cmds.deleteUI(self.widgets["matchFrameRange_Window"])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def match_frameRange_UI(self, *args):
    """Build and show the 'Match Over Frame Range' window.

    Offers a list of matchable limbs for the active character, a frame-range
    field, FK<->IK direction radio buttons, IK solve checkboxes, and
    Process/Cancel buttons wired to the corresponding callbacks.
    """
    # tear down any previous instance of the window
    if cmds.window("matchOverFrameRange_UI", exists=True):
        cmds.deleteUI("matchOverFrameRange_UI")

    character = cmds.symbolButton(self.widgets["activeCharacterThumb"], q=True, ann=True)
    self.widgets["matchFrameRange_Window"] = cmds.window("matchOverFrameRange_UI", w=500, h=300, sizeable=True, title="Match Over Frame Range", titleBarMenu=False)

    # main layout
    mainLayout = cmds.formLayout(w=500, h=300)
    self.widgets["matchFrameRange_MainLayout"] = mainLayout

    # rig part list, populated with the current character's matchable limbs
    self.widgets["matchFrameRange_RigList"] = cmds.textScrollList(w=200, h=250, parent=mainLayout, allowMultiSelection=True)
    for part in ["Left Arm", "Right Arm", "Left Leg", "Right Leg", "Spine"]:
        cmds.textScrollList(self.widgets["matchFrameRange_RigList"], edit=True, append=character + ":" + part)

    # frame range fields, defaulted to the playback range
    start = cmds.playbackOptions(q=True, min=True)
    end = cmds.playbackOptions(q=True, max=True)
    self.widgets["matchFrameRange_FrameRange"] = cmds.intFieldGrp(numberOfFields=2, label='Frame Range:', value1=start, value2=end, cw=[(1, 100), (2, 80), (3, 80)])

    # radio buttons for match method (annotation carries the method keyword)
    self.widgets["matchFrameRange_RadioCollection"] = cmds.iconTextRadioCollection()
    self.widgets["matchFrameRange_FkToIk"] = cmds.iconTextRadioButton(ann="fk", select=True, st='iconOnly', image=self.mayaToolsDir + "/General/Icons/ART/fktoik_off.bmp", selectionImage=self.mayaToolsDir + "/General/Icons/ART/fktoik_on.bmp", w=125, h=50, collection=self.widgets["matchFrameRange_RadioCollection"], parent=mainLayout)
    self.widgets["matchFrameRange_IkToFk"] = cmds.iconTextRadioButton(ann="ik", st='iconOnly', image=self.mayaToolsDir + "/General/Icons/ART/iktofk_off.bmp", selectionImage=self.mayaToolsDir + "/General/Icons/ART/iktofk_on.bmp", w=125, h=50, collection=self.widgets["matchFrameRange_RadioCollection"], parent=mainLayout)

    # process and cancel buttons
    self.widgets["matchFrameRange_Process"] = cmds.button(w=125, h=50, label="Process", c=self.match_frameRange_UI_Process)
    self.widgets["matchFrameRange_Cancel"] = cmds.button(w=125, h=50, label="Cancel", c=self.match_frameRange_UI_Cancel)

    # ik solve options
    label = cmds.text(label="IK Solve Options:", font="boldLabelFont")
    self.widgets["matchFR_RollSolveCB"] = cmds.checkBox(label="Solve Foot Roll", v=False, parent=mainLayout)
    self.widgets["matchFR_KneeSolveCB"] = cmds.checkBox(label="Solve Knee Vectors", v=True, parent=mainLayout)

    # place UI widgets: (widget, edge1, offset1, edge2, offset2)
    placements = [
        (self.widgets["matchFrameRange_RigList"], 'left', 10, 'top', 25),
        (self.widgets["matchFrameRange_FrameRange"], 'left', 210, 'top', 25),
        (self.widgets["matchFrameRange_FkToIk"], 'left', 230, 'top', 75),
        (self.widgets["matchFrameRange_IkToFk"], 'right', 10, 'top', 75),
        (self.widgets["matchFrameRange_Process"], 'left', 230, 'bottom', 25),
        (self.widgets["matchFrameRange_Cancel"], 'right', 10, 'bottom', 25),
        (label, 'left', 230, 'bottom', 140),
        (self.widgets["matchFR_RollSolveCB"], 'left', 230, 'bottom', 115),
        (self.widgets["matchFR_KneeSolveCB"], 'right', 10, 'bottom', 115),
    ]
    for widget, edge1, offset1, edge2, offset2 in placements:
        cmds.formLayout(mainLayout, edit=True, af=[(widget, edge1, offset1), (widget, edge2, offset2)])

    # show the window
    cmds.showWindow(self.widgets["matchFrameRange_Window"])
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def control_scale_init(self, *args):
    """Launch a small window with shrink/grow buttons that scale the CVs of
    the selected control curves via control_scale_execute."""
    # tear down any previous instance of the window
    if cmds.window("controlScaleWindow", exists=True):
        cmds.deleteUI("controlScaleWindow")

    window = cmds.window("controlScaleWindow", title="CV Scale", w=150, h=50, sizeable=True, mnb=False, mxb=False)
    self.widgets["controlScaleWindow"] = window
    layout = cmds.formLayout(w=150, h=50)

    # shrink by 10% / grow by 10% per click
    shrinkButton = cmds.button(label="v", w=50, h=30, c=partial(self.control_scale_execute, .9))
    growButton = cmds.button(label="^", w=50, h=30, c=partial(self.control_scale_execute, 1.1))

    cmds.formLayout(layout, edit=True, af=[(shrinkButton, 'left', 25), (shrinkButton, 'top', 10)])
    cmds.formLayout(layout, edit=True, af=[(growButton, 'right', 25), (growButton, 'top', 10)])
    cmds.showWindow(window)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def control_scale_execute(self, amount, *args):
    """Scale the CVs of every selected anim control curve by *amount*,
    then restore the original selection."""
    originalSelection = cmds.ls(sl=True)
    cmds.select(clear=True)

    # gather the CVs of every control whose name contains "anim"
    for control in originalSelection:
        if "anim" in control:
            cmds.select(control + ".cv[*]", add=True)

    # scale all gathered CVs about the object's center point
    cmds.scale(amount, amount, amount, relative=True, ocp=True)

    # restore the selection the user had before
    cmds.select(clear=True)
    for control in originalSelection:
        cmds.select(control, add=True)
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def animHelp(self, *args):
    """Open the online animation-tool documentation in the default browser."""
    cmds.launch(web = "https://docs.unrealengine.com/latest/INT/Engine/Content/Tools/MayaRiggingTool/RigTool_Animation/index.html")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def setup_ik_driven_fk_rig(self, *args):
    """Build a temporary RP-IK rig that drives the character's FK arm controls.

    Requires an fk_wrist control of the active character to be selected;
    duplicates the FK arm joint chain, puts an ikRPsolver handle on it, and
    orient-constrains the FK arm/elbow/wrist controls to the duplicated
    joints so the FK arm can be posed like IK.
    """
    character = cmds.symbolButton('activeCharacterThumb', q = True, ann = True)
    # check to make sure FK wrist control is selected
    selection = cmds.ls(sl = True)[0]
    if selection.find(character + ":fk_wrist_") != -1:
        # extract the side token ("l"/"r") from the selected control name
        side = selection.partition(":fk_wrist_")[2].partition("_")[0]
        # duplicate FK arm joints (parents-only) and rebuild the chain
        upArm = cmds.duplicate(character + ":fk_upperarm_" + side, po = True, name = "ik_driver_fk_upperarm_" + side)[0]
        lowArm = cmds.duplicate(character + ":fk_lowerarm_" + side, po = True, name = "ik_driver_fk_lowerarm_" + side)[0]
        wrist = cmds.duplicate(character + ":fk_hand_" + side, po = True, name = "ik_driver_fk_hand_" + side)[0]
        cmds.parent(lowArm, upArm)
        cmds.parent(wrist, lowArm)
        # set preferred angle on elbow so the RP solver bends the right way
        cmds.setAttr(lowArm + ".preferredAngleZ", -90)
        cmds.setAttr(upArm + ".v", 0)
        # create rp ik
        rpIkHandle = cmds.ikHandle(name = "ikdriver_fk_arm_ikHandle_" + side, solver = "ikRPsolver", sj = upArm, ee = wrist)[0]
        cmds.setAttr(rpIkHandle + ".v", 0)
        # parent ik hand under fk_wrist_r_anim
        # NOTE(review): the comment above suggests a cmds.parent call was
        # intended here, but the handle is only selected — confirm whether
        # this step is unfinished or the select is all that is needed.
        cmds.select(rpIkHandle)
        # constrain fk controls to joints
        cmds.orientConstraint(upArm, character + ":fk_arm_" + side + "_anim", mo = True)
        cmds.orientConstraint(lowArm, character + ":fk_elbow_" + side + "_anim", mo = True)
        cmds.orientConstraint(wrist, character + ":fk_wrist_" + side + "_anim", mo = True)
        # switch to the move tool so the user can translate the IK handle
        cmds.setToolTo( 'moveSuperContext' )
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def destroy_ik_driven_fk_rig(self, *args):
    """Tear down the temporary IK-driven FK arm rig: key the FK controls at
    their current (IK-driven) pose, delete the driver joints and handle, then
    reselect the FK wrist control and return to the rotate tool."""
    character = cmds.symbolButton('activeCharacterThumb', q=True, ann=True)
    selected = cmds.ls(sl=True)[0]
    selectedSide = selected.partition("ikdriver_fk_arm_ikHandle_")[2]

    for side in ("l", "r"):
        handle = "ikdriver_fk_arm_ikHandle_" + side
        if not cmds.objExists(handle):
            continue
        # bake the driven pose onto the FK controls before deleting the driver
        for control in ("fk_arm_", "fk_elbow_", "fk_wrist_"):
            cmds.setKeyframe(character + ":" + control + side + "_anim")
        cmds.delete([handle, "ik_driver_fk_upperarm_" + side])

    cmds.select(character + ":fk_wrist_" + selectedSide + "_anim")
    cmds.setToolTo("RotateSuperContext")
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def eulerFilterAll(self, *args):
    """Select all of the character's controls, then euler-filter every
    animation curve on them (cmds.filterCurve defaults to the euler filter)."""
    self.selectAll()
    cmds.selectKey()
    cmds.filterCurve()
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def eulerFilterSelected(self, *args):
    """Euler-filter the animation curves of the currently selected controls
    (cmds.filterCurve defaults to the euler filter)."""
    cmds.selectKey()
    cmds.filterCurve()
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def toggleVisibilityOnSelectedControlGroups(self, name, *args):
    """Toggle shape visibility on every control listed under a tree-view item.

    name    -- the character namespace (also keys the "_treeViewWidget" entry
               in self.widgets)
    args[0] -- the tree-view item whose child entries name the controls

    Controls whose shape visibility can't be read/set (locked, connected, or
    no shape node) are skipped silently, matching the original best-effort
    behavior.
    """
    # get all controls below the given tree-view item
    children = cmds.treeView(self.widgets[name + "_treeViewWidget"], q = True, children = args[0])
    # treeView returns None (not []) for an item with no children
    if not children:
        return
    for child in children:
        if cmds.objExists(name + ":" + child):
            try:
                shape = cmds.listRelatives(name + ":" + child, shapes = True)[0]
                visibility = cmds.getAttr(shape + ".v")
                # flip the visibility state
                cmds.setAttr(shape + ".v", 0 if visibility else 1)
            except Exception:
                # best effort: skip shapeless or locked controls
                pass
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
def setupButtonAttrsOnControls(self):
#when the UI is launched, we need to add an attribute to each control that tells us the corresponding button name
characters = self.getCharacters()
#add string attrs to controls
for character in characters:
#head
try:
if cmds.objExists(character + ":" + "head_fk_anim.buttonName"):
cmds.setAttr(character + ":" + "head_fk_anim.buttonName",self.widgets[character + "_headPickerButton"], type = "string")
else:
cmds.select(character + ":" + "head_fk_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "head_fk_anim.buttonName",self.widgets[character + "_headPickerButton"], type = "string")
except:
pass
#neck1
try:
if cmds.objExists(character + ":" + "neck_01_fk_anim"):
if cmds.objExists(character + ":" + "neck_01_fk_anim.buttonName"):
cmds.setAttr(character + ":" + "neck_01_fk_anim.buttonName",self.widgets[character + "_neck1_PickerButton"], type = "string")
else:
cmds.select(character + ":" + "neck_01_fk_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "neck_01_fk_anim.buttonName",self.widgets[character + "_neck1_PickerButton"], type = "string")
else:
if cmds.objExists(character + ":" + "neck_fk_anim.buttonName"):
cmds.setAttr(character + ":" + "neck_fk_anim.buttonName",self.widgets[character + "_neck1_PickerButton"], type = "string")
else:
cmds.select(character + ":" + "neck_fk_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "neck_fk_anim.buttonName",self.widgets[character + "_neck1_PickerButton"], type = "string")
except:
pass
#neck2
try:
if cmds.objExists(character + ":" + "neck_02_fk_anim"):
if cmds.objExists(character + ":" + "neck_02_fk_anim.buttonName"):
cmds.setAttr(character + ":" + "neck_02_fk_anim.buttonName",self.widgets[character + "_neck2_PickerButton"], type = "string")
else:
cmds.select(character + ":" + "neck_02_fk_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "neck_02_fk_anim.buttonName",self.widgets[character + "_neck2_PickerButton"], type = "string")
except:
pass
#neck3
try:
if cmds.objExists(character + ":" + "neck_03_fk_anim"):
if cmds.objExists(character + ":" + "neck_03_fk_anim.buttonName"):
cmds.setAttr(character + ":" + "neck_03_fk_anim.buttonName",self.widgets[character + "_neck3_PickerButton"], type = "string")
else:
cmds.select(character + ":" + "neck_03_fk_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "neck_03_fk_anim.buttonName",self.widgets[character + "_neck3_PickerButton"], type = "string")
except:
pass
#fk spine 1
try:
if cmds.objExists(character + ":" + "spine_01_anim"):
if cmds.objExists(character + ":" + "spine_01_anim.buttonName"):
cmds.setAttr(character + ":" + "spine_01_anim.buttonName",self.widgets[character + "_spine1_PickerButton"], type = "string")
else:
cmds.select(character + ":" + "spine_01_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "spine_01_anim.buttonName",self.widgets[character + "_spine1_PickerButton"], type = "string")
except:
pass
#fk spine 2
try:
if cmds.objExists(character + ":" + "spine_02_anim"):
if cmds.objExists(character + ":" + "spine_02_anim.buttonName"):
cmds.setAttr(character + ":" + "spine_02_anim.buttonName",self.widgets[character + "_spine2_PickerButton"], type = "string")
else:
cmds.select(character + ":" + "spine_02_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "spine_02_anim.buttonName",self.widgets[character + "_spine2_PickerButton"], type = "string")
except:
pass
#fk spine 3
try:
if cmds.objExists(character + ":" + "spine_03_anim"):
if cmds.objExists(character + ":" + "spine_03_anim.buttonName"):
cmds.setAttr(character + ":" + "spine_03_anim.buttonName",self.widgets[character + "_spine3_PickerButton"], type = "string")
else:
cmds.select(character + ":" + "spine_03_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "spine_03_anim.buttonName",self.widgets[character + "_spine3_PickerButton"], type = "string")
except:
pass
#fk spine 4
try:
if cmds.objExists(character + ":" + "spine_04_anim"):
if cmds.objExists(character + ":" + "spine_04_anim.buttonName"):
cmds.setAttr(character + ":" + "spine_04_anim.buttonName",self.widgets[character + "_spine4_PickerButton"], type = "string")
else:
cmds.select(character + ":" + "spine_04_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "spine_04_anim.buttonName",self.widgets[character + "_spine4_PickerButton"], type = "string")
except:
pass
#fk spine 5
try:
if cmds.objExists(character + ":" + "spine_05_anim"):
if cmds.objExists(character + ":" + "spine_05_anim.buttonName"):
cmds.setAttr(character + ":" + "spine_05_anim.buttonName",self.widgets[character + "_spine5_PickerButton"], type = "string")
else:
cmds.select(character + ":" + "spine_05_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "spine_05_anim.buttonName",self.widgets[character + "_spine5_PickerButton"], type = "string")
except:
pass
#ik spine
try:
if cmds.objExists(character + ":" + "mid_ik_anim"):
if cmds.objExists(character + ":" + "mid_ik_anim.buttonName"):
cmds.setAttr(character + ":" + "mid_ik_anim.buttonName",self.widgets[character + "_ikSpineMidPickerButton"], type = "string")
else:
cmds.select(character + ":" + "mid_ik_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "mid_ik_anim.buttonName",self.widgets[character + "_ikSpineMidPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "chest_ik_anim"):
if cmds.objExists(character + ":" + "chest_ik_anim.buttonName"):
cmds.setAttr(character + ":" + "chest_ik_anim.buttonName",self.widgets[character + "_ikSpineTopPickerButton"], type = "string")
else:
cmds.select(character + ":" + "chest_ik_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "chest_ik_anim.buttonName",self.widgets[character + "_ikSpineTopPickerButton"], type = "string")
except:
pass
#body
try:
if cmds.objExists(character + ":" + "body_anim.buttonName"):
cmds.setAttr(character + ":" + "body_anim.buttonName",self.widgets[character + "_bodyPickerButton"], type = "string")
else:
cmds.select(character + ":" + "body_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "body_anim.buttonName",self.widgets[character + "_bodyPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "hip_anim.buttonName"):
cmds.setAttr(character + ":" + "hip_anim.buttonName",self.widgets[character + "_pelvisPickerButton"], type = "string")
else:
cmds.select(character + ":" + "hip_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "hip_anim.buttonName",self.widgets[character + "_pelvisPickerButton"], type = "string")
except:
pass
#clavicles
try:
if cmds.objExists(character + ":" + "clavicle_l_anim.buttonName"):
cmds.setAttr(character + ":" + "clavicle_l_anim.buttonName",self.widgets[character + "_leftClavPickerButton"], type = "string")
else:
cmds.select(character + ":" + "clavicle_l_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "clavicle_l_anim.buttonName",self.widgets[character + "_leftClavPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "clavicle_r_anim.buttonName"):
cmds.setAttr(character + ":" + "clavicle_r_anim.buttonName",self.widgets[character + "_rightClavPickerButton"], type = "string")
else:
cmds.select(character + ":" + "clavicle_r_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "clavicle_r_anim.buttonName",self.widgets[character + "_rightClavPickerButton"], type = "string")
except:
pass
#upper arms
try:
if cmds.objExists(character + ":" + "fk_arm_l_anim.buttonName"):
cmds.setAttr(character + ":" + "fk_arm_l_anim.buttonName",self.widgets[character + "_leftShoulderPickerButton"], type = "string")
else:
cmds.select(character + ":" + "fk_arm_l_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "fk_arm_l_anim.buttonName",self.widgets[character + "_leftShoulderPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "fk_arm_r_anim.buttonName"):
cmds.setAttr(character + ":" + "fk_arm_r_anim.buttonName",self.widgets[character + "_rightShoulderPickerButton"], type = "string")
else:
cmds.select(character + ":" + "fk_arm_r_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "fk_arm_r_anim.buttonName",self.widgets[character + "_rightShoulderPickerButton"], type = "string")
except:
pass
#lower arms
try:
if cmds.objExists(character + ":" + "fk_elbow_l_anim.buttonName"):
cmds.setAttr(character + ":" + "fk_elbow_l_anim.buttonName",self.widgets[character + "_leftElbowPickerButton"], type = "string")
else:
cmds.select(character + ":" + "fk_elbow_l_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "fk_elbow_l_anim.buttonName",self.widgets[character + "_leftElbowPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "fk_elbow_r_anim.buttonName"):
cmds.setAttr(character + ":" + "fk_elbow_r_anim.buttonName",self.widgets[character + "_rightElbowPickerButton"], type = "string")
else:
cmds.select(character + ":" + "fk_elbow_r_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "fk_elbow_r_anim.buttonName",self.widgets[character + "_rightElbowPickerButton"], type = "string")
except:
pass
#fk hands
try:
if cmds.objExists(character + ":" + "fk_wrist_l_anim.buttonName"):
cmds.setAttr(character + ":" + "fk_wrist_l_anim.buttonName",self.widgets[character + "_leftHandPickerButton"], type = "string")
else:
cmds.select(character + ":" + "fk_wrist_l_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "fk_wrist_l_anim.buttonName",self.widgets[character + "_leftHandPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "fk_wrist_r_anim.buttonName"):
cmds.setAttr(character + ":" + "fk_wrist_r_anim.buttonName",self.widgets[character + "_rightHandPickerButton"], type = "string")
else:
cmds.select(character + ":" + "fk_wrist_r_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "fk_wrist_r_anim.buttonName",self.widgets[character + "_rightHandPickerButton"], type = "string")
except:
pass
#ik elbows
try:
if cmds.objExists(character + ":" + "ik_elbow_l_anim.buttonName"):
cmds.setAttr(character + ":" + "ik_elbow_l_anim.buttonName",self.widgets[character + "_leftIkElbowPickerButton"], type = "string")
else:
cmds.select(character + ":" + "ik_elbow_l_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "ik_elbow_l_anim.buttonName",self.widgets[character + "_leftIkElbowPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "ik_elbow_r_anim.buttonName"):
cmds.setAttr(character + ":" + "ik_elbow_r_anim.buttonName",self.widgets[character + "_rightIkElbowPickerButton"], type = "string")
else:
cmds.select(character + ":" + "ik_elbow_r_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "ik_elbow_r_anim.buttonName",self.widgets[character + "_rightIkElbowPickerButton"], type = "string")
except:
pass
#ik hands
try:
if cmds.objExists(character + ":" + "ik_wrist_l_anim.buttonName"):
cmds.setAttr(character + ":" + "ik_wrist_l_anim.buttonName",self.widgets[character + "_leftIkHandPickerButton"], type = "string")
else:
cmds.select(character + ":" + "ik_wrist_l_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "ik_wrist_l_anim.buttonName",self.widgets[character + "_leftIkHandPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "ik_wrist_r_anim.buttonName"):
cmds.setAttr(character + ":" + "ik_wrist_r_anim.buttonName",self.widgets[character + "_rightIkHandPickerButton"], type = "string")
else:
cmds.select(character + ":" + "ik_wrist_r_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "ik_wrist_r_anim.buttonName",self.widgets[character + "_rightIkHandPickerButton"], type = "string")
except:
pass
#fk thighs
try:
if cmds.objExists(character + ":" + "fk_thigh_l_anim.buttonName"):
cmds.setAttr(character + ":" + "fk_thigh_l_anim.buttonName",self.widgets[character + "_leftThighPickerButton"], type = "string")
else:
cmds.select(character + ":" + "fk_thigh_l_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "fk_thigh_l_anim.buttonName",self.widgets[character + "_leftThighPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "fk_thigh_r_anim.buttonName"):
cmds.setAttr(character + ":" + "fk_thigh_r_anim.buttonName",self.widgets[character + "_rightThighPickerButton"], type = "string")
else:
cmds.select(character + ":" + "fk_thigh_r_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "fk_thigh_r_anim.buttonName",self.widgets[character + "_rightThighPickerButton"], type = "string")
except:
pass
#fk knees
try:
if cmds.objExists(character + ":" + "fk_calf_l_anim.buttonName"):
cmds.setAttr(character + ":" + "fk_calf_l_anim.buttonName",self.widgets[character + "_leftFkKneePickerButton"], type = "string")
else:
cmds.select(character + ":" + "fk_calf_l_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "fk_calf_l_anim.buttonName",self.widgets[character + "_leftFkKneePickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "fk_calf_r_anim.buttonName"):
cmds.setAttr(character + ":" + "fk_calf_r_anim.buttonName",self.widgets[character + "_rightFkKneePickerButton"], type = "string")
else:
cmds.select(character + ":" + "fk_calf_r_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "fk_calf_r_anim.buttonName",self.widgets[character + "_rightFkKneePickerButton"], type = "string")
except:
pass
#fk ankles
try:
if cmds.objExists(character + ":" + "fk_foot_l_anim.buttonName"):
cmds.setAttr(character + ":" + "fk_foot_l_anim.buttonName",self.widgets[character + "_leftFkAnklePickerButton"], type = "string")
else:
cmds.select(character + ":" + "fk_foot_l_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "fk_foot_l_anim.buttonName",self.widgets[character + "_leftFkAnklePickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "fk_foot_r_anim.buttonName"):
cmds.setAttr(character + ":" + "fk_foot_r_anim.buttonName",self.widgets[character + "_rightFkAnklePickerButton"], type = "string")
else:
cmds.select(character + ":" + "fk_foot_r_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "fk_foot_r_anim.buttonName",self.widgets[character + "_rightFkAnklePickerButton"], type = "string")
except:
pass
#fk ball joints
try:
if cmds.objExists(character + ":" + "fk_ball_l_anim"):
if cmds.objExists(character + ":" + "fk_ball_l_anim.buttonName"):
cmds.setAttr(character + ":" + "fk_ball_l_anim.buttonName",self.widgets[character + "_leftFkBallPickerButton"], type = "string")
else:
cmds.select(character + ":" + "fk_ball_l_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "fk_ball_l_anim.buttonName",self.widgets[character + "_leftFkBallPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "fk_ball_r_anim"):
if cmds.objExists(character + ":" + "fk_ball_r_anim.buttonName"):
cmds.setAttr(character + ":" + "fk_ball_r_anim.buttonName",self.widgets[character + "_rightFkBallPickerButton"], type = "string")
else:
cmds.select(character + ":" + "fk_ball_r_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "fk_ball_r_anim.buttonName",self.widgets[character + "_rightFkBallPickerButton"], type = "string")
except:
pass
#ik feet
try:
if cmds.objExists(character + ":" + "ik_foot_anim_l.buttonName"):
cmds.setAttr(character + ":" + "ik_foot_anim_l.buttonName",self.widgets[character + "_leftIkFootPickerButton"], type = "string")
else:
cmds.select(character + ":" + "ik_foot_anim_l")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "ik_foot_anim_l.buttonName",self.widgets[character + "_leftIkFootPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "ik_foot_anim_r.buttonName"):
cmds.setAttr(character + ":" + "ik_foot_anim_r.buttonName",self.widgets[character + "_rightIkFootPickerButton"], type = "string")
else:
cmds.select(character + ":" + "ik_foot_anim_r")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "ik_foot_anim_r.buttonName",self.widgets[character + "_rightIkFootPickerButton"], type = "string")
except:
pass
#ik heels
try:
if cmds.objExists(character + ":" + "heel_ctrl_l.buttonName"):
cmds.setAttr(character + ":" + "heel_ctrl_l.buttonName",self.widgets[character + "_leftIkHeelPickerButton"], type = "string")
else:
cmds.select(character + ":" + "heel_ctrl_l")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "heel_ctrl_l.buttonName",self.widgets[character + "_leftIkHeelPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "heel_ctrl_r.buttonName"):
cmds.setAttr(character + ":" + "heel_ctrl_r.buttonName",self.widgets[character + "_rightIkHeelPickerButton"], type = "string")
else:
cmds.select(character + ":" + "heel_ctrl_r")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "heel_ctrl_r.buttonName",self.widgets[character + "_rightIkHeelPickerButton"], type = "string")
except:
pass
#ik toe wiggles
try:
if cmds.objExists(character + ":" + "toe_wiggle_ctrl_l.buttonName"):
cmds.setAttr(character + ":" + "toe_wiggle_ctrl_l.buttonName",self.widgets[character + "_leftIkToeWigglePickerButton"], type = "string")
else:
cmds.select(character + ":" + "toe_wiggle_ctrl_l")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "toe_wiggle_ctrl_l.buttonName",self.widgets[character + "_leftIkToeWigglePickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "toe_wiggle_ctrl_r.buttonName"):
cmds.setAttr(character + ":" + "toe_wiggle_ctrl_r.buttonName",self.widgets[character + "_rightIkToeWigglePickerButton"], type = "string")
else:
cmds.select(character + ":" + "toe_wiggle_ctrl_r")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "toe_wiggle_ctrl_r.buttonName",self.widgets[character + "_rightIkToeWigglePickerButton"], type = "string")
except:
pass
#ik toes
try:
if cmds.objExists(character + ":" + "toe_tip_ctrl_l.buttonName"):
cmds.setAttr(character + ":" + "toe_tip_ctrl_l.buttonName",self.widgets[character + "_leftIkToePickerButton"], type = "string")
else:
cmds.select(character + ":" + "toe_tip_ctrl_l")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "toe_tip_ctrl_l.buttonName",self.widgets[character + "_leftIkToePickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "toe_tip_ctrl_r.buttonName"):
cmds.setAttr(character + ":" + "toe_tip_ctrl_r.buttonName",self.widgets[character + "_rightIkToePickerButton"], type = "string")
else:
cmds.select(character + ":" + "toe_tip_ctrl_r")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "toe_tip_ctrl_r.buttonName",self.widgets[character + "_rightIkToePickerButton"], type = "string")
except:
pass
#master, offset, root
try:
if cmds.objExists(character + ":" + "master_anim.buttonName"):
cmds.setAttr(character + ":" + "master_anim.buttonName",self.widgets[character + "_masterPickerButton"], type = "string")
else:
cmds.select(character + ":" + "master_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "master_anim.buttonName",self.widgets[character + "_masterPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "offset_anim.buttonName"):
cmds.setAttr(character + ":" + "offset_anim.buttonName",self.widgets[character + "_offsetPickerButton"], type = "string")
else:
cmds.select(character + ":" + "offset_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "offset_anim.buttonName",self.widgets[character + "_offsetPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "root_anim.buttonName"):
cmds.setAttr(character + ":" + "root_anim.buttonName",self.widgets[character + "_rootPickerButton"], type = "string")
else:
cmds.select(character + ":" + "root_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "root_anim.buttonName",self.widgets[character + "_rootPickerButton"], type = "string")
except:
pass
#upper arm rolls
try:
if cmds.objExists(character + ":" + "upperarm_l_twist_anim"):
if cmds.objExists(character + ":" + "upperarm_l_twist_anim.buttonName"):
cmds.setAttr(character + ":" + "upperarm_l_twist_anim.buttonName",self.widgets[character + "_leftArmRollPickerButton"], type = "string")
else:
cmds.select(character + ":" + "upperarm_l_twist_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "upperarm_l_twist_anim.buttonName",self.widgets[character + "_leftArmRollPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "upperarm_l_twist_2_anim"):
if cmds.objExists(character + ":" + "upperarm_l_twist_2_anim.buttonName"):
cmds.setAttr(character + ":" + "upperarm_l_twist_2_anim.buttonName",self.widgets[character + "_leftArmRoll2PickerButton"], type = "string")
else:
cmds.select(character + ":" + "upperarm_l_twist_2_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "upperarm_l_twist_2_anim.buttonName",self.widgets[character + "_leftArmRoll2PickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "upperarm_l_twist_3_anim"):
if cmds.objExists(character + ":" + "upperarm_l_twist_3_anim.buttonName"):
cmds.setAttr(character + ":" + "upperarm_l_twist_3_anim.buttonName",self.widgets[character + "_leftArmRoll3PickerButton"], type = "string")
else:
cmds.select(character + ":" + "upperarm_l_twist_3_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "upperarm_l_twist_3_anim.buttonName",self.widgets[character + "_leftArmRoll3PickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "upperarm_r_twist_anim"):
if cmds.objExists(character + ":" + "upperarm_r_twist_anim.buttonName"):
cmds.setAttr(character + ":" + "upperarm_r_twist_anim.buttonName",self.widgets[character + "_rightArmRollPickerButton"], type = "string")
else:
cmds.select(character + ":" + "upperarm_r_twist_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "upperarm_r_twist_anim.buttonName",self.widgets[character + "_rightArmRollPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "upperarm_r_twist_2_anim"):
if cmds.objExists(character + ":" + "upperarm_r_twist_2_anim.buttonName"):
cmds.setAttr(character + ":" + "upperarm_r_twist_2_anim.buttonName",self.widgets[character + "_rightArmRoll2PickerButton"], type = "string")
else:
cmds.select(character + ":" + "upperarm_r_twist_2_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "upperarm_r_twist_2_anim.buttonName",self.widgets[character + "_rightArmRoll2PickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "upperarm_r_twist_3_anim"):
if cmds.objExists(character + ":" + "upperarm_r_twist_3_anim.buttonName"):
cmds.setAttr(character + ":" + "upperarm_r_twist_3_anim.buttonName",self.widgets[character + "_rightArmRoll3PickerButton"], type = "string")
else:
cmds.select(character + ":" + "upperarm_r_twist_3_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "upperarm_r_twist_3_anim.buttonName",self.widgets[character + "_rightArmRoll3PickerButton"], type = "string")
except:
pass
#lower arm rolls
try:
if cmds.objExists(character + ":" + "lowerarm_l_twist_anim"):
if cmds.objExists(character + ":" + "lowerarm_l_twist_anim.buttonName"):
cmds.setAttr(character + ":" + "lowerarm_l_twist_anim.buttonName",self.widgets[character + "_leftForeTwistPickerButton"], type = "string")
else:
cmds.select(character + ":" + "lowerarm_l_twist_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "lowerarm_l_twist_anim.buttonName",self.widgets[character + "_leftForeTwistPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "lowerarm_l_twist2_anim"):
if cmds.objExists(character + ":" + "lowerarm_l_twist2_anim.buttonName"):
cmds.setAttr(character + ":" + "lowerarm_l_twist2_anim.buttonName",self.widgets[character + "_leftForeTwist2PickerButton"], type = "string")
else:
cmds.select(character + ":" + "lowerarm_l_twist2_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "lowerarm_l_twist2_anim.buttonName",self.widgets[character + "_leftForeTwist2PickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "lowerarm_l_twist3_anim"):
if cmds.objExists(character + ":" + "lowerarm_l_twist3_anim.buttonName"):
cmds.setAttr(character + ":" + "lowerarm_l_twist3_anim.buttonName",self.widgets[character + "_leftForeTwist3PickerButton"], type = "string")
else:
cmds.select(character + ":" + "lowerarm_l_twist3_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "lowerarm_l_twist3_anim.buttonName",self.widgets[character + "_leftForeTwist3PickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "lowerarm_r_twist_anim"):
if cmds.objExists(character + ":" + "lowerarm_r_twist_anim.buttonName"):
cmds.setAttr(character + ":" + "lowerarm_r_twist_anim.buttonName",self.widgets[character + "_rightForeTwistPickerButton"], type = "string")
else:
cmds.select(character + ":" + "lowerarm_r_twist_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "lowerarm_r_twist_anim.buttonName",self.widgets[character + "_rightForeTwistPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "lowerarm_r_twist2_anim"):
if cmds.objExists(character + ":" + "lowerarm_r_twist2_anim.buttonName"):
cmds.setAttr(character + ":" + "lowerarm_r_twist2_anim.buttonName",self.widgets[character + "_rightForeTwist2PickerButton"], type = "string")
else:
cmds.select(character + ":" + "lowerarm_r_twist2_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "lowerarm_r_twist2_anim.buttonName",self.widgets[character + "_rightForeTwist2PickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "lowerarm_r_twist3_anim"):
if cmds.objExists(character + ":" + "lowerarm_r_twist3_anim.buttonName"):
cmds.setAttr(character + ":" + "lowerarm_r_twist3_anim.buttonName",self.widgets[character + "_rightForeTwist3PickerButton"], type = "string")
else:
cmds.select(character + ":" + "lowerarm_r_twist3_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "lowerarm_r_twist3_anim.buttonName",self.widgets[character + "_rightForeTwist3PickerButton"], type = "string")
except:
pass
#thigh twists
try:
if cmds.objExists(character + ":" + "l_thigh_twist_01_anim"):
if cmds.objExists(character + ":" + "l_thigh_twist_01_anim.buttonName"):
cmds.setAttr(character + ":" + "l_thigh_twist_01_anim.buttonName",self.widgets[character + "_leftThighTwistPickerButton"], type = "string")
else:
cmds.select(character + ":" + "l_thigh_twist_01_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "l_thigh_twist_01_anim.buttonName",self.widgets[character + "_leftThighTwistPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "l_thigh_twist_02_anim"):
if cmds.objExists(character + ":" + "l_thigh_twist_02_anim.buttonName"):
cmds.setAttr(character + ":" + "l_thigh_twist_02_anim.buttonName",self.widgets[character + "_leftThighTwist2PickerButton"], type = "string")
else:
cmds.select(character + ":" + "l_thigh_twist_02_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "l_thigh_twist_02_anim.buttonName",self.widgets[character + "_leftThighTwist2PickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "l_thigh_twist_03_anim"):
if cmds.objExists(character + ":" + "l_thigh_twist_03_anim.buttonName"):
cmds.setAttr(character + ":" + "l_thigh_twist_03_anim.buttonName",self.widgets[character + "_leftThighTwist3PickerButton"], type = "string")
else:
cmds.select(character + ":" + "l_thigh_twist_03_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "l_thigh_twist_03_anim.buttonName",self.widgets[character + "_leftThighTwist3PickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "r_thigh_twist_01_anim"):
if cmds.objExists(character + ":" + "r_thigh_twist_01_anim.buttonName"):
cmds.setAttr(character + ":" + "r_thigh_twist_01_anim.buttonName",self.widgets[character + "_rightThighTwistPickerButton"], type = "string")
else:
cmds.select(character + ":" + "r_thigh_twist_01_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "r_thigh_twist_01_anim.buttonName",self.widgets[character + "_rightThighTwistPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "r_thigh_twist_02_anim"):
if cmds.objExists(character + ":" + "r_thigh_twist_02_anim.buttonName"):
cmds.setAttr(character + ":" + "r_thigh_twist_02_anim.buttonName",self.widgets[character + "_rightThighTwist2PickerButton"], type = "string")
else:
cmds.select(character + ":" + "r_thigh_twist_02_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "r_thigh_twist_02_anim.buttonName",self.widgets[character + "_rightThighTwist2PickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "r_thigh_twist_03_anim"):
if cmds.objExists(character + ":" + "r_thigh_twist_03_anim.buttonName"):
cmds.setAttr(character + ":" + "r_thigh_twist_03_anim.buttonName",self.widgets[character + "_rightThighTwist3PickerButton"], type = "string")
else:
cmds.select(character + ":" + "r_thigh_twist_03_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "r_thigh_twist_03_anim.buttonName",self.widgets[character + "_rightThighTwist3PickerButton"], type = "string")
except:
pass
#calf twists
try:
if cmds.objExists(character + ":" + "calf_l_twist_anim"):
if cmds.objExists(character + ":" + "calf_l_twist_anim.buttonName"):
cmds.setAttr(character + ":" + "calf_l_twist_anim.buttonName",self.widgets[character + "_leftCalfTwistPickerButton"], type = "string")
else:
cmds.select(character + ":" + "calf_l_twist_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "calf_l_twist_anim.buttonName",self.widgets[character + "_leftCalfTwistPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "calf_l_twist2_anim"):
if cmds.objExists(character + ":" + "calf_l_twist2_anim.buttonName"):
cmds.setAttr(character + ":" + "calf_l_twist2_anim.buttonName",self.widgets[character + "_leftCalfTwist2PickerButton"], type = "string")
else:
cmds.select(character + ":" + "calf_l_twist2_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "calf_l_twist2_anim.buttonName",self.widgets[character + "_leftCalfTwist2PickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "calf_l_twist3_anim"):
if cmds.objExists(character + ":" + "calf_l_twist3_anim.buttonName"):
cmds.setAttr(character + ":" + "calf_l_twist3_anim.buttonName",self.widgets[character + "_leftCalfTwist3PickerButton"], type = "string")
else:
cmds.select(character + ":" + "calf_l_twist3_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "calf_l_twist3_anim.buttonName",self.widgets[character + "_leftCalfTwist3PickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "calf_r_twist_anim"):
if cmds.objExists(character + ":" + "calf_r_twist_anim.buttonName"):
cmds.setAttr(character + ":" + "calf_r_twist_anim.buttonName",self.widgets[character + "_rightCalfTwistPickerButton"], type = "string")
else:
cmds.select(character + ":" + "calf_r_twist_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "calf_r_twist_anim.buttonName",self.widgets[character + "_rightCalfTwistPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "calf_r_twist2_anim"):
if cmds.objExists(character + ":" + "calf_r_twist2_anim.buttonName"):
cmds.setAttr(character + ":" + "calf_r_twist2_anim.buttonName",self.widgets[character + "_rightCalfTwist2PickerButton"], type = "string")
else:
cmds.select(character + ":" + "calf_r_twist2_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "calf_r_twist2_anim.buttonName",self.widgets[character + "_rightCalfTwist2PickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "calf_r_twist3_anim"):
if cmds.objExists(character + ":" + "calf_r_twist3_anim.buttonName"):
cmds.setAttr(character + ":" + "calf_r_twist3_anim.buttonName",self.widgets[character + "_rightCalfTwist3PickerButton"], type = "string")
else:
cmds.select(character + ":" + "calf_r_twist3_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "calf_r_twist3_anim.buttonName",self.widgets[character + "_rightCalfTwist3PickerButton"], type = "string")
except:
pass
#left pinky finger
# Stamp each left-pinky FK control with its picker-button widget name.
for ctrl, key in [
    ("pinky_metacarpal_ctrl_l", "_leftPinkyMetacarpalPickerButton"),
    ("pinky_finger_fk_ctrl_1_l", "_leftPinky1PickerButton"),
    ("pinky_finger_fk_ctrl_2_l", "_leftPinky2PickerButton"),
    ("pinky_finger_fk_ctrl_3_l", "_leftPinky3PickerButton"),
]:
    try:
        node = character + ":" + ctrl
        if cmds.objExists(node):
            # Create the attr on first use; addAttr acts on the selection.
            if not cmds.objExists(node + ".buttonName"):
                cmds.select(node)
                cmds.addAttr(ln="buttonName", dt="string", keyable=False)
            cmds.setAttr(node + ".buttonName", self.widgets[character + key], type="string")
    except Exception:
        # Best-effort (matches original): locked/missing controls are skipped.
        pass
#left ring finger
# Stamp each left-ring FK control with its picker-button widget name.
for ctrl, key in [
    ("ring_metacarpal_ctrl_l", "_leftRingMetacarpalPickerButton"),
    ("ring_finger_fk_ctrl_1_l", "_leftRing1PickerButton"),
    ("ring_finger_fk_ctrl_2_l", "_leftRing2PickerButton"),
    ("ring_finger_fk_ctrl_3_l", "_leftRing3PickerButton"),
]:
    try:
        node = character + ":" + ctrl
        if cmds.objExists(node):
            if not cmds.objExists(node + ".buttonName"):
                cmds.select(node)
                cmds.addAttr(ln="buttonName", dt="string", keyable=False)
            cmds.setAttr(node + ".buttonName", self.widgets[character + key], type="string")
    except Exception:
        # Best-effort (matches original): locked/missing controls are skipped.
        pass
#left middle finger
# Stamp each left-middle FK control with its picker-button widget name.
for ctrl, key in [
    ("middle_metacarpal_ctrl_l", "_leftMiddleMetacarpalPickerButton"),
    ("middle_finger_fk_ctrl_1_l", "_leftMiddle1PickerButton"),
    ("middle_finger_fk_ctrl_2_l", "_leftMiddle2PickerButton"),
    ("middle_finger_fk_ctrl_3_l", "_leftMiddle3PickerButton"),
]:
    try:
        node = character + ":" + ctrl
        if cmds.objExists(node):
            if not cmds.objExists(node + ".buttonName"):
                cmds.select(node)
                cmds.addAttr(ln="buttonName", dt="string", keyable=False)
            cmds.setAttr(node + ".buttonName", self.widgets[character + key], type="string")
    except Exception:
        # Best-effort (matches original): locked/missing controls are skipped.
        pass
#left index finger
# Stamp each left-index FK control with its picker-button widget name.
for ctrl, key in [
    ("index_metacarpal_ctrl_l", "_leftIndexMetacarpalPickerButton"),
    ("index_finger_fk_ctrl_1_l", "_leftIndex1PickerButton"),
    ("index_finger_fk_ctrl_2_l", "_leftIndex2PickerButton"),
    ("index_finger_fk_ctrl_3_l", "_leftIndex3PickerButton"),
]:
    try:
        node = character + ":" + ctrl
        if cmds.objExists(node):
            if not cmds.objExists(node + ".buttonName"):
                cmds.select(node)
                cmds.addAttr(ln="buttonName", dt="string", keyable=False)
            cmds.setAttr(node + ".buttonName", self.widgets[character + key], type="string")
    except Exception:
        # Best-effort (matches original): locked/missing controls are skipped.
        pass
#left thumb finger
# Stamp each left-thumb FK control with its picker-button widget name.
# (The thumb has no metacarpal control, only the three FK joints.)
for ctrl, key in [
    ("thumb_finger_fk_ctrl_1_l", "_leftThumb1PickerButton"),
    ("thumb_finger_fk_ctrl_2_l", "_leftThumb2PickerButton"),
    ("thumb_finger_fk_ctrl_3_l", "_leftThumb3PickerButton"),
]:
    try:
        node = character + ":" + ctrl
        if cmds.objExists(node):
            if not cmds.objExists(node + ".buttonName"):
                cmds.select(node)
                cmds.addAttr(ln="buttonName", dt="string", keyable=False)
            cmds.setAttr(node + ".buttonName", self.widgets[character + key], type="string")
    except Exception:
        # Best-effort (matches original): locked/missing controls are skipped.
        pass
#left IK fingers
# Stamp each left-hand IK finger anim control with its picker-button widget
# name on a "buttonName" string attribute.
for finger in ["index", "middle", "ring", "pinky", "thumb"]:
    try:
        node = character + ":" + finger + "_l_ik_anim"
        if cmds.objExists(node):
            # Create the attr on first use; addAttr acts on the selection.
            if not cmds.objExists(node + ".buttonName"):
                cmds.select(node)
                cmds.addAttr(ln="buttonName", dt="string", keyable=False)
            widget = self.widgets[character + "_left" + finger.capitalize() + "FingerIKPickerButton"]
            cmds.setAttr(node + ".buttonName", widget, type="string")
    except Exception:
        # Best-effort (matches original): locked/missing controls are skipped.
        pass
#left IK finger PVs
# Stamp each left-hand IK pole-vector control with its picker-button widget
# name.  NOTE(review): the original tests existence on the matching *_ik_anim
# control but writes the attribute to the *_poleVector control; that mismatch
# is preserved here — confirm whether the poleVector itself should be checked.
for finger in ["index", "middle", "ring", "pinky", "thumb"]:
    try:
        if cmds.objExists(character + ":" + finger + "_l_ik_anim"):
            pv = character + ":" + finger + "_l_poleVector"
            # Create the attr on first use; addAttr acts on the selection.
            if not cmds.objExists(pv + ".buttonName"):
                cmds.select(pv)
                cmds.addAttr(ln="buttonName", dt="string", keyable=False)
            widget = self.widgets[character + "_left" + finger.capitalize() + "IkPvPickerButton"]
            cmds.setAttr(pv + ".buttonName", widget, type="string")
    except Exception:
        # Best-effort (matches original): locked/missing controls are skipped.
        pass
# Left-hand global IK control.
try:
    node = character + ":" + "l_global_ik_anim"
    if cmds.objExists(node):
        if not cmds.objExists(node + ".buttonName"):
            cmds.select(node)
            cmds.addAttr(ln="buttonName", dt="string", keyable=False)
        cmds.setAttr(node + ".buttonName", self.widgets[character + "_leftIkGlobalCtrlPickerButton"], type="string")
except Exception:
    pass
#right pinky finger
# Stamp each right-pinky FK control with its picker-button widget name.
for ctrl, key in [
    ("pinky_metacarpal_ctrl_r", "_rightPinkyMetacarpalPickerButton"),
    ("pinky_finger_fk_ctrl_1_r", "_rightPinky1PickerButton"),
    ("pinky_finger_fk_ctrl_2_r", "_rightPinky2PickerButton"),
    ("pinky_finger_fk_ctrl_3_r", "_rightPinky3PickerButton"),
]:
    try:
        node = character + ":" + ctrl
        if cmds.objExists(node):
            # Create the attr on first use; addAttr acts on the selection.
            if not cmds.objExists(node + ".buttonName"):
                cmds.select(node)
                cmds.addAttr(ln="buttonName", dt="string", keyable=False)
            cmds.setAttr(node + ".buttonName", self.widgets[character + key], type="string")
    except Exception:
        # Best-effort (matches original): locked/missing controls are skipped.
        pass
#right ring finger
# Stamp each right-ring FK control with its picker-button widget name.
for ctrl, key in [
    ("ring_metacarpal_ctrl_r", "_rightRingMetacarpalPickerButton"),
    ("ring_finger_fk_ctrl_1_r", "_rightRing1PickerButton"),
    ("ring_finger_fk_ctrl_2_r", "_rightRing2PickerButton"),
    ("ring_finger_fk_ctrl_3_r", "_rightRing3PickerButton"),
]:
    try:
        node = character + ":" + ctrl
        if cmds.objExists(node):
            if not cmds.objExists(node + ".buttonName"):
                cmds.select(node)
                cmds.addAttr(ln="buttonName", dt="string", keyable=False)
            cmds.setAttr(node + ".buttonName", self.widgets[character + key], type="string")
    except Exception:
        # Best-effort (matches original): locked/missing controls are skipped.
        pass
#right middle finger
# Stamp each right-middle FK control with its picker-button widget name.
for ctrl, key in [
    ("middle_metacarpal_ctrl_r", "_rightMiddleMetacarpalPickerButton"),
    ("middle_finger_fk_ctrl_1_r", "_rightMiddle1PickerButton"),
    ("middle_finger_fk_ctrl_2_r", "_rightMiddle2PickerButton"),
    ("middle_finger_fk_ctrl_3_r", "_rightMiddle3PickerButton"),
]:
    try:
        node = character + ":" + ctrl
        if cmds.objExists(node):
            if not cmds.objExists(node + ".buttonName"):
                cmds.select(node)
                cmds.addAttr(ln="buttonName", dt="string", keyable=False)
            cmds.setAttr(node + ".buttonName", self.widgets[character + key], type="string")
    except Exception:
        # Best-effort (matches original): locked/missing controls are skipped.
        pass
#right index finger
# Stamp each right-index FK control with its picker-button widget name.
for ctrl, key in [
    ("index_metacarpal_ctrl_r", "_rightIndexMetacarpalPickerButton"),
    ("index_finger_fk_ctrl_1_r", "_rightIndex1PickerButton"),
    ("index_finger_fk_ctrl_2_r", "_rightIndex2PickerButton"),
    ("index_finger_fk_ctrl_3_r", "_rightIndex3PickerButton"),
]:
    try:
        node = character + ":" + ctrl
        if cmds.objExists(node):
            if not cmds.objExists(node + ".buttonName"):
                cmds.select(node)
                cmds.addAttr(ln="buttonName", dt="string", keyable=False)
            cmds.setAttr(node + ".buttonName", self.widgets[character + key], type="string")
    except Exception:
        # Best-effort (matches original): locked/missing controls are skipped.
        pass
#right thumb finger
# Stamp each right-thumb FK control with its picker-button widget name.
# (The thumb has no metacarpal control, only the three FK joints.)
for ctrl, key in [
    ("thumb_finger_fk_ctrl_1_r", "_rightThumb1PickerButton"),
    ("thumb_finger_fk_ctrl_2_r", "_rightThumb2PickerButton"),
    ("thumb_finger_fk_ctrl_3_r", "_rightThumb3PickerButton"),
]:
    try:
        node = character + ":" + ctrl
        if cmds.objExists(node):
            if not cmds.objExists(node + ".buttonName"):
                cmds.select(node)
                cmds.addAttr(ln="buttonName", dt="string", keyable=False)
            cmds.setAttr(node + ".buttonName", self.widgets[character + key], type="string")
    except Exception:
        # Best-effort (matches original): locked/missing controls are skipped.
        pass
#right IK fingers
# Stamp each right-hand IK finger anim control with its picker-button widget
# name on a "buttonName" string attribute.
for finger in ["index", "middle", "ring", "pinky", "thumb"]:
    try:
        node = character + ":" + finger + "_r_ik_anim"
        if cmds.objExists(node):
            # Create the attr on first use; addAttr acts on the selection.
            if not cmds.objExists(node + ".buttonName"):
                cmds.select(node)
                cmds.addAttr(ln="buttonName", dt="string", keyable=False)
            widget = self.widgets[character + "_right" + finger.capitalize() + "FingerIKPickerButton"]
            cmds.setAttr(node + ".buttonName", widget, type="string")
    except Exception:
        # Best-effort (matches original): locked/missing controls are skipped.
        pass
#right IK finger PVs
try:
if cmds.objExists(character + ":" + "index_r_ik_anim"):
if cmds.objExists(character + ":" + "index_r_poleVector.buttonName"):
cmds.setAttr(character + ":" + "index_r_poleVector.buttonName",self.widgets[character + "_rightIndexIkPvPickerButton"], type = "string")
else:
cmds.select(character + ":" + "index_r_poleVector")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "index_r_poleVector.buttonName",self.widgets[character + "_rightIndexIkPvPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "middle_r_ik_anim"):
if cmds.objExists(character + ":" + "middle_r_poleVector.buttonName"):
cmds.setAttr(character + ":" + "middle_r_poleVector.buttonName",self.widgets[character + "_rightMiddleIkPvPickerButton"], type = "string")
else:
cmds.select(character + ":" + "middle_r_poleVector")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "middle_r_poleVector.buttonName",self.widgets[character + "_rightMiddleIkPvPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "ring_r_ik_anim"):
if cmds.objExists(character + ":" + "ring_r_poleVector.buttonName"):
cmds.setAttr(character + ":" + "ring_r_poleVector.buttonName",self.widgets[character + "_rightRingIkPvPickerButton"], type = "string")
else:
cmds.select(character + ":" + "ring_r_poleVector")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "ring_r_poleVector.buttonName",self.widgets[character + "_rightRingIkPvPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "pinky_r_ik_anim"):
if cmds.objExists(character + ":" + "pinky_r_poleVector.buttonName"):
cmds.setAttr(character + ":" + "pinky_r_poleVector.buttonName",self.widgets[character + "_rightPinkyIkPvPickerButton"], type = "string")
else:
cmds.select(character + ":" + "pinky_r_poleVector")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "pinky_r_poleVector.buttonName",self.widgets[character + "_rightPinkyIkPvPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "thumb_r_ik_anim"):
if cmds.objExists(character + ":" + "thumb_r_poleVector.buttonName"):
cmds.setAttr(character + ":" + "thumb_r_poleVector.buttonName",self.widgets[character + "_rightThumbIkPvPickerButton"], type = "string")
else:
cmds.select(character + ":" + "thumb_r_poleVector")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "thumb_r_poleVector.buttonName",self.widgets[character + "_rightThumbIkPvPickerButton"], type = "string")
except:
pass
try:
if cmds.objExists(character + ":" + "r_global_ik_anim"):
if cmds.objExists(character + ":" + "r_global_ik_anim.buttonName"):
cmds.setAttr(character + ":" + "r_global_ik_anim.buttonName",self.widgets[character + "_rightIkGlobalCtrlPickerButton"], type = "string")
else:
cmds.select(character + ":" + "r_global_ik_anim")
cmds.addAttr(ln = "buttonName", dt = "string", keyable = False)
cmds.setAttr(character + ":" + "r_global_ik_anim.buttonName",self.widgets[character + "_rightIkGlobalCtrlPickerButton"], type = "string")
except:
pass
| 588,552
| 588,552
| 0.534187
| 54,030
| 588,552
| 5.639386
| 0.032723
| 0.07621
| 0.066755
| 0.043519
| 0.821182
| 0.741257
| 0.708674
| 0.68251
| 0.644816
| 0.621682
| 0
| 0.014827
| 0.293879
| 588,552
| 1
| 588,552
| 588,552
| 0.71834
| 0.076675
| 0
| 0.502654
| 0
| 0.000549
| 0.25055
| 0.154604
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.032217
| 0.003295
| null | null | 0.00238
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
587bc039cae8dc3a91b4fe171a8806bf2d8a096e
| 60,361
|
py
|
Python
|
awsiot/iotshadow.py
|
vietmaiquoc/aws-iot-device-sdk-python-v2
|
1c393c903b53a959711494a6b626497802ea98df
|
[
"Apache-2.0"
] | 224
|
2019-11-24T14:36:24.000Z
|
2022-03-30T12:27:18.000Z
|
awsiot/iotshadow.py
|
vietmaiquoc/aws-iot-device-sdk-python-v2
|
1c393c903b53a959711494a6b626497802ea98df
|
[
"Apache-2.0"
] | 166
|
2019-12-03T19:41:40.000Z
|
2022-03-31T20:02:39.000Z
|
awsiot/iotshadow.py
|
vietmaiquoc/aws-iot-device-sdk-python-v2
|
1c393c903b53a959711494a6b626497802ea98df
|
[
"Apache-2.0"
] | 156
|
2020-01-02T18:15:45.000Z
|
2022-03-18T07:34:55.000Z
|
# Copyright Amazon.com, Inc. or its affiliates. All rights reserved.
# SPDX-License-Identifier: Apache-2.0.
# This file is generated
import awsiot
import concurrent.futures
import datetime
import typing
class IotShadowClient(awsiot.MqttServiceClient):
    """
    MQTT client for the AWS IoT Device Shadow service.

    Wraps publish/subscribe operations on the reserved
    ``$aws/things/.../shadow`` topics for both the classic (unnamed)
    shadow and named shadows.
    """

    def publish_delete_named_shadow(self, request, qos):
        # type: (DeleteNamedShadowRequest, int) -> concurrent.futures.Future
        """
        Publish a request to delete a named shadow.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#delete-pub-sub-topic

        Args:
            request: `DeleteNamedShadowRequest` instance.
            qos: The Quality of Service guarantee of this message

        Returns:
            A Future whose result is None once the request is successfully
            published, or an exception if publishing fails.
        """
        if not request.shadow_name:
            raise ValueError("request.shadow_name is required")
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        topic = '$aws/things/{0.thing_name}/shadow/name/{0.shadow_name}/delete'.format(request)
        return self._publish_operation(topic=topic, qos=qos, payload=request.to_payload())

    def publish_delete_shadow(self, request, qos):
        # type: (DeleteShadowRequest, int) -> concurrent.futures.Future
        """
        Publish a request to delete the classic (unnamed) shadow.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#delete-pub-sub-topic

        Args:
            request: `DeleteShadowRequest` instance.
            qos: The Quality of Service guarantee of this message

        Returns:
            A Future whose result is None once the request is successfully
            published, or an exception if publishing fails.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        topic = '$aws/things/{0.thing_name}/shadow/delete'.format(request)
        return self._publish_operation(topic=topic, qos=qos, payload=request.to_payload())

    def publish_get_named_shadow(self, request, qos):
        # type: (GetNamedShadowRequest, int) -> concurrent.futures.Future
        """
        Publish a request to fetch a named shadow.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#get-pub-sub-topic

        Args:
            request: `GetNamedShadowRequest` instance.
            qos: The Quality of Service guarantee of this message

        Returns:
            A Future whose result is None once the request is successfully
            published, or an exception if publishing fails.
        """
        if not request.shadow_name:
            raise ValueError("request.shadow_name is required")
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        topic = '$aws/things/{0.thing_name}/shadow/name/{0.shadow_name}/get'.format(request)
        return self._publish_operation(topic=topic, qos=qos, payload=request.to_payload())

    def publish_get_shadow(self, request, qos):
        # type: (GetShadowRequest, int) -> concurrent.futures.Future
        """
        Publish a request to fetch the classic (unnamed) shadow.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#get-pub-sub-topic

        Args:
            request: `GetShadowRequest` instance.
            qos: The Quality of Service guarantee of this message

        Returns:
            A Future whose result is None once the request is successfully
            published, or an exception if publishing fails.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        topic = '$aws/things/{0.thing_name}/shadow/get'.format(request)
        return self._publish_operation(topic=topic, qos=qos, payload=request.to_payload())

    def publish_update_named_shadow(self, request, qos):
        # type: (UpdateNamedShadowRequest, int) -> concurrent.futures.Future
        """
        Publish a request to update a named shadow.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#update-pub-sub-topic

        Args:
            request: `UpdateNamedShadowRequest` instance.
            qos: The Quality of Service guarantee of this message

        Returns:
            A Future whose result is None once the request is successfully
            published, or an exception if publishing fails.
        """
        if not request.shadow_name:
            raise ValueError("request.shadow_name is required")
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        topic = '$aws/things/{0.thing_name}/shadow/name/{0.shadow_name}/update'.format(request)
        return self._publish_operation(topic=topic, qos=qos, payload=request.to_payload())

    def publish_update_shadow(self, request, qos):
        # type: (UpdateShadowRequest, int) -> concurrent.futures.Future
        """
        Publish a request to update the classic (unnamed) shadow.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#update-pub-sub-topic

        Args:
            request: `UpdateShadowRequest` instance.
            qos: The Quality of Service guarantee of this message

        Returns:
            A Future whose result is None once the request is successfully
            published, or an exception if publishing fails.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        topic = '$aws/things/{0.thing_name}/shadow/update'.format(request)
        return self._publish_operation(topic=topic, qos=qos, payload=request.to_payload())

    def subscribe_to_delete_named_shadow_accepted(self, request, qos, callback):
        # type: (DeleteNamedShadowSubscriptionRequest, int, typing.Callable[[DeleteShadowResponse], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to accepted responses for named-shadow delete requests.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#delete-accepted-pub-sub-topic

        Args:
            request: `DeleteNamedShadowSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with a `DeleteShadowResponse` for each message
                received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not request.shadow_name:
            raise ValueError("request.shadow_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/name/{0.shadow_name}/delete/accepted'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=DeleteShadowResponse.from_payload)

    def subscribe_to_delete_named_shadow_rejected(self, request, qos, callback):
        # type: (DeleteNamedShadowSubscriptionRequest, int, typing.Callable[[ErrorResponse], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to rejected responses for named-shadow delete requests.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#delete-rejected-pub-sub-topic

        Args:
            request: `DeleteNamedShadowSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with an `ErrorResponse` for each message
                received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not request.shadow_name:
            raise ValueError("request.shadow_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/name/{0.shadow_name}/delete/rejected'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=ErrorResponse.from_payload)

    def subscribe_to_delete_shadow_accepted(self, request, qos, callback):
        # type: (DeleteShadowSubscriptionRequest, int, typing.Callable[[DeleteShadowResponse], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to accepted responses for classic-shadow delete requests.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#delete-accepted-pub-sub-topic

        Args:
            request: `DeleteShadowSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with a `DeleteShadowResponse` for each message
                received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/delete/accepted'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=DeleteShadowResponse.from_payload)

    def subscribe_to_delete_shadow_rejected(self, request, qos, callback):
        # type: (DeleteShadowSubscriptionRequest, int, typing.Callable[[ErrorResponse], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to rejected responses for classic-shadow delete requests.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#delete-rejected-pub-sub-topic

        Args:
            request: `DeleteShadowSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with an `ErrorResponse` for each message
                received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/delete/rejected'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=ErrorResponse.from_payload)

    def subscribe_to_get_named_shadow_accepted(self, request, qos, callback):
        # type: (GetNamedShadowSubscriptionRequest, int, typing.Callable[[GetShadowResponse], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to accepted responses for named-shadow get requests.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#get-accepted-pub-sub-topic

        Args:
            request: `GetNamedShadowSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with a `GetShadowResponse` for each message
                received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not request.shadow_name:
            raise ValueError("request.shadow_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/name/{0.shadow_name}/get/accepted'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=GetShadowResponse.from_payload)

    def subscribe_to_get_named_shadow_rejected(self, request, qos, callback):
        # type: (GetNamedShadowSubscriptionRequest, int, typing.Callable[[ErrorResponse], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to rejected responses for named-shadow get requests.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#get-rejected-pub-sub-topic

        Args:
            request: `GetNamedShadowSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with an `ErrorResponse` for each message
                received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not request.shadow_name:
            raise ValueError("request.shadow_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/name/{0.shadow_name}/get/rejected'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=ErrorResponse.from_payload)

    def subscribe_to_get_shadow_accepted(self, request, qos, callback):
        # type: (GetShadowSubscriptionRequest, int, typing.Callable[[GetShadowResponse], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to accepted responses for classic-shadow get requests.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#get-accepted-pub-sub-topic

        Args:
            request: `GetShadowSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with a `GetShadowResponse` for each message
                received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/get/accepted'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=GetShadowResponse.from_payload)

    def subscribe_to_get_shadow_rejected(self, request, qos, callback):
        # type: (GetShadowSubscriptionRequest, int, typing.Callable[[ErrorResponse], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to rejected responses for classic-shadow get requests.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#get-rejected-pub-sub-topic

        Args:
            request: `GetShadowSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with an `ErrorResponse` for each message
                received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/get/rejected'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=ErrorResponse.from_payload)

    def subscribe_to_named_shadow_delta_updated_events(self, request, qos, callback):
        # type: (NamedShadowDeltaUpdatedSubscriptionRequest, int, typing.Callable[[ShadowDeltaUpdatedEvent], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to delta events for a named shadow.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#update-delta-pub-sub-topic

        Args:
            request: `NamedShadowDeltaUpdatedSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with a `ShadowDeltaUpdatedEvent` for each
                message received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not request.shadow_name:
            raise ValueError("request.shadow_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/name/{0.shadow_name}/update/delta'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=ShadowDeltaUpdatedEvent.from_payload)

    def subscribe_to_named_shadow_updated_events(self, request, qos, callback):
        # type: (NamedShadowUpdatedSubscriptionRequest, int, typing.Callable[[ShadowUpdatedEvent], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to document-updated events for a named shadow.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#update-documents-pub-sub-topic

        Args:
            request: `NamedShadowUpdatedSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with a `ShadowUpdatedEvent` for each message
                received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.shadow_name:
            raise ValueError("request.shadow_name is required")
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/name/{0.shadow_name}/update/documents'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=ShadowUpdatedEvent.from_payload)

    def subscribe_to_shadow_delta_updated_events(self, request, qos, callback):
        # type: (ShadowDeltaUpdatedSubscriptionRequest, int, typing.Callable[[ShadowDeltaUpdatedEvent], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to delta events for the classic (unnamed) shadow.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#update-delta-pub-sub-topic

        Args:
            request: `ShadowDeltaUpdatedSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with a `ShadowDeltaUpdatedEvent` for each
                message received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/update/delta'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=ShadowDeltaUpdatedEvent.from_payload)

    def subscribe_to_shadow_updated_events(self, request, qos, callback):
        # type: (ShadowUpdatedSubscriptionRequest, int, typing.Callable[[ShadowUpdatedEvent], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to document-updated events for the classic (unnamed) shadow.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#update-documents-pub-sub-topic

        Args:
            request: `ShadowUpdatedSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with a `ShadowUpdatedEvent` for each message
                received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/update/documents'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=ShadowUpdatedEvent.from_payload)

    def subscribe_to_update_named_shadow_accepted(self, request, qos, callback):
        # type: (UpdateNamedShadowSubscriptionRequest, int, typing.Callable[[UpdateShadowResponse], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to accepted responses for named-shadow update requests.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#update-accepted-pub-sub-topic

        Args:
            request: `UpdateNamedShadowSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with an `UpdateShadowResponse` for each message
                received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not request.shadow_name:
            raise ValueError("request.shadow_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/name/{0.shadow_name}/update/accepted'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=UpdateShadowResponse.from_payload)

    def subscribe_to_update_named_shadow_rejected(self, request, qos, callback):
        # type: (UpdateNamedShadowSubscriptionRequest, int, typing.Callable[[ErrorResponse], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to rejected responses for named-shadow update requests.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#update-rejected-pub-sub-topic

        Args:
            request: `UpdateNamedShadowSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with an `ErrorResponse` for each message
                received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not request.shadow_name:
            raise ValueError("request.shadow_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/name/{0.shadow_name}/update/rejected'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=ErrorResponse.from_payload)

    def subscribe_to_update_shadow_accepted(self, request, qos, callback):
        # type: (UpdateShadowSubscriptionRequest, int, typing.Callable[[UpdateShadowResponse], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to accepted responses for classic-shadow update requests.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#update-accepted-pub-sub-topic

        Args:
            request: `UpdateShadowSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with an `UpdateShadowResponse` for each message
                received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/update/accepted'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=UpdateShadowResponse.from_payload)

    def subscribe_to_update_shadow_rejected(self, request, qos, callback):
        # type: (UpdateShadowSubscriptionRequest, int, typing.Callable[[ErrorResponse], None]) -> typing.Tuple[concurrent.futures.Future, str]
        """
        Subscribe to rejected responses for classic-shadow update requests.

        API Docs: https://docs.aws.amazon.com/iot/latest/developerguide/device-shadow-mqtt.html#update-rejected-pub-sub-topic

        Args:
            request: `UpdateShadowSubscriptionRequest` instance.
            qos: The Quality of Service guarantee of this message
            callback: Invoked with an `ErrorResponse` for each message
                received; its return value is ignored.

        Returns:
            Tuple of (Future, topic). The Future completes with None when the
            server acknowledges the subscription, or with an exception if the
            subscription fails. The topic may be passed to `unsubscribe()` to
            stop receiving messages. Messages may arrive before the
            subscription is acknowledged.
        """
        if not request.thing_name:
            raise ValueError("request.thing_name is required")
        if not callable(callback):
            raise ValueError("callback is required")
        topic = '$aws/things/{0.thing_name}/shadow/update/rejected'.format(request)
        return self._subscribe_operation(
            topic=topic, qos=qos, callback=callback,
            payload_to_class_fn=ErrorResponse.from_payload)
class DeleteNamedShadowRequest(awsiot.ModeledClass):
    """
    Request to delete a named shadow.

    All attributes default to None and may be set by keyword in the
    constructor.

    Keyword Args:
        client_token (str)
        shadow_name (str)
        thing_name (str)

    Attributes:
        client_token (str)
        shadow_name (str)
        thing_name (str)
    """

    __slots__ = ['client_token', 'shadow_name', 'thing_name']

    def __init__(self, *args, **kwargs):
        self.client_token = kwargs.get('client_token')
        self.shadow_name = kwargs.get('shadow_name')
        self.thing_name = kwargs.get('thing_name')
        # Positional arguments are still honored for backwards compatibility.
        for name, value in zip(['client_token', 'shadow_name', 'thing_name'], args):
            setattr(self, name, value)

    def to_payload(self):
        # type: () -> typing.Dict[str, typing.Any]
        """Return the MQTT payload dict; only clientToken is serialized."""
        payload = {}  # type: typing.Dict[str, typing.Any]
        if self.client_token is not None:
            payload['clientToken'] = self.client_token
        return payload
class DeleteNamedShadowSubscriptionRequest(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        shadow_name (str)
        thing_name (str)

    Attributes:
        shadow_name (str)
        thing_name (str)
    """

    __slots__ = ['shadow_name', 'thing_name']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)
class DeleteShadowRequest(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        client_token (str)
        thing_name (str)

    Attributes:
        client_token (str)
        thing_name (str)
    """

    __slots__ = ['client_token', 'thing_name']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # only 'thing_name' was ever accepted by position; keep that behavior
        for name, value in zip(('thing_name',), args):
            setattr(self, name, value)

    def to_payload(self):
        # type: () -> typing.Dict[str, typing.Any]
        """Return the wire-format payload dict; unset fields are omitted."""
        token = self.client_token
        return {} if token is None else {'clientToken': token}
class DeleteShadowResponse(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        client_token (str)
        timestamp (datetime.datetime)
        version (int)

    Attributes:
        client_token (str)
        timestamp (datetime.datetime)
        version (int)
    """

    __slots__ = ['client_token', 'timestamp', 'version']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # only these two were ever accepted by position; keep that behavior
        for name, value in zip(('timestamp', 'version'), args):
            setattr(self, name, value)

    @classmethod
    def from_payload(cls, payload):
        # type: (typing.Dict[str, typing.Any]) -> DeleteShadowResponse
        """Build an instance from a decoded payload dict; absent/None keys are skipped."""
        new = cls()
        fields = (
            ('clientToken', 'client_token', None),
            ('timestamp', 'timestamp', datetime.datetime.fromtimestamp),
            ('version', 'version', None),
        )
        for key, attr, convert in fields:
            raw = payload.get(key)
            if raw is not None:
                setattr(new, attr, raw if convert is None else convert(raw))
        return new
class DeleteShadowSubscriptionRequest(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        thing_name (str)

    Attributes:
        thing_name (str)
    """

    __slots__ = ['thing_name']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)
class ErrorResponse(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        client_token (str)
        code (int)
        message (str)
        timestamp (datetime.datetime)

    Attributes:
        client_token (str)
        code (int)
        message (str)
        timestamp (datetime.datetime)
    """

    __slots__ = ['client_token', 'code', 'message', 'timestamp']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)

    @classmethod
    def from_payload(cls, payload):
        # type: (typing.Dict[str, typing.Any]) -> ErrorResponse
        """Build an instance from a decoded payload dict; absent/None keys are skipped."""
        new = cls()
        fields = (
            ('clientToken', 'client_token', None),
            ('code', 'code', None),
            ('message', 'message', None),
            ('timestamp', 'timestamp', datetime.datetime.fromtimestamp),
        )
        for key, attr, convert in fields:
            raw = payload.get(key)
            if raw is not None:
                setattr(new, attr, raw if convert is None else convert(raw))
        return new
class GetNamedShadowRequest(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        client_token (str)
        shadow_name (str)
        thing_name (str)

    Attributes:
        client_token (str)
        shadow_name (str)
        thing_name (str)
    """

    __slots__ = ['client_token', 'shadow_name', 'thing_name']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)

    def to_payload(self):
        # type: () -> typing.Dict[str, typing.Any]
        """Return the wire-format payload dict; unset fields are omitted."""
        token = self.client_token
        return {} if token is None else {'clientToken': token}
class GetNamedShadowSubscriptionRequest(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        shadow_name (str)
        thing_name (str)

    Attributes:
        shadow_name (str)
        thing_name (str)
    """

    __slots__ = ['shadow_name', 'thing_name']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)
class GetShadowRequest(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        client_token (str)
        thing_name (str)

    Attributes:
        client_token (str)
        thing_name (str)
    """

    __slots__ = ['client_token', 'thing_name']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # only 'thing_name' was ever accepted by position; keep that behavior
        for name, value in zip(('thing_name',), args):
            setattr(self, name, value)

    def to_payload(self):
        # type: () -> typing.Dict[str, typing.Any]
        """Return the wire-format payload dict; unset fields are omitted."""
        token = self.client_token
        return {} if token is None else {'clientToken': token}
class GetShadowResponse(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        client_token (str)
        metadata (ShadowMetadata)
        state (ShadowStateWithDelta)
        timestamp (datetime.datetime)
        version (int)

    Attributes:
        client_token (str)
        metadata (ShadowMetadata)
        state (ShadowStateWithDelta)
        timestamp (datetime.datetime)
        version (int)
    """

    __slots__ = ['client_token', 'metadata', 'state', 'timestamp', 'version']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # client_token was never accepted by position; keep the legacy order
        for name, value in zip(('metadata', 'state', 'timestamp', 'version'), args):
            setattr(self, name, value)

    @classmethod
    def from_payload(cls, payload):
        # type: (typing.Dict[str, typing.Any]) -> GetShadowResponse
        """Build an instance from a decoded payload dict; absent/None keys are skipped."""
        new = cls()
        fields = (
            ('clientToken', 'client_token', None),
            ('metadata', 'metadata', ShadowMetadata.from_payload),
            ('state', 'state', ShadowStateWithDelta.from_payload),
            ('timestamp', 'timestamp', datetime.datetime.fromtimestamp),
            ('version', 'version', None),
        )
        for key, attr, convert in fields:
            raw = payload.get(key)
            if raw is not None:
                setattr(new, attr, raw if convert is None else convert(raw))
        return new
class GetShadowSubscriptionRequest(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        thing_name (str)

    Attributes:
        thing_name (str)
    """

    __slots__ = ['thing_name']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)
class NamedShadowDeltaUpdatedSubscriptionRequest(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        shadow_name (str)
        thing_name (str)

    Attributes:
        shadow_name (str)
        thing_name (str)
    """

    __slots__ = ['shadow_name', 'thing_name']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)
class NamedShadowUpdatedSubscriptionRequest(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        shadow_name (str)
        thing_name (str)

    Attributes:
        shadow_name (str)
        thing_name (str)
    """

    __slots__ = ['shadow_name', 'thing_name']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)
class ShadowDeltaUpdatedEvent(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        metadata (typing.Dict[str, typing.Any])
        state (typing.Dict[str, typing.Any])
        timestamp (datetime.datetime)
        version (int)

    Attributes:
        metadata (typing.Dict[str, typing.Any])
        state (typing.Dict[str, typing.Any])
        timestamp (datetime.datetime)
        version (int)
    """

    __slots__ = ['metadata', 'state', 'timestamp', 'version']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)

    @classmethod
    def from_payload(cls, payload):
        # type: (typing.Dict[str, typing.Any]) -> ShadowDeltaUpdatedEvent
        """Build an instance from a decoded payload dict; absent/None keys are skipped."""
        new = cls()
        fields = (
            ('metadata', 'metadata', None),
            ('state', 'state', None),
            ('timestamp', 'timestamp', datetime.datetime.fromtimestamp),
            ('version', 'version', None),
        )
        for key, attr, convert in fields:
            raw = payload.get(key)
            if raw is not None:
                setattr(new, attr, raw if convert is None else convert(raw))
        return new
class ShadowDeltaUpdatedSubscriptionRequest(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        thing_name (str)

    Attributes:
        thing_name (str)
    """

    __slots__ = ['thing_name']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)
class ShadowMetadata(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        desired (typing.Dict[str, typing.Any])
        reported (typing.Dict[str, typing.Any])

    Attributes:
        desired (typing.Dict[str, typing.Any])
        reported (typing.Dict[str, typing.Any])
    """

    __slots__ = ['desired', 'reported']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)

    @classmethod
    def from_payload(cls, payload):
        # type: (typing.Dict[str, typing.Any]) -> ShadowMetadata
        """Build an instance from a decoded payload dict; absent/None keys are skipped."""
        new = cls()
        for key in ('desired', 'reported'):
            raw = payload.get(key)
            if raw is not None:
                setattr(new, key, raw)
        return new
class ShadowState(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        desired (typing.Dict[str, typing.Any])
        reported (typing.Dict[str, typing.Any])

    Attributes:
        desired (typing.Dict[str, typing.Any])
        reported (typing.Dict[str, typing.Any])
    """

    __slots__ = ['desired', 'reported']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)

    @classmethod
    def from_payload(cls, payload):
        # type: (typing.Dict[str, typing.Any]) -> ShadowState
        """Build an instance from a decoded payload dict; absent/None keys are skipped."""
        new = cls()
        for key in ('desired', 'reported'):
            raw = payload.get(key)
            if raw is not None:
                setattr(new, key, raw)
        return new

    def to_payload(self):
        # type: () -> typing.Dict[str, typing.Any]
        """Return the wire-format payload dict; unset fields are omitted."""
        fields = {'desired': self.desired, 'reported': self.reported}
        return {key: value for key, value in fields.items() if value is not None}
class ShadowStateWithDelta(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        delta (typing.Dict[str, typing.Any])
        desired (typing.Dict[str, typing.Any])
        reported (typing.Dict[str, typing.Any])

    Attributes:
        delta (typing.Dict[str, typing.Any])
        desired (typing.Dict[str, typing.Any])
        reported (typing.Dict[str, typing.Any])
    """

    __slots__ = ['delta', 'desired', 'reported']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)

    @classmethod
    def from_payload(cls, payload):
        # type: (typing.Dict[str, typing.Any]) -> ShadowStateWithDelta
        """Build an instance from a decoded payload dict; absent/None keys are skipped."""
        new = cls()
        for key in ('delta', 'desired', 'reported'):
            raw = payload.get(key)
            if raw is not None:
                setattr(new, key, raw)
        return new
class ShadowUpdatedEvent(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        current (ShadowUpdatedSnapshot)
        previous (ShadowUpdatedSnapshot)
        timestamp (datetime.datetime)

    Attributes:
        current (ShadowUpdatedSnapshot)
        previous (ShadowUpdatedSnapshot)
        timestamp (datetime.datetime)
    """

    __slots__ = ['current', 'previous', 'timestamp']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)

    @classmethod
    def from_payload(cls, payload):
        # type: (typing.Dict[str, typing.Any]) -> ShadowUpdatedEvent
        """Build an instance from a decoded payload dict; absent/None keys are skipped."""
        new = cls()
        fields = (
            ('current', 'current', ShadowUpdatedSnapshot.from_payload),
            ('previous', 'previous', ShadowUpdatedSnapshot.from_payload),
            ('timestamp', 'timestamp', datetime.datetime.fromtimestamp),
        )
        for key, attr, convert in fields:
            raw = payload.get(key)
            if raw is not None:
                setattr(new, attr, convert(raw))
        return new
class ShadowUpdatedSnapshot(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        metadata (ShadowMetadata)
        state (ShadowState)
        version (int)

    Attributes:
        metadata (ShadowMetadata)
        state (ShadowState)
        version (int)
    """

    __slots__ = ['metadata', 'state', 'version']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)

    @classmethod
    def from_payload(cls, payload):
        # type: (typing.Dict[str, typing.Any]) -> ShadowUpdatedSnapshot
        """Build an instance from a decoded payload dict; absent/None keys are skipped."""
        new = cls()
        fields = (
            ('metadata', 'metadata', ShadowMetadata.from_payload),
            ('state', 'state', ShadowState.from_payload),
            ('version', 'version', None),
        )
        for key, attr, convert in fields:
            raw = payload.get(key)
            if raw is not None:
                setattr(new, attr, raw if convert is None else convert(raw))
        return new
class ShadowUpdatedSubscriptionRequest(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        thing_name (str)

    Attributes:
        thing_name (str)
    """

    __slots__ = ['thing_name']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)
class UpdateNamedShadowRequest(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        client_token (str)
        shadow_name (str)
        state (ShadowState)
        thing_name (str)
        version (int)

    Attributes:
        client_token (str)
        shadow_name (str)
        state (ShadowState)
        thing_name (str)
        version (int)
    """

    __slots__ = ['client_token', 'shadow_name', 'state', 'thing_name', 'version']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)

    def to_payload(self):
        # type: () -> typing.Dict[str, typing.Any]
        """Return the wire-format payload dict; unset fields are omitted."""
        fields = {
            'clientToken': self.client_token,
            'state': None if self.state is None else self.state.to_payload(),
            'version': self.version,
        }
        return {key: value for key, value in fields.items() if value is not None}
class UpdateNamedShadowSubscriptionRequest(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        shadow_name (str)
        thing_name (str)

    Attributes:
        shadow_name (str)
        thing_name (str)
    """

    __slots__ = ['shadow_name', 'thing_name']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)
class UpdateShadowRequest(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        client_token (str)
        state (ShadowState)
        thing_name (str)
        version (int)

    Attributes:
        client_token (str)
        state (ShadowState)
        thing_name (str)
        version (int)
    """

    __slots__ = ['client_token', 'state', 'thing_name', 'version']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)

    def to_payload(self):
        # type: () -> typing.Dict[str, typing.Any]
        """Return the wire-format payload dict; unset fields are omitted."""
        fields = {
            'clientToken': self.client_token,
            'state': None if self.state is None else self.state.to_payload(),
            'version': self.version,
        }
        return {key: value for key, value in fields.items() if value is not None}
class UpdateShadowResponse(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        client_token (str)
        metadata (ShadowMetadata)
        state (ShadowState)
        timestamp (datetime.datetime)
        version (int)

    Attributes:
        client_token (str)
        metadata (ShadowMetadata)
        state (ShadowState)
        timestamp (datetime.datetime)
        version (int)
    """

    __slots__ = ['client_token', 'metadata', 'state', 'timestamp', 'version']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)

    @classmethod
    def from_payload(cls, payload):
        # type: (typing.Dict[str, typing.Any]) -> UpdateShadowResponse
        """Build an instance from a decoded payload dict; absent/None keys are skipped."""
        new = cls()
        fields = (
            ('clientToken', 'client_token', None),
            ('metadata', 'metadata', ShadowMetadata.from_payload),
            ('state', 'state', ShadowState.from_payload),
            ('timestamp', 'timestamp', datetime.datetime.fromtimestamp),
            ('version', 'version', None),
        )
        for key, attr, convert in fields:
            raw = payload.get(key)
            if raw is not None:
                setattr(new, attr, raw if convert is None else convert(raw))
        return new
class UpdateShadowSubscriptionRequest(awsiot.ModeledClass):
    """
    All attributes default to None; any of them may be set by keyword in the constructor.

    Keyword Args:
        thing_name (str)

    Attributes:
        thing_name (str)
    """

    __slots__ = ['thing_name']

    def __init__(self, *args, **kwargs):
        for name in self.__slots__:
            setattr(self, name, kwargs.get(name))
        # positional arguments are still honored for backwards compatibility
        for name, value in zip(self.__slots__, args):
            setattr(self, name, value)
| 39.043338
| 163
| 0.639602
| 7,069
| 60,361
| 5.35493
| 0.029
| 0.038516
| 0.010937
| 0.021081
| 0.947192
| 0.942939
| 0.935674
| 0.925133
| 0.916918
| 0.894172
| 0
| 0.001158
| 0.270191
| 60,361
| 1,545
| 164
| 39.068608
| 0.858145
| 0.462385
| 0
| 0.815961
| 1
| 0
| 0.163574
| 0.042921
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105863
| false
| 0
| 0.006515
| 0
| 0.262215
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
587d81d589fe81185c0a3fda087f053ed9710fae
| 51,738
|
py
|
Python
|
sdk/python/pulumi_oci/monitoring/outputs.py
|
EladGabay/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2021-08-17T11:14:46.000Z
|
2021-12-31T02:07:03.000Z
|
sdk/python/pulumi_oci/monitoring/outputs.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-09-06T11:21:29.000Z
|
2021-09-06T11:21:29.000Z
|
sdk/python/pulumi_oci/monitoring/outputs.py
|
pulumi-oci/pulumi-oci
|
6841e27d4a1a7e15c672306b769912efbfd3ba99
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-08-24T23:31:30.000Z
|
2022-01-02T19:26:54.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
# Names exported by this module (generated Pulumi output types).
__all__ = [
    'AlarmSuppression',
    'GetAlarmHistoryCollectionEntryResult',
    'GetAlarmStatusesAlarmStatusResult',
    'GetAlarmStatusesAlarmStatusSuppressionResult',
    'GetAlarmStatusesFilterResult',
    'GetAlarmSuppressionResult',
    'GetAlarmsAlarmResult',
    'GetAlarmsAlarmSuppressionResult',
    'GetAlarmsFilterResult',
    'GetMetricDataFilterResult',
    'GetMetricDataMetricDataResult',
    'GetMetricDataMetricDataAggregatedDatapointResult',
    'GetMetricsFilterResult',
    'GetMetricsMetricResult',
]
@pulumi.output_type
class AlarmSuppression(dict):
    """The configuration details for suppressing an alarm."""

    @staticmethod
    def __key_warning(key: str):
        # Warn when a caller uses the camelCase wire key instead of the
        # snake_case property exposed by this output type.
        remap = {
            "timeSuppressFrom": "time_suppress_from",
            "timeSuppressUntil": "time_suppress_until",
        }
        suggest = remap.get(key)
        if suggest:
            pulumi.log.warn(f"Key '{key}' not found in AlarmSuppression. Access the value via the '{suggest}' property getter instead.")

    def __getitem__(self, key: str) -> Any:
        AlarmSuppression.__key_warning(key)
        return super().__getitem__(key)

    def get(self, key: str, default = None) -> Any:
        AlarmSuppression.__key_warning(key)
        return super().get(key, default)

    def __init__(__self__, *,
                 time_suppress_from: str,
                 time_suppress_until: str,
                 description: Optional[str] = None):
        """
        :param str time_suppress_from: (Updatable) The start date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T01:02:29.600Z`
        :param str time_suppress_until: (Updatable) The end date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T02:02:29.600Z`
        :param str description: (Updatable) Human-readable reason for suppressing alarm notifications. It does not have to be unique, and it's changeable. Avoid entering confidential information.
        """
        pulumi.set(__self__, "time_suppress_from", time_suppress_from)
        pulumi.set(__self__, "time_suppress_until", time_suppress_until)
        # description is optional; only store it when supplied
        if description is not None:
            pulumi.set(__self__, "description", description)

    @property
    @pulumi.getter(name="timeSuppressFrom")
    def time_suppress_from(self) -> str:
        """
        (Updatable) The start date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T01:02:29.600Z`
        """
        return pulumi.get(self, "time_suppress_from")

    @property
    @pulumi.getter(name="timeSuppressUntil")
    def time_suppress_until(self) -> str:
        """
        (Updatable) The end date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T02:02:29.600Z`
        """
        return pulumi.get(self, "time_suppress_until")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        """
        (Updatable) Human-readable reason for suppressing alarm notifications. It does not have to be unique, and it's changeable. Avoid entering confidential information.
        """
        return pulumi.get(self, "description")
@pulumi.output_type
class GetAlarmHistoryCollectionEntryResult(dict):
    """One entry of an alarm history collection."""

    def __init__(__self__, *,
                 summary: str,
                 timestamp: str,
                 timestamp_triggered: str):
        """
        :param str summary: Description for this alarm history entry. Avoid entering confidential information.
        :param str timestamp: Timestamp for this alarm history entry. Format defined by RFC3339. Example: `2019-02-01T01:02:29.600Z`
        :param str timestamp_triggered: Timestamp for the transition of the alarm state. For example, the time when the alarm transitioned from OK to Firing. Available for state transition entries only. Note: A three-minute lag for this value accounts for any late-arriving metrics. Example: `2019-02-01T0:59:00.789Z`
        """
        values = (
            ("summary", summary),
            ("timestamp", timestamp),
            ("timestamp_triggered", timestamp_triggered),
        )
        for key, value in values:
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def summary(self) -> str:
        """
        Description for this alarm history entry. Avoid entering confidential information.
        """
        return pulumi.get(self, "summary")

    @property
    @pulumi.getter
    def timestamp(self) -> str:
        """
        Timestamp for this alarm history entry. Format defined by RFC3339. Example: `2019-02-01T01:02:29.600Z`
        """
        return pulumi.get(self, "timestamp")

    @property
    @pulumi.getter(name="timestampTriggered")
    def timestamp_triggered(self) -> str:
        """
        Timestamp for the transition of the alarm state. For example, the time when the alarm transitioned from OK to Firing. Available for state transition entries only. Note: A three-minute lag for this value accounts for any late-arriving metrics. Example: `2019-02-01T0:59:00.789Z`
        """
        return pulumi.get(self, "timestamp_triggered")
@pulumi.output_type
class GetAlarmStatusesAlarmStatusResult(dict):
    """Status summary for a single alarm."""

    def __init__(__self__, *,
                 display_name: str,
                 id: str,
                 severity: str,
                 status: str,
                 suppression: 'outputs.GetAlarmStatusesAlarmStatusSuppressionResult',
                 timestamp_triggered: str):
        """
        :param str display_name: A filter to return only resources that match the given display name exactly. Use this filter to list an alarm by name. Alternatively, when you know the alarm OCID, use the GetAlarm operation.
        :param str id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the alarm.
        :param str severity: The configured severity of the alarm. Example: `CRITICAL`
        :param str status: The status of this alarm. Example: `FIRING`
        :param 'GetAlarmStatusesAlarmStatusSuppressionArgs' suppression: The configuration details for suppressing an alarm.
        :param str timestamp_triggered: Timestamp for the transition of the alarm state. For example, the time when the alarm transitioned from OK to Firing. Example: `2019-02-01T01:02:29.600Z`
        """
        values = (
            ("display_name", display_name),
            ("id", id),
            ("severity", severity),
            ("status", status),
            ("suppression", suppression),
            ("timestamp_triggered", timestamp_triggered),
        )
        for key, value in values:
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> str:
        """
        A filter to return only resources that match the given display name exactly. Use this filter to list an alarm by name. Alternatively, when you know the alarm OCID, use the GetAlarm operation.
        """
        return pulumi.get(self, "display_name")

    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the alarm.
        """
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def severity(self) -> str:
        """
        The configured severity of the alarm. Example: `CRITICAL`
        """
        return pulumi.get(self, "severity")

    @property
    @pulumi.getter
    def status(self) -> str:
        """
        The status of this alarm. Example: `FIRING`
        """
        return pulumi.get(self, "status")

    @property
    @pulumi.getter
    def suppression(self) -> 'outputs.GetAlarmStatusesAlarmStatusSuppressionResult':
        """
        The configuration details for suppressing an alarm.
        """
        return pulumi.get(self, "suppression")

    @property
    @pulumi.getter(name="timestampTriggered")
    def timestamp_triggered(self) -> str:
        """
        Timestamp for the transition of the alarm state. For example, the time when the alarm transitioned from OK to Firing. Example: `2019-02-01T01:02:29.600Z`
        """
        return pulumi.get(self, "timestamp_triggered")
@pulumi.output_type
class GetAlarmStatusesAlarmStatusSuppressionResult(dict):
    """The configuration details for suppressing an alarm."""

    def __init__(__self__, *,
                 description: str,
                 time_suppress_from: str,
                 time_suppress_until: str):
        """
        :param str description: Human-readable reason for suppressing alarm notifications. It does not have to be unique, and it's changeable. Avoid entering confidential information.
        :param str time_suppress_from: The start date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T01:02:29.600Z`
        :param str time_suppress_until: The end date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T02:02:29.600Z`
        """
        values = (
            ("description", description),
            ("time_suppress_from", time_suppress_from),
            ("time_suppress_until", time_suppress_until),
        )
        for key, value in values:
            pulumi.set(__self__, key, value)

    @property
    @pulumi.getter
    def description(self) -> str:
        """
        Human-readable reason for suppressing alarm notifications. It does not have to be unique, and it's changeable. Avoid entering confidential information.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter(name="timeSuppressFrom")
    def time_suppress_from(self) -> str:
        """
        The start date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T01:02:29.600Z`
        """
        return pulumi.get(self, "time_suppress_from")

    @property
    @pulumi.getter(name="timeSuppressUntil")
    def time_suppress_until(self) -> str:
        """
        The end date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T02:02:29.600Z`
        """
        return pulumi.get(self, "time_suppress_until")
@pulumi.output_type
class GetAlarmStatusesFilterResult(dict):
    """
    Name/values filter entry for the alarm-statuses data source; the
    optional ``regex`` flag is stored only when the caller supplied it.
    """
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str],
                 regex: Optional[bool] = None):
        for attr, val in (("name", name), ("values", values)):
            pulumi.set(__self__, attr, val)
        # Optional flag: only recorded when explicitly provided.
        if regex is not None:
            pulumi.set(__self__, "regex", regex)
    @property
    @pulumi.getter
    def name(self) -> str:
        """Name of the field this filter applies to."""
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """Values matched against the named field."""
        return pulumi.get(self, "values")
    @property
    @pulumi.getter
    def regex(self) -> Optional[bool]:
        """Optional flag supplied with the filter; ``None`` unless set."""
        return pulumi.get(self, "regex")
@pulumi.output_type
class GetAlarmSuppressionResult(dict):
    """
    Suppression configuration of a single alarm: the reason for muting
    notifications and the inclusive RFC3339 bounds of the window.
    """
    def __init__(__self__, *,
                 description: str,
                 time_suppress_from: str,
                 time_suppress_until: str):
        """
        :param str description: Human-readable reason for suppressing alarm notifications. It does not have to be unique, and it's changeable. Avoid entering confidential information.
        :param str time_suppress_from: The start date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T01:02:29.600Z`
        :param str time_suppress_until: The end date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T02:02:29.600Z`
        """
        # All three fields are required; store them under snake_case keys.
        for field, val in (
                ("description", description),
                ("time_suppress_from", time_suppress_from),
                ("time_suppress_until", time_suppress_until)):
            pulumi.set(__self__, field, val)
    @property
    @pulumi.getter
    def description(self) -> str:
        """
        Human-readable reason for suppressing alarm notifications. It does not have to be unique, and it's changeable. Avoid entering confidential information.
        """
        return pulumi.get(self, "description")
    @property
    @pulumi.getter(name="timeSuppressFrom")
    def time_suppress_from(self) -> str:
        """
        The start date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T01:02:29.600Z`
        """
        return pulumi.get(self, "time_suppress_from")
    @property
    @pulumi.getter(name="timeSuppressUntil")
    def time_suppress_until(self) -> str:
        """
        The end date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T02:02:29.600Z`
        """
        return pulumi.get(self, "time_suppress_until")
@pulumi.output_type
class GetAlarmsAlarmResult(dict):
    """
    A single alarm returned by the getAlarms data source: its definition
    (query, destinations, durations, severity) together with lifecycle
    metadata (state, created/updated timestamps) exposed as read-only
    properties.
    """
    def __init__(__self__, *,
                 body: str,
                 compartment_id: str,
                 defined_tags: Mapping[str, Any],
                 destinations: Sequence[str],
                 display_name: str,
                 freeform_tags: Mapping[str, Any],
                 id: str,
                 is_enabled: bool,
                 metric_compartment_id: str,
                 metric_compartment_id_in_subtree: bool,
                 namespace: str,
                 pending_duration: str,
                 query: str,
                 repeat_notification_duration: str,
                 resolution: str,
                 resource_group: str,
                 severity: str,
                 state: str,
                 suppression: 'outputs.GetAlarmsAlarmSuppressionResult',
                 time_created: str,
                 time_updated: str):
        """
        :param str body: The human-readable content of the notification delivered. Oracle recommends providing guidance to operators for resolving the alarm condition. Consider adding links to standard runbook practices. Avoid entering confidential information. Example: `High CPU usage alert. Follow runbook instructions for resolution.`
        :param str compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the resources monitored by the metric that you are searching for. Use tenancyId to search in the root compartment. Example: `ocid1.compartment.oc1..exampleuniqueID`
        :param Mapping[str, Any] defined_tags: Usage of predefined tag keys. These predefined keys are scoped to namespaces. Example: `{"Operations.CostCenter": "42"}`
        :param Sequence[str] destinations: A list of destinations to which the notifications for this alarm will be delivered. Each destination is represented by an [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) related to the supported destination service. For example, a destination using the Notifications service is represented by a topic OCID. Supported destination services: Notifications Service. Limit: One destination per supported destination service.
        :param str display_name: A filter to return only resources that match the given display name exactly. Use this filter to list an alarm by name. Alternatively, when you know the alarm OCID, use the GetAlarm operation.
        :param Mapping[str, Any] freeform_tags: Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"Department": "Finance"}`
        :param str id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the alarm.
        :param bool is_enabled: Whether the alarm is enabled. Example: `true`
        :param str metric_compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the metric being evaluated by the alarm.
        :param bool metric_compartment_id_in_subtree: When true, the alarm evaluates metrics from all compartments and subcompartments. The parameter can only be set to true when metricCompartmentId is the tenancy OCID (the tenancy is the root compartment). A true value requires the user to have tenancy-level permissions. If this requirement is not met, then the call is rejected. When false, the alarm evaluates metrics from only the compartment specified in metricCompartmentId. Default is false. Example: `true`
        :param str namespace: The source service or application emitting the metric that is evaluated by the alarm. Example: `oci_computeagent`
        :param str pending_duration: The period of time that the condition defined in the alarm must persist before the alarm state changes from "OK" to "FIRING". For example, a value of 5 minutes means that the alarm must persist in breaching the condition for five minutes before the alarm updates its state to "FIRING".
        :param str query: The Monitoring Query Language (MQL) expression to evaluate for the alarm. The Alarms feature of the Monitoring service interprets results for each returned time series as Boolean values, where zero represents false and a non-zero value represents true. A true value means that the trigger rule condition has been met. The query must specify a metric, statistic, interval, and trigger rule (threshold or absence). Supported values for interval: `1m`-`60m` (also `1h`). You can optionally specify dimensions and grouping functions. Supported grouping functions: `grouping()`, `groupBy()`. For details about Monitoring Query Language (MQL), see [Monitoring Query Language (MQL) Reference](https://docs.cloud.oracle.com/iaas/Content/Monitoring/Reference/mql.htm). For available dimensions, review the metric definition for the supported service. See [Supported Services](https://docs.cloud.oracle.com/iaas/Content/Monitoring/Concepts/monitoringoverview.htm#SupportedServices).
        :param str repeat_notification_duration: The frequency at which notifications are re-submitted, if the alarm keeps firing without interruption. Format defined by ISO 8601. For example, `PT4H` indicates four hours. Minimum: PT1M. Maximum: P30D.
        :param str resolution: The time between calculated aggregation windows for the alarm. Supported value: `1m`
        :param str resource_group: Resource group specified as a filter for metric data retrieved by the alarm. A resource group is a custom string that can be used as a filter. Only one resource group can be applied per metric. A valid resourceGroup value starts with an alphabetical character and includes only alphanumeric characters, periods (.), underscores (_), hyphens (-), and dollar signs ($). Avoid entering confidential information. Example: `frontend-fleet`
        :param str severity: The perceived type of response required when the alarm is in the "FIRING" state. Example: `CRITICAL`
        :param str state: A filter to return only alarms that match the given lifecycle state exactly. When not specified, only alarms in the ACTIVE lifecycle state are listed.
        :param 'GetAlarmsAlarmSuppressionArgs' suppression: The configuration details for suppressing an alarm.
        :param str time_created: The date and time the alarm was created. Format defined by RFC3339. Example: `2019-02-01T01:02:29.600Z`
        :param str time_updated: The date and time the alarm was last updated. Format defined by RFC3339. Example: `2019-02-03T01:02:29.600Z`
        """
        pulumi.set(__self__, "body", body)
        pulumi.set(__self__, "compartment_id", compartment_id)
        pulumi.set(__self__, "defined_tags", defined_tags)
        pulumi.set(__self__, "destinations", destinations)
        pulumi.set(__self__, "display_name", display_name)
        pulumi.set(__self__, "freeform_tags", freeform_tags)
        pulumi.set(__self__, "id", id)
        pulumi.set(__self__, "is_enabled", is_enabled)
        pulumi.set(__self__, "metric_compartment_id", metric_compartment_id)
        pulumi.set(__self__, "metric_compartment_id_in_subtree", metric_compartment_id_in_subtree)
        pulumi.set(__self__, "namespace", namespace)
        pulumi.set(__self__, "pending_duration", pending_duration)
        pulumi.set(__self__, "query", query)
        pulumi.set(__self__, "repeat_notification_duration", repeat_notification_duration)
        pulumi.set(__self__, "resolution", resolution)
        pulumi.set(__self__, "resource_group", resource_group)
        pulumi.set(__self__, "severity", severity)
        pulumi.set(__self__, "state", state)
        pulumi.set(__self__, "suppression", suppression)
        pulumi.set(__self__, "time_created", time_created)
        pulumi.set(__self__, "time_updated", time_updated)
    @property
    @pulumi.getter
    def body(self) -> str:
        """
        The human-readable content of the notification delivered. Oracle recommends providing guidance to operators for resolving the alarm condition. Consider adding links to standard runbook practices. Avoid entering confidential information. Example: `High CPU usage alert. Follow runbook instructions for resolution.`
        """
        return pulumi.get(self, "body")
    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the resources monitored by the metric that you are searching for. Use tenancyId to search in the root compartment. Example: `ocid1.compartment.oc1..exampleuniqueID`
        """
        return pulumi.get(self, "compartment_id")
    @property
    @pulumi.getter(name="definedTags")
    def defined_tags(self) -> Mapping[str, Any]:
        """
        Usage of predefined tag keys. These predefined keys are scoped to namespaces. Example: `{"Operations.CostCenter": "42"}`
        """
        return pulumi.get(self, "defined_tags")
    @property
    @pulumi.getter
    def destinations(self) -> Sequence[str]:
        """
        A list of destinations to which the notifications for this alarm will be delivered. Each destination is represented by an [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) related to the supported destination service. For example, a destination using the Notifications service is represented by a topic OCID. Supported destination services: Notifications Service. Limit: One destination per supported destination service.
        """
        return pulumi.get(self, "destinations")
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> str:
        """
        A filter to return only resources that match the given display name exactly. Use this filter to list an alarm by name. Alternatively, when you know the alarm OCID, use the GetAlarm operation.
        """
        return pulumi.get(self, "display_name")
    @property
    @pulumi.getter(name="freeformTags")
    def freeform_tags(self) -> Mapping[str, Any]:
        """
        Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"Department": "Finance"}`
        """
        return pulumi.get(self, "freeform_tags")
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the alarm.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter(name="isEnabled")
    def is_enabled(self) -> bool:
        """
        Whether the alarm is enabled. Example: `true`
        """
        return pulumi.get(self, "is_enabled")
    @property
    @pulumi.getter(name="metricCompartmentId")
    def metric_compartment_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the metric being evaluated by the alarm.
        """
        return pulumi.get(self, "metric_compartment_id")
    @property
    @pulumi.getter(name="metricCompartmentIdInSubtree")
    def metric_compartment_id_in_subtree(self) -> bool:
        """
        When true, the alarm evaluates metrics from all compartments and subcompartments. The parameter can only be set to true when metricCompartmentId is the tenancy OCID (the tenancy is the root compartment). A true value requires the user to have tenancy-level permissions. If this requirement is not met, then the call is rejected. When false, the alarm evaluates metrics from only the compartment specified in metricCompartmentId. Default is false. Example: `true`
        """
        return pulumi.get(self, "metric_compartment_id_in_subtree")
    @property
    @pulumi.getter
    def namespace(self) -> str:
        """
        The source service or application emitting the metric that is evaluated by the alarm. Example: `oci_computeagent`
        """
        return pulumi.get(self, "namespace")
    @property
    @pulumi.getter(name="pendingDuration")
    def pending_duration(self) -> str:
        """
        The period of time that the condition defined in the alarm must persist before the alarm state changes from "OK" to "FIRING". For example, a value of 5 minutes means that the alarm must persist in breaching the condition for five minutes before the alarm updates its state to "FIRING".
        """
        return pulumi.get(self, "pending_duration")
    @property
    @pulumi.getter
    def query(self) -> str:
        """
        The Monitoring Query Language (MQL) expression to evaluate for the alarm. The Alarms feature of the Monitoring service interprets results for each returned time series as Boolean values, where zero represents false and a non-zero value represents true. A true value means that the trigger rule condition has been met. The query must specify a metric, statistic, interval, and trigger rule (threshold or absence). Supported values for interval: `1m`-`60m` (also `1h`). You can optionally specify dimensions and grouping functions. Supported grouping functions: `grouping()`, `groupBy()`. For details about Monitoring Query Language (MQL), see [Monitoring Query Language (MQL) Reference](https://docs.cloud.oracle.com/iaas/Content/Monitoring/Reference/mql.htm). For available dimensions, review the metric definition for the supported service. See [Supported Services](https://docs.cloud.oracle.com/iaas/Content/Monitoring/Concepts/monitoringoverview.htm#SupportedServices).
        """
        return pulumi.get(self, "query")
    @property
    @pulumi.getter(name="repeatNotificationDuration")
    def repeat_notification_duration(self) -> str:
        """
        The frequency at which notifications are re-submitted, if the alarm keeps firing without interruption. Format defined by ISO 8601. For example, `PT4H` indicates four hours. Minimum: PT1M. Maximum: P30D.
        """
        return pulumi.get(self, "repeat_notification_duration")
    @property
    @pulumi.getter
    def resolution(self) -> str:
        """
        The time between calculated aggregation windows for the alarm. Supported value: `1m`
        """
        return pulumi.get(self, "resolution")
    @property
    @pulumi.getter(name="resourceGroup")
    def resource_group(self) -> str:
        """
        Resource group specified as a filter for metric data retrieved by the alarm. A resource group is a custom string that can be used as a filter. Only one resource group can be applied per metric. A valid resourceGroup value starts with an alphabetical character and includes only alphanumeric characters, periods (.), underscores (_), hyphens (-), and dollar signs ($). Avoid entering confidential information. Example: `frontend-fleet`
        """
        return pulumi.get(self, "resource_group")
    @property
    @pulumi.getter
    def severity(self) -> str:
        """
        The perceived type of response required when the alarm is in the "FIRING" state. Example: `CRITICAL`
        """
        return pulumi.get(self, "severity")
    @property
    @pulumi.getter
    def state(self) -> str:
        """
        A filter to return only alarms that match the given lifecycle state exactly. When not specified, only alarms in the ACTIVE lifecycle state are listed.
        """
        return pulumi.get(self, "state")
    @property
    @pulumi.getter
    def suppression(self) -> 'outputs.GetAlarmsAlarmSuppressionResult':
        """
        The configuration details for suppressing an alarm.
        """
        return pulumi.get(self, "suppression")
    @property
    @pulumi.getter(name="timeCreated")
    def time_created(self) -> str:
        """
        The date and time the alarm was created. Format defined by RFC3339. Example: `2019-02-01T01:02:29.600Z`
        """
        return pulumi.get(self, "time_created")
    @property
    @pulumi.getter(name="timeUpdated")
    def time_updated(self) -> str:
        """
        The date and time the alarm was last updated. Format defined by RFC3339. Example: `2019-02-03T01:02:29.600Z`
        """
        return pulumi.get(self, "time_updated")
@pulumi.output_type
class GetAlarmsAlarmSuppressionResult(dict):
    """
    Suppression details of an alarm listed by the getAlarms data source:
    the mute reason and the inclusive RFC3339 window bounds.
    """
    def __init__(__self__, *,
                 description: str,
                 time_suppress_from: str,
                 time_suppress_until: str):
        """
        :param str description: Human-readable reason for suppressing alarm notifications. It does not have to be unique, and it's changeable. Avoid entering confidential information.
        :param str time_suppress_from: The start date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T01:02:29.600Z`
        :param str time_suppress_until: The end date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T02:02:29.600Z`
        """
        # Persist the three required fields under their snake_case names.
        for key, value in (
                ("description", description),
                ("time_suppress_from", time_suppress_from),
                ("time_suppress_until", time_suppress_until)):
            pulumi.set(__self__, key, value)
    @property
    @pulumi.getter
    def description(self) -> str:
        """
        Human-readable reason for suppressing alarm notifications. It does not have to be unique, and it's changeable. Avoid entering confidential information.
        """
        return pulumi.get(self, "description")
    @property
    @pulumi.getter(name="timeSuppressFrom")
    def time_suppress_from(self) -> str:
        """
        The start date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T01:02:29.600Z`
        """
        return pulumi.get(self, "time_suppress_from")
    @property
    @pulumi.getter(name="timeSuppressUntil")
    def time_suppress_until(self) -> str:
        """
        The end date and time for the suppression to take place, inclusive. Format defined by RFC3339. Example: `2019-02-01T02:02:29.600Z`
        """
        return pulumi.get(self, "time_suppress_until")
@pulumi.output_type
class GetAlarmsFilterResult(dict):
    """
    Name/values filter entry for the getAlarms data source; the optional
    ``regex`` flag is stored only when the caller supplied it.
    """
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str],
                 regex: Optional[bool] = None):
        for attr, val in (("name", name), ("values", values)):
            pulumi.set(__self__, attr, val)
        # Optional flag: only recorded when explicitly provided.
        if regex is not None:
            pulumi.set(__self__, "regex", regex)
    @property
    @pulumi.getter
    def name(self) -> str:
        """Name of the field this filter applies to."""
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """Values matched against the named field."""
        return pulumi.get(self, "values")
    @property
    @pulumi.getter
    def regex(self) -> Optional[bool]:
        """Optional flag supplied with the filter; ``None`` unless set."""
        return pulumi.get(self, "regex")
@pulumi.output_type
class GetMetricDataFilterResult(dict):
    """
    Name/values filter entry for the getMetricData data source; the
    optional ``regex`` flag is stored only when the caller supplied it.
    """
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str],
                 regex: Optional[bool] = None):
        """
        :param str name: The name of the metric. Example: `CpuUtilization`
        """
        for attr, val in (("name", name), ("values", values)):
            pulumi.set(__self__, attr, val)
        # Optional flag: only recorded when explicitly provided.
        if regex is not None:
            pulumi.set(__self__, "regex", regex)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the metric. Example: `CpuUtilization`
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """Values matched against the named field."""
        return pulumi.get(self, "values")
    @property
    @pulumi.getter
    def regex(self) -> Optional[bool]:
        """Optional flag supplied with the filter; ``None`` unless set."""
        return pulumi.get(self, "regex")
@pulumi.output_type
class GetMetricDataMetricDataResult(dict):
    """
    One metric stream returned by the getMetricData data source: the
    rolled-up timestamp/value datapoints plus the query, compartment,
    dimension, and time-range metadata that produced them.
    """
    def __init__(__self__, *,
                 aggregated_datapoints: Sequence['outputs.GetMetricDataMetricDataAggregatedDatapointResult'],
                 compartment_id: str,
                 compartment_id_in_subtree: bool,
                 dimensions: Mapping[str, Any],
                 end_time: str,
                 metadata: Mapping[str, Any],
                 name: str,
                 namespace: str,
                 query: str,
                 resolution: str,
                 resource_group: str,
                 start_time: str):
        """
        :param Sequence['GetMetricDataMetricDataAggregatedDatapointArgs'] aggregated_datapoints: The list of timestamp-value pairs returned for the specified request. Metric values are rolled up to the start time specified in the request. For important limits information related to data points, see MetricData Reference at the top of this page.
        :param str compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the resources monitored by the metric that you are searching for. Use tenancyId to search in the root compartment. Example: `ocid1.compartment.oc1..exampleuniqueID`
        :param bool compartment_id_in_subtree: When true, returns resources from all compartments and subcompartments. The parameter can only be set to true when compartmentId is the tenancy OCID (the tenancy is the root compartment). A true value requires the user to have tenancy-level permissions. If this requirement is not met, then the call is rejected. When false, returns resources from only the compartment specified in compartmentId. Default is false.
        :param Mapping[str, Any] dimensions: Qualifiers provided in the definition of the returned metric. Available dimensions vary by metric namespace. Each dimension takes the form of a key-value pair. Example: `"resourceId": "ocid1.instance.region1.phx.exampleuniqueID"`
        :param str end_time: The end of the time range to use when searching for metric data points. Format is defined by RFC3339. The response excludes metric data points for the endTime. Default value: the timestamp representing when the call was sent. Example: `2019-02-01T02:02:29.600Z`
        :param Mapping[str, Any] metadata: The references provided in a metric definition to indicate extra information about the metric. Example: `"unit": "bytes"`
        :param str name: The name of the metric. Example: `CpuUtilization`
        :param str namespace: The source service or application to use when searching for metric data points to aggregate. Example: `oci_computeagent`
        :param str query: The Monitoring Query Language (MQL) expression to use when searching for metric data points to aggregate. The query must specify a metric, statistic, and interval. Supported values for interval: `1m`-`60m` (also `1h`). You can optionally specify dimensions and grouping functions. Supported grouping functions: `grouping()`, `groupBy()`.
        :param str resolution: The time between calculated aggregation windows. Use with the query interval to vary the frequency at which aggregated data points are returned. For example, use a query interval of 5 minutes with a resolution of 1 minute to retrieve five-minute aggregations at a one-minute frequency. The resolution must be equal or less than the interval in the query. The default resolution is 1m (one minute). Supported values: `1m`-`60m` (also `1h`). Example: `5m`
        :param str resource_group: Resource group that you want to use as a filter. The specified resource group must exist in the definition of the posted metric. Only one resource group can be applied per metric. A valid resourceGroup value starts with an alphabetical character and includes only alphanumeric characters, periods (.), underscores (_), hyphens (-), and dollar signs ($). Avoid entering confidential information. Example: `frontend-fleet`
        :param str start_time: The beginning of the time range to use when searching for metric data points. Format is defined by RFC3339. The response includes metric data points for the startTime. Default value: the timestamp 3 hours before the call was sent. Example: `2019-02-01T01:02:29.600Z`
        """
        pulumi.set(__self__, "aggregated_datapoints", aggregated_datapoints)
        pulumi.set(__self__, "compartment_id", compartment_id)
        pulumi.set(__self__, "compartment_id_in_subtree", compartment_id_in_subtree)
        pulumi.set(__self__, "dimensions", dimensions)
        pulumi.set(__self__, "end_time", end_time)
        pulumi.set(__self__, "metadata", metadata)
        pulumi.set(__self__, "name", name)
        pulumi.set(__self__, "namespace", namespace)
        pulumi.set(__self__, "query", query)
        pulumi.set(__self__, "resolution", resolution)
        pulumi.set(__self__, "resource_group", resource_group)
        pulumi.set(__self__, "start_time", start_time)
    @property
    @pulumi.getter(name="aggregatedDatapoints")
    def aggregated_datapoints(self) -> Sequence['outputs.GetMetricDataMetricDataAggregatedDatapointResult']:
        """
        The list of timestamp-value pairs returned for the specified request. Metric values are rolled up to the start time specified in the request. For important limits information related to data points, see MetricData Reference at the top of this page.
        """
        return pulumi.get(self, "aggregated_datapoints")
    @property
    @pulumi.getter(name="compartmentId")
    def compartment_id(self) -> str:
        """
        The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the resources monitored by the metric that you are searching for. Use tenancyId to search in the root compartment. Example: `ocid1.compartment.oc1..exampleuniqueID`
        """
        return pulumi.get(self, "compartment_id")
    @property
    @pulumi.getter(name="compartmentIdInSubtree")
    def compartment_id_in_subtree(self) -> bool:
        """
        When true, returns resources from all compartments and subcompartments. The parameter can only be set to true when compartmentId is the tenancy OCID (the tenancy is the root compartment). A true value requires the user to have tenancy-level permissions. If this requirement is not met, then the call is rejected. When false, returns resources from only the compartment specified in compartmentId. Default is false.
        """
        return pulumi.get(self, "compartment_id_in_subtree")
    @property
    @pulumi.getter
    def dimensions(self) -> Mapping[str, Any]:
        """
        Qualifiers provided in the definition of the returned metric. Available dimensions vary by metric namespace. Each dimension takes the form of a key-value pair. Example: `"resourceId": "ocid1.instance.region1.phx.exampleuniqueID"`
        """
        return pulumi.get(self, "dimensions")
    @property
    @pulumi.getter(name="endTime")
    def end_time(self) -> str:
        """
        The end of the time range to use when searching for metric data points. Format is defined by RFC3339. The response excludes metric data points for the endTime. Default value: the timestamp representing when the call was sent. Example: `2019-02-01T02:02:29.600Z`
        """
        return pulumi.get(self, "end_time")
    @property
    @pulumi.getter
    def metadata(self) -> Mapping[str, Any]:
        """
        The references provided in a metric definition to indicate extra information about the metric. Example: `"unit": "bytes"`
        """
        return pulumi.get(self, "metadata")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the metric. Example: `CpuUtilization`
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def namespace(self) -> str:
        """
        The source service or application to use when searching for metric data points to aggregate. Example: `oci_computeagent`
        """
        return pulumi.get(self, "namespace")
    @property
    @pulumi.getter
    def query(self) -> str:
        """
        The Monitoring Query Language (MQL) expression to use when searching for metric data points to aggregate. The query must specify a metric, statistic, and interval. Supported values for interval: `1m`-`60m` (also `1h`). You can optionally specify dimensions and grouping functions. Supported grouping functions: `grouping()`, `groupBy()`.
        """
        return pulumi.get(self, "query")
    @property
    @pulumi.getter
    def resolution(self) -> str:
        """
        The time between calculated aggregation windows. Use with the query interval to vary the frequency at which aggregated data points are returned. For example, use a query interval of 5 minutes with a resolution of 1 minute to retrieve five-minute aggregations at a one-minute frequency. The resolution must be equal or less than the interval in the query. The default resolution is 1m (one minute). Supported values: `1m`-`60m` (also `1h`). Example: `5m`
        """
        return pulumi.get(self, "resolution")
    @property
    @pulumi.getter(name="resourceGroup")
    def resource_group(self) -> str:
        """
        Resource group that you want to use as a filter. The specified resource group must exist in the definition of the posted metric. Only one resource group can be applied per metric. A valid resourceGroup value starts with an alphabetical character and includes only alphanumeric characters, periods (.), underscores (_), hyphens (-), and dollar signs ($). Avoid entering confidential information. Example: `frontend-fleet`
        """
        return pulumi.get(self, "resource_group")
    @property
    @pulumi.getter(name="startTime")
    def start_time(self) -> str:
        """
        The beginning of the time range to use when searching for metric data points. Format is defined by RFC3339. The response includes metric data points for the startTime. Default value: the timestamp 3 hours before the call was sent. Example: `2019-02-01T01:02:29.600Z`
        """
        return pulumi.get(self, "start_time")
@pulumi.output_type
class GetMetricDataMetricDataAggregatedDatapointResult(dict):
    """
    A single timestamp/value pair inside an aggregated metric-data stream.
    """
    def __init__(__self__, *,
                 timestamp: str,
                 value: float):
        """
        :param str timestamp: The date and time associated with the value of this data point. Format defined by RFC3339. Example: `2019-02-01T01:02:29.600Z`
        :param float value: Numeric value of the metric. Example: `10.4`
        """
        for key, val in (("timestamp", timestamp), ("value", value)):
            pulumi.set(__self__, key, val)
    @property
    @pulumi.getter
    def timestamp(self) -> str:
        """
        The date and time associated with the value of this data point. Format defined by RFC3339. Example: `2019-02-01T01:02:29.600Z`
        """
        return pulumi.get(self, "timestamp")
    @property
    @pulumi.getter
    def value(self) -> float:
        """
        Numeric value of the metric. Example: `10.4`
        """
        return pulumi.get(self, "value")
@pulumi.output_type
class GetMetricsFilterResult(dict):
    """
    Name/values filter entry for the getMetrics data source; the optional
    ``regex`` flag is stored only when the caller supplied it.
    """
    def __init__(__self__, *,
                 name: str,
                 values: Sequence[str],
                 regex: Optional[bool] = None):
        """
        :param str name: The metric name to use when searching for metric definitions. Example: `CpuUtilization`
        """
        for attr, val in (("name", name), ("values", values)):
            pulumi.set(__self__, attr, val)
        # Optional flag: only recorded when explicitly provided.
        if regex is not None:
            pulumi.set(__self__, "regex", regex)
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The metric name to use when searching for metric definitions. Example: `CpuUtilization`
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def values(self) -> Sequence[str]:
        """Values matched against the named field."""
        return pulumi.get(self, "values")
    @property
    @pulumi.getter
    def regex(self) -> Optional[bool]:
        """Optional flag supplied with the filter; ``None`` unless set."""
        return pulumi.get(self, "regex")
@pulumi.output_type
class GetMetricsMetricResult(dict):
def __init__(__self__, *,
compartment_id: str,
compartment_id_in_subtree: bool,
dimension_filters: Mapping[str, Any],
dimensions: Mapping[str, Any],
group_bies: Sequence[str],
name: str,
namespace: str,
resource_group: str):
"""
:param str compartment_id: The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the resources monitored by the metric that you are searching for. Use tenancyId to search in the root compartment. Example: `ocid1.compartment.oc1..exampleuniqueID`
:param bool compartment_id_in_subtree: When true, returns resources from all compartments and subcompartments. The parameter can only be set to true when compartmentId is the tenancy OCID (the tenancy is the root compartment). A true value requires the user to have tenancy-level permissions. If this requirement is not met, then the call is rejected. When false, returns resources from only the compartment specified in compartmentId. Default is false.
:param Mapping[str, Any] dimension_filters: Qualifiers that you want to use when searching for metric definitions. Available dimensions vary by metric namespace. Each dimension takes the form of a key-value pair. Example: { "resourceId": "<var><instance_OCID></var>" }
:param Mapping[str, Any] dimensions: Qualifiers provided in a metric definition. Available dimensions vary by metric namespace. Each dimension takes the form of a key-value pair. Example: `"resourceId": "ocid1.instance.region1.phx.exampleuniqueID"`
:param Sequence[str] group_bies: Group metrics by these fields in the response. For example, to list all metric namespaces available in a compartment, groupBy the "namespace" field. Supported fields: namespace, name, resourceGroup.
:param str name: The metric name to use when searching for metric definitions. Example: `CpuUtilization`
:param str namespace: The source service or application to use when searching for metric definitions. Example: `oci_computeagent`
:param str resource_group: Resource group that you want to use as a filter. The specified resource group must exist in the definition of the posted metric. Only one resource group can be applied per metric. A valid resourceGroup value starts with an alphabetical character and includes only alphanumeric characters, periods (.), underscores (_), hyphens (-), and dollar signs ($). Avoid entering confidential information. Example: `frontend-fleet`
"""
pulumi.set(__self__, "compartment_id", compartment_id)
pulumi.set(__self__, "compartment_id_in_subtree", compartment_id_in_subtree)
pulumi.set(__self__, "dimension_filters", dimension_filters)
pulumi.set(__self__, "dimensions", dimensions)
pulumi.set(__self__, "group_bies", group_bies)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "namespace", namespace)
pulumi.set(__self__, "resource_group", resource_group)
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> str:
"""
The [OCID](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm) of the compartment containing the resources monitored by the metric that you are searching for. Use tenancyId to search in the root compartment. Example: `ocid1.compartment.oc1..exampleuniqueID`
"""
return pulumi.get(self, "compartment_id")
@property
@pulumi.getter(name="compartmentIdInSubtree")
def compartment_id_in_subtree(self) -> bool:
"""
When true, returns resources from all compartments and subcompartments. The parameter can only be set to true when compartmentId is the tenancy OCID (the tenancy is the root compartment). A true value requires the user to have tenancy-level permissions. If this requirement is not met, then the call is rejected. When false, returns resources from only the compartment specified in compartmentId. Default is false.
"""
return pulumi.get(self, "compartment_id_in_subtree")
@property
@pulumi.getter(name="dimensionFilters")
def dimension_filters(self) -> Mapping[str, Any]:
"""
Qualifiers that you want to use when searching for metric definitions. Available dimensions vary by metric namespace. Each dimension takes the form of a key-value pair. Example: { "resourceId": "<var><instance_OCID></var>" }
"""
return pulumi.get(self, "dimension_filters")
@property
@pulumi.getter
def dimensions(self) -> Mapping[str, Any]:
"""
Qualifiers provided in a metric definition. Available dimensions vary by metric namespace. Each dimension takes the form of a key-value pair. Example: `"resourceId": "ocid1.instance.region1.phx.exampleuniqueID"`
"""
return pulumi.get(self, "dimensions")
@property
@pulumi.getter(name="groupBies")
def group_bies(self) -> Sequence[str]:
"""
Group metrics by these fields in the response. For example, to list all metric namespaces available in a compartment, groupBy the "namespace" field. Supported fields: namespace, name, resourceGroup.
"""
return pulumi.get(self, "group_bies")
@property
@pulumi.getter
def name(self) -> str:
"""
The metric name to use when searching for metric definitions. Example: `CpuUtilization`
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def namespace(self) -> str:
"""
The source service or application to use when searching for metric definitions. Example: `oci_computeagent`
"""
return pulumi.get(self, "namespace")
@property
@pulumi.getter(name="resourceGroup")
def resource_group(self) -> str:
"""
Resource group that you want to use as a filter. The specified resource group must exist in the definition of the posted metric. Only one resource group can be applied per metric. A valid resourceGroup value starts with an alphabetical character and includes only alphanumeric characters, periods (.), underscores (_), hyphens (-), and dollar signs ($). Avoid entering confidential information. Example: `frontend-fleet`
"""
return pulumi.get(self, "resource_group")
| 54.865323
| 1,006
| 0.689706
| 6,386
| 51,738
| 5.466333
| 0.076261
| 0.015441
| 0.028303
| 0.041366
| 0.868913
| 0.860118
| 0.852383
| 0.837831
| 0.810301
| 0.80778
| 0
| 0.018358
| 0.221945
| 51,738
| 942
| 1,007
| 54.923567
| 0.848814
| 0.550698
| 0
| 0.711111
| 1
| 0.001852
| 0.147847
| 0.05044
| 0
| 0
| 0
| 0
| 0
| 1
| 0.172222
| false
| 0
| 0.011111
| 0.018519
| 0.353704
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
49a9cfa9cd88d54f102e0a2736e1565ad94ad772
| 1,762
|
py
|
Python
|
python/utils/lawcommission.py
|
barkavi87/anuvaad-corpus
|
9ea832f4228f61a7d4998205976629ea4b7c3d70
|
[
"MIT"
] | 2
|
2019-12-20T08:58:10.000Z
|
2020-05-15T14:17:43.000Z
|
python/utils/lawcommission.py
|
barkavi87/anuvaad-corpus
|
9ea832f4228f61a7d4998205976629ea4b7c3d70
|
[
"MIT"
] | 73
|
2019-08-12T16:17:33.000Z
|
2022-01-13T01:24:38.000Z
|
python/utils/lawcommission.py
|
barkavi87/anuvaad-corpus
|
9ea832f4228f61a7d4998205976629ea4b7c3d70
|
[
"MIT"
] | 1
|
2020-08-24T09:51:46.000Z
|
2020-08-24T09:51:46.000Z
|
import os.path
import requests
for i in range(274,0, -1):
if os.path.isfile('../../lawcommission/H'+str(i)+'.pdf') and os.path.isfile('../../lawcommission/Report'+str(i)+'.pdf'):
try:
file1 = open('../../lawcommission/H'+str(i)+'.pdf', 'rb')
file2 = open('../../lawcommission/Report'+str(i)+'.pdf', 'rb')
print('Sending request for '+str(i))
response = requests.post('http://localhost:5001/multiple-law', files=(
('hindi', ('hindi', file1, 'application/pdf')),
('english',('english',file2, 'application/pdf'))
))
print('Got response for '+str(i))
os.rename('../../lawcommission/H'+str(i)+'.pdf','../../lawcommission/OH'+str(i)+'.pdf')
print(response)
except Exception as e:
print(e)
print('error for '+str(i))
elif os.path.isfile('../../lawcommission/H'+str(i)+'.pdf') and os.path.isfile('../../lawcommission/report'+str(i)+'.pdf'):
try:
print('Sending request for '+str(i))
file1 = open('../../lawcommission/H'+str(i)+'.pdf', 'rb')
file2 = open('../../lawcommission/report'+str(i)+'.pdf', 'rb')
response = requests.post('http://localhost:5001/multiple-law', files=(
('hindi', ('hindi', file1, 'application/pdf')),
('english',('english',file2, 'application/pdf'))
))
print('Got response for '+str(i))
os.rename('../../lawcommission/H'+str(i)+'.pdf','../../lawcommission/OH'+str(i)+'.pdf')
print(response)
except Exception as e:
print(e)
print('error for '+str(i))
else:
print('not available '+str(i))
| 47.621622
| 126
| 0.512486
| 200
| 1,762
| 4.515
| 0.25
| 0.084164
| 0.093023
| 0.119601
| 0.919158
| 0.919158
| 0.861573
| 0.861573
| 0.861573
| 0.861573
| 0
| 0.016104
| 0.259932
| 1,762
| 37
| 127
| 47.621622
| 0.67638
| 0
| 0
| 0.742857
| 0
| 0
| 0.34827
| 0.155417
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.057143
| 0
| 0.057143
| 0.314286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49b3ff2be93ec6f8ecfb676fc4ebff6f457b255a
| 173
|
py
|
Python
|
medicare_appeals/tests/conftest.py
|
18F/medicare-appeals-prototyping
|
51f7a4a2f9dee4a78ef8197d8a80ed255e3dcc56
|
[
"CC0-1.0"
] | 1
|
2019-07-01T19:18:10.000Z
|
2019-07-01T19:18:10.000Z
|
medicare_appeals/tests/conftest.py
|
18F/medicare-appeals-prototyping
|
51f7a4a2f9dee4a78ef8197d8a80ed255e3dcc56
|
[
"CC0-1.0"
] | 1
|
2019-03-07T00:36:56.000Z
|
2019-03-07T00:36:56.000Z
|
medicare_appeals/tests/conftest.py
|
18F/medicare-appeals-prototyping
|
51f7a4a2f9dee4a78ef8197d8a80ed255e3dcc56
|
[
"CC0-1.0"
] | 1
|
2021-02-14T09:47:01.000Z
|
2021-02-14T09:47:01.000Z
|
import pytest
from . import factories
# @pytest.fixture(scope='session')
# def django_db_setup(django_db_setup, django_db_blocker):
# with django_db_blocker.unblock():
| 24.714286
| 58
| 0.774566
| 24
| 173
| 5.25
| 0.583333
| 0.253968
| 0.206349
| 0.301587
| 0.269841
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115607
| 173
| 6
| 59
| 28.833333
| 0.823529
| 0.734104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
49c89405e8f326f1c43dd28c3f15ab2c1725af03
| 22,447
|
py
|
Python
|
Packages/Dead/reqm/_GlobalIDL/__init__.py
|
xylar/cdat
|
8a5080cb18febfde365efc96147e25f51494a2bf
|
[
"BSD-3-Clause"
] | 62
|
2018-03-30T15:46:56.000Z
|
2021-12-08T23:30:24.000Z
|
Packages/Dead/reqm/_GlobalIDL/__init__.py
|
xylar/cdat
|
8a5080cb18febfde365efc96147e25f51494a2bf
|
[
"BSD-3-Clause"
] | 114
|
2018-03-21T01:12:43.000Z
|
2021-07-05T12:29:54.000Z
|
Packages/Dead/reqm/_GlobalIDL/__init__.py
|
CDAT/uvcdat
|
5133560c0c049b5c93ee321ba0af494253b44f91
|
[
"BSD-3-Clause"
] | 14
|
2018-06-06T02:42:47.000Z
|
2021-11-26T03:27:00.000Z
|
""" Module:
Automagically generated by:-
The ORB called Fnorb v1.1.Return.of.Fnorb
"""
_FNORB_ID = ""
# Fnorb modules.
import Fnorb.orb.CORBA
import Fnorb.orb.TypeManager
import Fnorb.orb.Util
class reqException(Fnorb.orb.CORBA.UserException):
""" Exception: IDL:reqException:1.0 """
_FNORB_ID = "IDL:reqException:1.0"
def __init__(self, _why):
""" Constructor. """
self.why = _why
return
def __getinitargs__(self):
""" Return the constructor arguments for unpickling. """
return (self.why,)
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:reqException:1.0", "010000001600000048000000010000001500000049444C3A726571457863657074696F6E3A312E30000000000D000000726571457863657074696F6E000000000100000004000000776879001200000000000000", reqException)
# Enum: IDL:REQ_STATE_T:1.0
REQ_INVALD = Fnorb.orb.Util.EnumMember("REQ_INVALD", 0)
REQ_ACCEPTED = Fnorb.orb.Util.EnumMember("REQ_ACCEPTED", 1)
REQ_COMPLETED = Fnorb.orb.Util.EnumMember("REQ_COMPLETED", 2)
REQ_STATE_T = Fnorb.orb.Util.Enum("IDL:REQ_STATE_T:1.0", [REQ_INVALD, REQ_ACCEPTED, REQ_COMPLETED])
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:REQ_STATE_T:1.0", "010000001100000066000000010000001400000049444C3A5245515F53544154455F543A312E30000C0000005245515F53544154455F5400030000000B0000005245515F494E56414C4400000D0000005245515F4143434550544544000000000E0000005245515F434F4D504C4554454400", REQ_STATE_T)
# Alias: IDL:STRSEQ_T:1.0
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:STRSEQ_T:1.0", "01000000130000001000000001000000120000000000000000000000", None)
class TUPLE:
""" Struct: IDL:TUPLE:1.0 """
_FNORB_ID = "IDL:TUPLE:1.0"
def __init__(self, _lower, _upper, _stride):
""" Constructor. """
self.lower = _lower
self.upper = _upper
self.stride = _stride
return
def __getinitargs__(self):
""" Return the constructor arguments for unpickling. """
return (self.lower, self.upper, self.stride)
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:TUPLE:1.0", "010000000F00000058000000010000000E00000049444C3A5455504C453A312E30000000060000005455504C4500000003000000060000006C6F776572000000030000000600000075707065720000000300000007000000737472696465000003000000", TUPLE)
# Alias: IDL:TUPLE_T:1.0
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:TUPLE_T:1.0", "010000000F00000058000000010000000E00000049444C3A5455504C453A312E30000000060000005455504C4500000003000000060000006C6F776572000000030000000600000075707065720000000300000007000000737472696465000003000000", None)
# Alias: IDL:TUPLES_T:1.0
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:TUPLES_T:1.0", "010000001300000094000000010000001500000084000000010000001000000049444C3A5455504C455F543A312E3000080000005455504C455F54000F00000058000000010000000E00000049444C3A5455504C453A312E30000000060000005455504C4500000003000000060000006C6F77657200000003000000060000007570706572000000030000000700000073747269646500000300000000000000", None)
class FILE_LOCATION:
""" Struct: IDL:FILE_LOCATION:1.0 """
_FNORB_ID = "IDL:FILE_LOCATION:1.0"
def __init__(self, _dataset_name, _relative_path):
""" Constructor. """
self.dataset_name = _dataset_name
self.relative_path = _relative_path
return
def __getinitargs__(self):
""" Return the constructor arguments for unpickling. """
return (self.dataset_name, self.relative_path)
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:FILE_LOCATION:1.0", "010000000F00000070000000010000001600000049444C3A46494C455F4C4F434154494F4E3A312E300000000E00000046494C455F4C4F434154494F4E000000020000000D000000646174617365745F6E616D650000000012000000000000000E00000072656C61746976655F706174680000001200000000000000", FILE_LOCATION)
# Alias: IDL:FILE_LOCATION_T:1.0
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:FILE_LOCATION_T:1.0", "010000000F00000070000000010000001600000049444C3A46494C455F4C4F434154494F4E3A312E300000000E00000046494C455F4C4F434154494F4E000000020000000D000000646174617365745F6E616D650000000012000000000000000E00000072656C61746976655F706174680000001200000000000000", None)
class SLABSPEC:
""" Struct: IDL:SLABSPEC:1.0 """
_FNORB_ID = "IDL:SLABSPEC:1.0"
def __init__(self, _variable, _data_type, _selection_spec):
""" Constructor. """
self.variable = _variable
self.data_type = _data_type
self.selection_spec = _selection_spec
return
def __getinitargs__(self):
""" Return the constructor arguments for unpickling. """
return (self.variable, self.data_type, self.selection_spec)
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:SLABSPEC:1.0", "010000000F00000044010000010000001100000049444C3A534C4142535045433A312E300000000009000000534C4142535045430000000003000000090000007661726961626C650000000012000000000000000A000000646174615F7479706500000012000000000000000F00000073656C656374696F6E5F73706563000015000000C8000000010000001100000049444C3A5455504C45535F543A312E3000000000090000005455504C45535F54000000001300000094000000010000001500000084000000010000001000000049444C3A5455504C455F543A312E3000080000005455504C455F54000F00000058000000010000000E00000049444C3A5455504C453A312E30000000060000005455504C4500000003000000060000006C6F77657200000003000000060000007570706572000000030000000700000073747269646500000300000000000000", SLABSPEC)
# Alias: IDL:SLABSPEC_T:1.0
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:SLABSPEC_T:1.0", "010000000F00000044010000010000001100000049444C3A534C4142535045433A312E300000000009000000534C4142535045430000000003000000090000007661726961626C650000000012000000000000000A000000646174615F7479706500000012000000000000000F00000073656C656374696F6E5F73706563000015000000C8000000010000001100000049444C3A5455504C45535F543A312E3000000000090000005455504C45535F54000000001300000094000000010000001500000084000000010000001000000049444C3A5455504C455F543A312E3000080000005455504C455F54000F00000058000000010000000E00000049444C3A5455504C453A312E30000000060000005455504C4500000003000000060000006C6F77657200000003000000060000007570706572000000030000000700000073747269646500000300000000000000", None)
# Alias: IDL:SLABSPECS_T:1.0
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:SLABSPECS_T:1.0", "010000001300000088010000010000001500000078010000010000001300000049444C3A534C4142535045435F543A312E3000000B000000534C4142535045435F5400000F00000044010000010000001100000049444C3A534C4142535045433A312E300000000009000000534C4142535045430000000003000000090000007661726961626C650000000012000000000000000A000000646174615F7479706500000012000000000000000F00000073656C656374696F6E5F73706563000015000000C8000000010000001100000049444C3A5455504C45535F543A312E3000000000090000005455504C45535F54000000001300000094000000010000001500000084000000010000001000000049444C3A5455504C455F543A312E3000080000005455504C455F54000F00000058000000010000000E00000049444C3A5455504C453A312E30000000060000005455504C4500000003000000060000006C6F7765720000000300000006000000757070657200000003000000070000007374726964650000030000000000000000000000", None)
class REQUEST:
""" Struct: IDL:REQUEST:1.0 """
_FNORB_ID = "IDL:REQUEST:1.0"
def __init__(self, _source, _target, _slabspecs, _search_replicas):
""" Constructor. """
self.source = _source
self.target = _target
self.slabspecs = _slabspecs
self.search_replicas = _search_replicas
return
def __getinitargs__(self):
""" Return the constructor arguments for unpickling. """
return (self.source, self.target, self.slabspecs, self.search_replicas)
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:REQUEST:1.0", "010000000F00000094030000010000001000000049444C3A524551554553543A312E30000800000052455155455354000400000007000000736F75726365000015000000AC000000010000001800000049444C3A46494C455F4C4F434154494F4E5F543A312E30001000000046494C455F4C4F434154494F4E5F54000F00000070000000010000001600000049444C3A46494C455F4C4F434154494F4E3A312E300000000E00000046494C455F4C4F434154494F4E000000020000000D000000646174617365745F6E616D650000000012000000000000000E00000072656C61746976655F70617468000000120000000000000007000000746172676574000015000000AC000000010000001800000049444C3A46494C455F4C4F434154494F4E5F543A312E30001000000046494C455F4C4F434154494F4E5F54000F00000070000000010000001600000049444C3A46494C455F4C4F434154494F4E3A312E300000000E00000046494C455F4C4F434154494F4E000000020000000D000000646174617365745F6E616D650000000012000000000000000E00000072656C61746976655F7061746800000012000000000000000A000000736C6162737065637300000015000000BC010000010000001400000049444C3A534C414253504543535F543A312E30000C000000534C414253504543535F54001300000088010000010000001500000078010000010000001300000049444C3A534C4142535045435F543A312E3000000B000000534C4142535045435F5400000F00000044010000010000001100000049444C3A534C4142535045433A312E300000000009000000534C4142535045430000000003000000090000007661726961626C650000000012000000000000000A000000646174615F7479706500000012000000000000000F00000073656C656374696F6E5F73706563000015000000C8000000010000001100000049444C3A5455504C45535F543A312E3000000000090000005455504C45535F54000000001300000094000000010000001500000084000000010000001000000049444C3A5455504C455F543A312E3000080000005455504C455F54000F00000058000000010000000E00000049444C3A5455504C453A312E30000000060000005455504C4500000003000000060000006C6F7765720000000300000006000000757070657200000003000000070000007374726964650000030000000000000000000000100000007365617263685F7265706C696361730008000000", REQUEST)
# Alias: IDL:REQUEST_T:1.0
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:REQUEST_T:1.0", "010000000F00000094030000010000001000000049444C3A524551554553543A312E30000800000052455155455354000400000007000000736F75726365000015000000AC000000010000001800000049444C3A46494C455F4C4F434154494F4E5F543A312E30001000000046494C455F4C4F434154494F4E5F54000F00000070000000010000001600000049444C3A46494C455F4C4F434154494F4E3A312E300000000E00000046494C455F4C4F434154494F4E000000020000000D000000646174617365745F6E616D650000000012000000000000000E00000072656C61746976655F70617468000000120000000000000007000000746172676574000015000000AC000000010000001800000049444C3A46494C455F4C4F434154494F4E5F543A312E30001000000046494C455F4C4F434154494F4E5F54000F00000070000000010000001600000049444C3A46494C455F4C4F434154494F4E3A312E300000000E00000046494C455F4C4F434154494F4E000000020000000D000000646174617365745F6E616D650000000012000000000000000E00000072656C61746976655F7061746800000012000000000000000A000000736C6162737065637300000015000000BC010000010000001400000049444C3A534C414253504543535F543A312E30000C000000534C414253504543535F54001300000088010000010000001500000078010000010000001300000049444C3A534C4142535045435F543A312E3000000B000000534C4142535045435F5400000F00000044010000010000001100000049444C3A534C4142535045433A312E300000000009000000534C4142535045430000000003000000090000007661726961626C650000000012000000000000000A000000646174615F7479706500000012000000000000000F00000073656C656374696F6E5F73706563000015000000C8000000010000001100000049444C3A5455504C45535F543A312E3000000000090000005455504C45535F54000000001300000094000000010000001500000084000000010000001000000049444C3A5455504C455F543A312E3000080000005455504C455F54000F00000058000000010000000E00000049444C3A5455504C453A312E30000000060000005455504C4500000003000000060000006C6F7765720000000300000006000000757070657200000003000000070000007374726964650000030000000000000000000000100000007365617263685F7265706C696361730008000000", None)
# Alias: IDL:REQUESTS_T:1.0
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:REQUESTS_T:1.0", "0100000013000000D80300000100000015000000C8030000010000001200000049444C3A524551554553545F543A312E300000000A000000524551554553545F540000000F00000094030000010000001000000049444C3A524551554553543A312E30000800000052455155455354000400000007000000736F75726365000015000000AC000000010000001800000049444C3A46494C455F4C4F434154494F4E5F543A312E30001000000046494C455F4C4F434154494F4E5F54000F00000070000000010000001600000049444C3A46494C455F4C4F434154494F4E3A312E300000000E00000046494C455F4C4F434154494F4E000000020000000D000000646174617365745F6E616D650000000012000000000000000E00000072656C61746976655F70617468000000120000000000000007000000746172676574000015000000AC000000010000001800000049444C3A46494C455F4C4F434154494F4E5F543A312E30001000000046494C455F4C4F434154494F4E5F54000F00000070000000010000001600000049444C3A46494C455F4C4F434154494F4E3A312E300000000E00000046494C455F4C4F434154494F4E000000020000000D000000646174617365745F6E616D650000000012000000000000000E00000072656C61746976655F7061746800000012000000000000000A000000736C6162737065637300000015000000BC010000010000001400000049444C3A534C414253504543535F543A312E30000C000000534C414253504543535F54001300000088010000010000001500000078010000010000001300000049444C3A534C4142535045435F543A312E3000000B000000534C4142535045435F5400000F00000044010000010000001100000049444C3A534C4142535045433A312E300000000009000000534C4142535045430000000003000000090000007661726961626C650000000012000000000000000A000000646174615F7479706500000012000000000000000F00000073656C656374696F6E5F73706563000015000000C8000000010000001100000049444C3A5455504C45535F543A312E3000000000090000005455504C45535F54000000001300000094000000010000001500000084000000010000001000000049444C3A5455504C455F543A312E3000080000005455504C455F54000F00000058000000010000000E00000049444C3A5455504C453A312E30000000060000005455504C4500000003000000060000006C6F7765720000000300000006000000757070657200000003000000070000007374726964650000030000000000000
000000000100000007365617263685F7265706C69636173000800000000000000", None)
class FILE_STATUS:
""" Struct: IDL:FILE_STATUS:1.0 """
_FNORB_ID = "IDL:FILE_STATUS:1.0"
def __init__(self, _target, _size):
""" Constructor. """
self.target = _target
self.size = _size
return
def __getinitargs__(self):
""" Return the constructor arguments for unpickling. """
return (self.target, self.size)
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:FILE_STATUS:1.0", "010000000F00000000010000010000001400000049444C3A46494C455F5354415455533A312E30000C00000046494C455F535441545553000200000007000000746172676574000015000000AC000000010000001800000049444C3A46494C455F4C4F434154494F4E5F543A312E30001000000046494C455F4C4F434154494F4E5F54000F00000070000000010000001600000049444C3A46494C455F4C4F434154494F4E3A312E300000000E00000046494C455F4C4F434154494F4E000000020000000D000000646174617365745F6E616D650000000012000000000000000E00000072656C61746976655F7061746800000012000000000000000500000073697A650000000007000000", FILE_STATUS)
# Alias: IDL:FILE_STATUS_T:1.0
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:FILE_STATUS_T:1.0", "010000000F00000000010000010000001400000049444C3A46494C455F5354415455533A312E30000C00000046494C455F535441545553000200000007000000746172676574000015000000AC000000010000001800000049444C3A46494C455F4C4F434154494F4E5F543A312E30001000000046494C455F4C4F434154494F4E5F54000F00000070000000010000001600000049444C3A46494C455F4C4F434154494F4E3A312E300000000E00000046494C455F4C4F434154494F4E000000020000000D000000646174617365745F6E616D650000000012000000000000000E00000072656C61746976655F7061746800000012000000000000000500000073697A650000000007000000", None)
# Alias: IDL:FILES_STATUS_T:1.0
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:FILES_STATUS_T:1.0", "01000000130000004C01000001000000150000003C010000010000001600000049444C3A46494C455F5354415455535F543A312E300000000E00000046494C455F5354415455535F540000000F00000000010000010000001400000049444C3A46494C455F5354415455533A312E30000C00000046494C455F535441545553000200000007000000746172676574000015000000AC000000010000001800000049444C3A46494C455F4C4F434154494F4E5F543A312E30001000000046494C455F4C4F434154494F4E5F54000F00000070000000010000001600000049444C3A46494C455F4C4F434154494F4E3A312E300000000E00000046494C455F4C4F434154494F4E000000020000000D000000646174617365745F6E616D650000000012000000000000000E00000072656C61746976655F7061746800000012000000000000000500000073697A65000000000700000000000000", None)
# Alias: IDL:REQ_TOKEN_T:1.0
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:REQ_TOKEN_T:1.0", "010000001200000010000000", None)
# Alias: IDL:USER_ID_T:1.0
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:USER_ID_T:1.0", "010000001200000000000000", None)
class RequestManager(Fnorb.orb.CORBA.Object):
""" Interface: IDL:RequestManager:1.0 """
_FNORB_ID = "IDL:RequestManager:1.0"
def request(self, *args, **kw):
""" Operation: IDL:RequestManager/request:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:USER_ID_T:1.0"))
inputs.append(Fnorb.orb.CORBA.typecode("IDL:REQUESTS_T:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.TC_boolean)
outputs.append(Fnorb.orb.CORBA.typecode("IDL:REQ_TOKEN_T:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:reqException:1.0"))
# Create a request object.
request = self._create_request("request", inputs, outputs, exceptions)
# Make the request!
apply(request.invoke, args, kw)
# Return the results.
return request.results()
def estimate(self, *args, **kw):
""" Operation: IDL:RequestManager/estimate:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:REQ_TOKEN_T:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.TC_double)
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:reqException:1.0"))
# Create a request object.
request = self._create_request("estimate", inputs, outputs, exceptions)
# Make the request!
apply(request.invoke, args, kw)
# Return the results.
return request.results()
def execute(self, *args, **kw):
""" Operation: IDL:RequestManager/execute:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:REQ_TOKEN_T:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.TC_boolean)
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:reqException:1.0"))
# Create a request object.
request = self._create_request("execute", inputs, outputs, exceptions)
# Make the request!
apply(request.invoke, args, kw)
# Return the results.
return request.results()
def status(self, *args, **kw):
""" Operation: IDL:RequestManager/status:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:REQ_TOKEN_T:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.typecode("IDL:FILES_STATUS_T:1.0"))
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:reqException:1.0"))
# Create a request object.
request = self._create_request("status", inputs, outputs, exceptions)
# Make the request!
apply(request.invoke, args, kw)
# Return the results.
return request.results()
def cancel(self, *args, **kw):
""" Operation: IDL:RequestManager/cancel:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.typecode("IDL:REQ_TOKEN_T:1.0"))
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.TC_boolean)
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:reqException:1.0"))
# Create a request object.
request = self._create_request("cancel", inputs, outputs, exceptions)
# Make the request!
apply(request.invoke, args, kw)
# Return the results.
return request.results()
def staged(self, *args, **kw):
""" Operation: IDL:RequestManager/staged:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.TC_string)
inputs.append(Fnorb.orb.CORBA.TC_string)
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.TC_boolean)
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:reqException:1.0"))
# Create a request object.
request = self._create_request("staged", inputs, outputs, exceptions)
# Make the request!
apply(request.invoke, args, kw)
# Return the results.
return request.results()
def stageFailed(self, *args, **kw):
""" Operation: IDL:RequestManager/stageFailed:1.0 """
# Typecodes for 'in' and 'inout' parameters.
inputs = []
inputs.append(Fnorb.orb.CORBA.TC_string)
inputs.append(Fnorb.orb.CORBA.TC_string)
# Typecodes for the result, 'inout' and 'out' parameters.
outputs = []
outputs.append(Fnorb.orb.CORBA.TC_boolean)
# Typecodes for user exceptions.
exceptions = []
exceptions.append(Fnorb.orb.CORBA.typecode("IDL:reqException:1.0"))
# Create a request object.
request = self._create_request("stageFailed", inputs, outputs, exceptions)
# Make the request!
apply(request.invoke, args, kw)
# Return the results.
return request.results()
Fnorb.orb.TypeManager.TypeManager_init().add_type("IDL:RequestManager:1.0", "010000000E00000033000000010000001700000049444C3A526571756573744D616E616765723A312E3000000F000000526571756573744D616E6167657200", RequestManager)
#############################################################################
| 62.876751
| 2,073
| 0.825188
| 1,390
| 22,447
| 13.141727
| 0.089209
| 0.007664
| 0.005748
| 0.026003
| 0.293535
| 0.266656
| 0.244813
| 0.241857
| 0.237039
| 0.22193
| 0
| 0.53007
| 0.103666
| 22,447
| 356
| 2,074
| 63.053371
| 0.377833
| 0.119392
| 0
| 0.445161
| 1
| 0
| 0.634155
| 0.597432
| 0
| 1
| 0
| 0
| 0
| 1
| 0.122581
| false
| 0
| 0.019355
| 0
| 0.354839
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3fa8932e0506b45e7a89b62cfe372aee0e6d06c2
| 7,947
|
py
|
Python
|
tests/aat/api/v1/client/api/sockets_api.py
|
DerangedMonkeyNinja/openperf
|
cde4dc6bf3687f0663c11e9e856e26a0dc2b1d16
|
[
"Apache-2.0"
] | 20
|
2019-12-04T01:28:52.000Z
|
2022-03-17T14:09:34.000Z
|
tests/aat/api/v1/client/api/sockets_api.py
|
DerangedMonkeyNinja/openperf
|
cde4dc6bf3687f0663c11e9e856e26a0dc2b1d16
|
[
"Apache-2.0"
] | 115
|
2020-02-04T21:29:54.000Z
|
2022-02-17T13:33:51.000Z
|
tests/aat/api/v1/client/api/sockets_api.py
|
DerangedMonkeyNinja/openperf
|
cde4dc6bf3687f0663c11e9e856e26a0dc2b1d16
|
[
"Apache-2.0"
] | 16
|
2019-12-03T16:41:18.000Z
|
2021-11-06T04:44:11.000Z
|
# coding: utf-8
"""
OpenPerf API
REST API interface for OpenPerf # noqa: E501
OpenAPI spec version: 1
Contact: support@spirent.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from client.api_client import ApiClient
class SocketsApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """

    def __init__(self, api_client=None):
        # Use the caller-supplied client, or fall back to a default one.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def get_socket_stats(self, id, **kwargs):  # noqa: E501
        """Get a socket's statistics  # noqa: E501

        Return a socket's statistics by id.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_socket_stats(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str id: Unique resource identifier (required)
        :return: SocketStats
            If the method is called asynchronously,
            returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            # Asynchronous mode: hand the request thread straight back.
            return self.get_socket_stats_with_http_info(id, **kwargs)  # noqa: E501
        (data) = self.get_socket_stats_with_http_info(id, **kwargs)  # noqa: E501
        return data

    def get_socket_stats_with_http_info(self, id, **kwargs):  # noqa: E501
        """Get a socket's statistics  # noqa: E501

        Return a socket's statistics by id.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_socket_stats_with_http_info(id, async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :param str id: Unique resource identifier (required)
        :return: SocketStats
            If the method is called asynchronously,
            returns the request thread.
        """
        accepted = ('id', 'async_req', '_return_http_data_only',
                    '_preload_content', '_request_timeout')  # noqa: E501
        params = {'id': id}
        for key, val in kwargs.items():
            if key not in accepted:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_socket_stats" % key
                )
            params[key] = val
        # verify the required parameter 'id' is set
        if params['id'] is None:
            raise ValueError("Missing the required parameter `id` when calling `get_socket_stats`")  # noqa: E501

        path_params = {'id': params['id']}  # noqa: E501
        header_params = {
            # HTTP headers `Accept` / `Content-Type`
            'Accept': self.api_client.select_header_accept(
                ['application/json']),  # noqa: E501
            'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
                ['application/json']),  # noqa: E501
        }

        # No authentication is required for this endpoint.
        return self.api_client.call_api(
            '/sockets/{id}', 'GET',
            path_params,
            [],  # query params
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type='SocketStats',  # noqa: E501
            auth_settings=[],  # noqa: E501
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats={})

    def list_socket_stats(self, **kwargs):  # noqa: E501
        """List network socket statistics  # noqa: E501

        The `sockets` endpoint returns statistics for all network sockets that are known by the stack.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_socket_stats(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :return: list[SocketStats]
            If the method is called asynchronously,
            returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async_req'):
            # Asynchronous mode: hand the request thread straight back.
            return self.list_socket_stats_with_http_info(**kwargs)  # noqa: E501
        (data) = self.list_socket_stats_with_http_info(**kwargs)  # noqa: E501
        return data

    def list_socket_stats_with_http_info(self, **kwargs):  # noqa: E501
        """List network socket statistics  # noqa: E501

        The `sockets` endpoint returns statistics for all network sockets that are known by the stack.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.list_socket_stats_with_http_info(async_req=True)
        >>> result = thread.get()
        :param async_req bool
        :return: list[SocketStats]
            If the method is called asynchronously,
            returns the request thread.
        """
        accepted = ('async_req', '_return_http_data_only',
                    '_preload_content', '_request_timeout')  # noqa: E501
        params = {}
        for key, val in kwargs.items():
            if key not in accepted:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_socket_stats" % key
                )
            params[key] = val

        header_params = {
            # HTTP headers `Accept` / `Content-Type`
            'Accept': self.api_client.select_header_accept(
                ['application/json']),  # noqa: E501
            'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
                ['application/json']),  # noqa: E501
        }

        # No authentication is required for this endpoint.
        return self.api_client.call_api(
            '/sockets', 'GET',
            {},  # path params
            [],  # query params
            header_params,
            body=None,
            post_params=[],
            files={},
            response_type='list[SocketStats]',  # noqa: E501
            auth_settings=[],  # noqa: E501
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats={})
| 35.32
| 117
| 0.606896
| 927
| 7,947
| 4.961165
| 0.171521
| 0.053925
| 0.022614
| 0.031311
| 0.846923
| 0.846923
| 0.821483
| 0.802348
| 0.802348
| 0.784518
| 0
| 0.018054
| 0.303007
| 7,947
| 224
| 118
| 35.477679
| 0.81224
| 0.351957
| 0
| 0.714286
| 1
| 0
| 0.149586
| 0.028008
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044643
| false
| 0
| 0.035714
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3fbdf5918ff9dabb39f4fd8373fd393e2bea1224
| 1,624
|
py
|
Python
|
tests/check_time_range_function.py
|
bsgip/c3x-data
|
32e6973a0eeb3cd72d298734d7a2937dd369a456
|
[
"MIT"
] | 2
|
2021-04-17T03:55:04.000Z
|
2022-02-25T10:08:02.000Z
|
tests/check_time_range_function.py
|
bsgip/c3x-data
|
32e6973a0eeb3cd72d298734d7a2937dd369a456
|
[
"MIT"
] | null | null | null |
tests/check_time_range_function.py
|
bsgip/c3x-data
|
32e6973a0eeb3cd72d298734d7a2937dd369a456
|
[
"MIT"
] | 2
|
2021-03-15T09:18:22.000Z
|
2021-11-28T10:39:53.000Z
|
from c3e_data_preparation.preparation import cleaners
def test_timestamp_end_h():
    """Hour-based range, fault at the end: the range must end exactly at the timestamp."""
    ts = 1548154800
    start, end = cleaners.find_time_range_method_hour(timestamp=ts, fault_placement="end")
    assert end == ts, "timerange is wrong"
    assert start != ts, "timerange is wrong"
def test_timestamp_start_h():
    """Hour-based range, fault at the start: the range must begin exactly at the timestamp."""
    ts = 1548154800
    start, end = cleaners.find_time_range_method_hour(timestamp=ts, fault_placement="start")
    assert start == ts, "timerange is wrong"
    assert end != ts, "timerange is wrong"
def test_timestamp_middle_h():
    """Hour-based range, fault in the middle: neither boundary equals the timestamp.

    Fixes a copy-paste bug: the second assertion previously re-checked
    ``start`` instead of ``end``, leaving ``end`` unverified.
    """
    timestamp = 1548154800
    start, end = cleaners.find_time_range_method_hour(timestamp=timestamp, fault_placement="middle")
    assert start != timestamp, "timerange is wrong"
    assert end != timestamp, "timerange is wrong"
def test_timestamp_end_d():
    """Day-based range, fault at the end: the range must end exactly at the timestamp."""
    ts = 1548154800
    start, end = cleaners.find_time_range_method_day(timestamp=ts, fault_placement="end")
    assert end == ts, "timerange is wrong"
    assert start != ts, "timerange is wrong"
def test_timestamp_start_d():
    """Day-based range, fault at the start: the range must begin exactly at the timestamp."""
    ts = 1548154800
    start, end = cleaners.find_time_range_method_day(timestamp=ts, fault_placement="start")
    assert start == ts, "timerange is wrong"
    assert end != ts, "timerange is wrong"
def test_timestamp_middle_d():
    """Day-based range, fault in the middle: neither boundary equals the timestamp.

    Fixes a copy-paste bug: the second assertion previously re-checked
    ``start`` instead of ``end``, leaving ``end`` unverified.
    """
    timestamp = 1548154800
    start, end = cleaners.find_time_range_method_day(timestamp=timestamp, fault_placement="middle")
    assert start != timestamp, "timerange is wrong"
    assert end != timestamp, "timerange is wrong"
| 31.843137
| 100
| 0.748768
| 199
| 1,624
| 5.859296
| 0.135678
| 0.185249
| 0.205832
| 0.25729
| 0.940823
| 0.940823
| 0.940823
| 0.940823
| 0.940823
| 0.940823
| 0
| 0.04482
| 0.161946
| 1,624
| 50
| 101
| 32.48
| 0.811903
| 0
| 0
| 0.580645
| 0
| 0
| 0.150339
| 0
| 0
| 0
| 0
| 0
| 0.387097
| 1
| 0.193548
| false
| 0
| 0.032258
| 0
| 0.225806
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b770922eec2f3bdced27f23aca4bc64898142d9e
| 6,912
|
py
|
Python
|
test/test_cells_page_breaks_api.py
|
aspose-cells-cloud/aspose-cells-cloud-python
|
0189236d38053dc67f7edc754b5101f17262cee8
|
[
"MIT"
] | 3
|
2018-05-23T03:16:26.000Z
|
2020-11-07T11:42:41.000Z
|
test/test_cells_page_breaks_api.py
|
aspose-cells-cloud/aspose-cells-cloud-python
|
0189236d38053dc67f7edc754b5101f17262cee8
|
[
"MIT"
] | null | null | null |
test/test_cells_page_breaks_api.py
|
aspose-cells-cloud/aspose-cells-cloud-python
|
0189236d38053dc67f7edc754b5101f17262cee8
|
[
"MIT"
] | 4
|
2018-08-29T18:45:05.000Z
|
2021-03-25T07:59:56.000Z
|
# coding: utf-8
"""
Web API Swagger specification
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import warnings
# Prepend the repository root to sys.path so the SDK under test is imported
# from the checkout rather than any installed copy.
ABSPATH = os.path.abspath(os.path.realpath(os.path.dirname(__file__)) + "/..")
sys.path.append(ABSPATH)
import asposecellscloud
from asposecellscloud.rest import ApiException
from asposecellscloud.apis.cells_api import CellsApi
import AuthUtil

# Module-level CellsApi client shared by all tests; created lazily by the
# first setUp() call so the credentials lookup happens only once.
global_api = None
class TestCellsPageBreaksApi(unittest.TestCase):
""" CellsPageBreaksApi unit test stubs """
def setUp(self):
warnings.simplefilter('ignore', ResourceWarning)
global global_api
if global_api is None:
global_api = asposecellscloud.apis.cells_api.CellsApi(AuthUtil.GetClientId(),AuthUtil.GetClientSecret(),"v3.0",AuthUtil.GetBaseUrl())
self.api = global_api
def tearDown(self):
pass
def test_cells_page_breaks_delete_horizontal_page_break(self):
"""
Test case for cells_page_breaks_delete_horizontal_page_break
"""
name ='Book1.xlsx'
sheet_name ='Sheet1'
index = 0
folder = "PythonTest"
result = AuthUtil.Ready(self.api, name, folder)
self.assertTrue(len(result.uploaded)>0)
result = self.api.cells_page_breaks_delete_horizontal_page_break(name, sheet_name,index,folder=folder)
self.assertEqual(result.code,200)
pass
def test_cells_page_breaks_delete_horizontal_page_breaks(self):
"""
Test case for cells_page_breaks_delete_horizontal_page_breaks
"""
name ='Book1.xlsx'
sheet_name ='Sheet1'
row = 0
folder = "PythonTest"
result = AuthUtil.Ready(self.api, name, folder)
self.assertTrue(len(result.uploaded)>0)
result = self.api.cells_page_breaks_delete_horizontal_page_breaks(name, sheet_name,row=row,folder=folder)
self.assertEqual(result.code,200)
pass
def test_cells_page_breaks_delete_vertical_page_break(self):
"""
Test case for cells_page_breaks_delete_vertical_page_break
"""
name ='Book1.xlsx'
sheet_name ='Sheet1'
index = 0
folder = "PythonTest"
result = AuthUtil.Ready(self.api, name, folder)
self.assertTrue(len(result.uploaded)>0)
result = self.api.cells_page_breaks_delete_vertical_page_break(name, sheet_name,index,folder=folder)
self.assertEqual(result.code,200)
pass
def test_cells_page_breaks_delete_vertical_page_breaks(self):
"""
Test case for cells_page_breaks_delete_vertical_page_breaks
"""
name ='Book1.xlsx'
sheet_name ='Sheet1'
column = 0
folder = "PythonTest"
result = AuthUtil.Ready(self.api, name, folder)
self.assertTrue(len(result.uploaded)>0)
result = self.api.cells_page_breaks_delete_vertical_page_breaks(name, sheet_name,column=column,folder=folder)
self.assertEqual(result.code,200)
pass
def test_cells_page_breaks_get_horizontal_page_break(self):
"""
Test case for cells_page_breaks_get_horizontal_page_break
"""
name ='Book1.xlsx'
sheet_name ='Sheet1'
index = 0
folder = "PythonTest"
result = AuthUtil.Ready(self.api, name, folder)
self.assertTrue(len(result.uploaded)>0)
result = self.api.cells_page_breaks_get_horizontal_page_break(name, sheet_name,index,folder=folder)
self.assertEqual(result.code,200)
pass
def test_cells_page_breaks_get_horizontal_page_breaks(self):
"""
Test case for cells_page_breaks_get_horizontal_page_breaks
"""
name ='Book1.xlsx'
sheet_name ='Sheet1'
folder = "PythonTest"
result = AuthUtil.Ready(self.api, name, folder)
self.assertTrue(len(result.uploaded)>0)
result = self.api.cells_page_breaks_get_horizontal_page_breaks(name, sheet_name,folder=folder)
self.assertEqual(result.code,200)
pass
def test_cells_page_breaks_get_vertical_page_break(self):
"""
Test case for cells_page_breaks_get_vertical_page_break
"""
name ='Book1.xlsx'
sheet_name ='Sheet1'
index = 0
folder = "PythonTest"
result = AuthUtil.Ready(self.api, name, folder)
self.assertTrue(len(result.uploaded)>0)
result = self.api.cells_page_breaks_get_vertical_page_break(name, sheet_name,index,folder=folder)
self.assertEqual(result.code,200)
pass
def test_cells_page_breaks_get_vertical_page_breaks(self):
"""
Test case for cells_page_breaks_get_vertical_page_breaks
"""
name ='Book1.xlsx'
sheet_name ='Sheet1'
index = 0
folder = "PythonTest"
result = AuthUtil.Ready(self.api, name, folder)
self.assertTrue(len(result.uploaded)>0)
result = self.api.cells_page_breaks_get_vertical_page_breaks(name, sheet_name,folder=folder)
self.assertEqual(result.code,200)
pass
def test_cells_page_breaks_put_horizontal_page_break(self):
"""
Test case for cells_page_breaks_put_horizontal_page_break
"""
name ='Book1.xlsx'
sheet_name ='Sheet1'
cellname = 'A1'
row = 1
column = 1
startColumn = 1
endColumn = 1
folder = "PythonTest"
result = AuthUtil.Ready(self.api, name, folder)
self.assertTrue(len(result.uploaded)>0)
result = self.api.cells_page_breaks_put_horizontal_page_break(name, sheet_name, cellname=cellname,row=row, column=column ,start_column=startColumn,end_column=endColumn,folder=folder)
self.assertEqual(result.code,200)
pass
def test_cells_page_breaks_put_vertical_page_break(self):
"""
Test case for cells_page_breaks_put_vertical_page_break
"""
name ='Book1.xlsx'
sheet_name ='Sheet1'
cellname = 'A1'
row = 1
column = 1
startRow = 1
endRow = 1
folder = "PythonTest"
result = AuthUtil.Ready(self.api, name, folder)
self.assertTrue(len(result.uploaded)>0)
result = self.api.cells_page_breaks_put_vertical_page_break(name, sheet_name, cellname=cellname,column=column, row=row ,start_row=startRow,end_row=endRow,folder=folder)
self.assertEqual(result.code,200)
pass
# Allow running this module directly: execute all test cases via unittest.
if __name__ == '__main__':
    unittest.main()
| 32.603774
| 190
| 0.651042
| 820
| 6,912
| 5.197561
| 0.12561
| 0.098545
| 0.105584
| 0.059127
| 0.792116
| 0.792116
| 0.787893
| 0.758329
| 0.732989
| 0.695917
| 0
| 0.016013
| 0.259115
| 6,912
| 211
| 191
| 32.758294
| 0.816247
| 0.124711
| 0
| 0.630769
| 1
| 0
| 0.050123
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.092308
| false
| 0.084615
| 0.069231
| 0
| 0.169231
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
b787a3546afeb49c2a1650d5344d653309babce3
| 1,840
|
py
|
Python
|
build/darknet_ros_msgs/cmake/darknet_ros_msgs-genmsg-context.py
|
6RiverSystems/darknet_ros
|
03c72b96afa99f7cc75f7792b51deb4a7f4ed379
|
[
"BSD-3-Clause"
] | null | null | null |
build/darknet_ros_msgs/cmake/darknet_ros_msgs-genmsg-context.py
|
6RiverSystems/darknet_ros
|
03c72b96afa99f7cc75f7792b51deb4a7f4ed379
|
[
"BSD-3-Clause"
] | null | null | null |
build/darknet_ros_msgs/cmake/darknet_ros_msgs-genmsg-context.py
|
6RiverSystems/darknet_ros
|
03c72b96afa99f7cc75f7792b51deb4a7f4ed379
|
[
"BSD-3-Clause"
] | null | null | null |
# generated from genmsg/cmake/pkg-genmsg.context.in
# Build-time context for ROS message generation: plain string assignments
# read by the genmsg CMake machinery. Values are machine-generated absolute
# paths; do not edit by hand.

# Semicolon-separated list of every .msg file this package generates code for.
messages_str = "/home/kalyco/mfp_workspace/src/darknet_ros/darknet_ros_msgs/msg/BoundingBox.msg;/home/kalyco/mfp_workspace/src/darknet_ros/darknet_ros_msgs/msg/BoundingBoxes.msg;/home/kalyco/mfp_workspace/devel/.private/darknet_ros_msgs/share/darknet_ros_msgs/msg/CheckForObjectsAction.msg;/home/kalyco/mfp_workspace/devel/.private/darknet_ros_msgs/share/darknet_ros_msgs/msg/CheckForObjectsActionGoal.msg;/home/kalyco/mfp_workspace/devel/.private/darknet_ros_msgs/share/darknet_ros_msgs/msg/CheckForObjectsActionResult.msg;/home/kalyco/mfp_workspace/devel/.private/darknet_ros_msgs/share/darknet_ros_msgs/msg/CheckForObjectsActionFeedback.msg;/home/kalyco/mfp_workspace/devel/.private/darknet_ros_msgs/share/darknet_ros_msgs/msg/CheckForObjectsGoal.msg;/home/kalyco/mfp_workspace/devel/.private/darknet_ros_msgs/share/darknet_ros_msgs/msg/CheckForObjectsResult.msg;/home/kalyco/mfp_workspace/devel/.private/darknet_ros_msgs/share/darknet_ros_msgs/msg/CheckForObjectsFeedback.msg"
# No .srv files in this package.
services_str = ""
pkg_name = "darknet_ros_msgs"
# Message packages this package's messages depend on.
dependencies_str = "actionlib_msgs;geometry_msgs;sensor_msgs;std_msgs"
# Target languages for generated bindings.
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
# Alternating package-name;msg-directory pairs used to resolve dependencies.
dep_include_paths_str = "darknet_ros_msgs;/home/kalyco/mfp_workspace/src/darknet_ros/darknet_ros_msgs/msg;darknet_ros_msgs;/home/kalyco/mfp_workspace/devel/.private/darknet_ros_msgs/share/darknet_ros_msgs/msg;actionlib_msgs;/opt/ros/kinetic/share/actionlib_msgs/cmake/../msg;geometry_msgs;/opt/ros/kinetic/share/geometry_msgs/cmake/../msg;sensor_msgs;/opt/ros/kinetic/share/sensor_msgs/cmake/../msg;std_msgs;/home/kalyco/mfp_workspace/src/std_msgs/msg"
PYTHON_EXECUTABLE = "/usr/bin/python"
package_has_static_sources = '' == 'TRUE'
genmsg_check_deps_script = "/opt/ros/kinetic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
| 153.333333
| 980
| 0.851087
| 272
| 1,840
| 5.4375
| 0.227941
| 0.169033
| 0.208249
| 0.178499
| 0.583502
| 0.536849
| 0.517241
| 0.49831
| 0.49831
| 0.49831
| 0
| 0
| 0.018478
| 1,840
| 11
| 981
| 167.272727
| 0.818937
| 0.02663
| 0
| 0
| 1
| 0.222222
| 0.886529
| 0.866965
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b7951ad98c24e6026573cc7c188ef92c48db335b
| 20,023
|
py
|
Python
|
sdk/python/pulumi_alicloud/marketplace/order.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 42
|
2019-03-18T06:34:37.000Z
|
2022-03-24T07:08:57.000Z
|
sdk/python/pulumi_alicloud/marketplace/order.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 152
|
2019-04-15T21:03:44.000Z
|
2022-03-29T18:00:57.000Z
|
sdk/python/pulumi_alicloud/marketplace/order.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-26T17:30:07.000Z
|
2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['OrderArgs', 'Order']
@pulumi.input_type
class OrderArgs:
    # Generated input-argument bundle for creating an Order resource
    # (see the tfgen warning in the file header: do not edit by hand).
    def __init__(__self__, *,
                 package_version: pulumi.Input[str],
                 pricing_cycle: pulumi.Input[str],
                 product_code: pulumi.Input[str],
                 components: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 coupon_id: Optional[pulumi.Input[str]] = None,
                 duration: Optional[pulumi.Input[int]] = None,
                 pay_type: Optional[pulumi.Input[str]] = None,
                 quantity: Optional[pulumi.Input[int]] = None):
        """
        The set of arguments for constructing a Order resource.
        :param pulumi.Input[str] package_version: The package version of the market product.
        :param pulumi.Input[str] pricing_cycle: The purchase cycle of the product, valid values are `Day`, `Month` and `Year`.
        :param pulumi.Input[str] product_code: The product_code of market place product.
        :param pulumi.Input[Mapping[str, Any]] components: Service providers customize additional components.
        :param pulumi.Input[str] coupon_id: The coupon id of the market product.
        :param pulumi.Input[int] duration: The number of purchase cycles.
        :param pulumi.Input[str] pay_type: Valid values are `PrePaid`, `PostPaid`,System default to `PostPaid`.
        :param pulumi.Input[int] quantity: The quantity of the market product will be purchased.
        """
        # Required arguments are always stored; optional ones only when
        # supplied, so unset values stay absent rather than explicit None.
        pulumi.set(__self__, "package_version", package_version)
        pulumi.set(__self__, "pricing_cycle", pricing_cycle)
        pulumi.set(__self__, "product_code", product_code)
        if components is not None:
            pulumi.set(__self__, "components", components)
        if coupon_id is not None:
            pulumi.set(__self__, "coupon_id", coupon_id)
        if duration is not None:
            pulumi.set(__self__, "duration", duration)
        if pay_type is not None:
            pulumi.set(__self__, "pay_type", pay_type)
        if quantity is not None:
            pulumi.set(__self__, "quantity", quantity)

    # Each property below proxies through pulumi.get/pulumi.set; the
    # `name=` on the getter maps the Python snake_case attribute to the
    # provider's camelCase property name.
    @property
    @pulumi.getter(name="packageVersion")
    def package_version(self) -> pulumi.Input[str]:
        """
        The package version of the market product.
        """
        return pulumi.get(self, "package_version")

    @package_version.setter
    def package_version(self, value: pulumi.Input[str]):
        pulumi.set(self, "package_version", value)

    @property
    @pulumi.getter(name="pricingCycle")
    def pricing_cycle(self) -> pulumi.Input[str]:
        """
        The purchase cycle of the product, valid values are `Day`, `Month` and `Year`.
        """
        return pulumi.get(self, "pricing_cycle")

    @pricing_cycle.setter
    def pricing_cycle(self, value: pulumi.Input[str]):
        pulumi.set(self, "pricing_cycle", value)

    @property
    @pulumi.getter(name="productCode")
    def product_code(self) -> pulumi.Input[str]:
        """
        The product_code of market place product.
        """
        return pulumi.get(self, "product_code")

    @product_code.setter
    def product_code(self, value: pulumi.Input[str]):
        pulumi.set(self, "product_code", value)

    @property
    @pulumi.getter
    def components(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        Service providers customize additional components.
        """
        return pulumi.get(self, "components")

    @components.setter
    def components(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "components", value)

    @property
    @pulumi.getter(name="couponId")
    def coupon_id(self) -> Optional[pulumi.Input[str]]:
        """
        The coupon id of the market product.
        """
        return pulumi.get(self, "coupon_id")

    @coupon_id.setter
    def coupon_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "coupon_id", value)

    @property
    @pulumi.getter
    def duration(self) -> Optional[pulumi.Input[int]]:
        """
        The number of purchase cycles.
        """
        return pulumi.get(self, "duration")

    @duration.setter
    def duration(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "duration", value)

    @property
    @pulumi.getter(name="payType")
    def pay_type(self) -> Optional[pulumi.Input[str]]:
        """
        Valid values are `PrePaid`, `PostPaid`,System default to `PostPaid`.
        """
        return pulumi.get(self, "pay_type")

    @pay_type.setter
    def pay_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pay_type", value)

    @property
    @pulumi.getter
    def quantity(self) -> Optional[pulumi.Input[int]]:
        """
        The quantity of the market product will be purchased.
        """
        return pulumi.get(self, "quantity")

    @quantity.setter
    def quantity(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "quantity", value)
@pulumi.input_type
class _OrderState:
    # Generated state bundle for looking up / filtering existing Order
    # resources: every field is optional, unlike OrderArgs.
    def __init__(__self__, *,
                 components: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 coupon_id: Optional[pulumi.Input[str]] = None,
                 duration: Optional[pulumi.Input[int]] = None,
                 package_version: Optional[pulumi.Input[str]] = None,
                 pay_type: Optional[pulumi.Input[str]] = None,
                 pricing_cycle: Optional[pulumi.Input[str]] = None,
                 product_code: Optional[pulumi.Input[str]] = None,
                 quantity: Optional[pulumi.Input[int]] = None):
        """
        Input properties used for looking up and filtering Order resources.
        :param pulumi.Input[Mapping[str, Any]] components: Service providers customize additional components.
        :param pulumi.Input[str] coupon_id: The coupon id of the market product.
        :param pulumi.Input[int] duration: The number of purchase cycles.
        :param pulumi.Input[str] package_version: The package version of the market product.
        :param pulumi.Input[str] pay_type: Valid values are `PrePaid`, `PostPaid`,System default to `PostPaid`.
        :param pulumi.Input[str] pricing_cycle: The purchase cycle of the product, valid values are `Day`, `Month` and `Year`.
        :param pulumi.Input[str] product_code: The product_code of market place product.
        :param pulumi.Input[int] quantity: The quantity of the market product will be purchased.
        """
        # Store only the values that were supplied; unset fields stay absent.
        if components is not None:
            pulumi.set(__self__, "components", components)
        if coupon_id is not None:
            pulumi.set(__self__, "coupon_id", coupon_id)
        if duration is not None:
            pulumi.set(__self__, "duration", duration)
        if package_version is not None:
            pulumi.set(__self__, "package_version", package_version)
        if pay_type is not None:
            pulumi.set(__self__, "pay_type", pay_type)
        if pricing_cycle is not None:
            pulumi.set(__self__, "pricing_cycle", pricing_cycle)
        if product_code is not None:
            pulumi.set(__self__, "product_code", product_code)
        if quantity is not None:
            pulumi.set(__self__, "quantity", quantity)

    # Property accessors proxy through pulumi.get/pulumi.set; `name=` on a
    # getter maps the snake_case attribute to the provider's camelCase name.
    @property
    @pulumi.getter
    def components(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        Service providers customize additional components.
        """
        return pulumi.get(self, "components")

    @components.setter
    def components(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "components", value)

    @property
    @pulumi.getter(name="couponId")
    def coupon_id(self) -> Optional[pulumi.Input[str]]:
        """
        The coupon id of the market product.
        """
        return pulumi.get(self, "coupon_id")

    @coupon_id.setter
    def coupon_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "coupon_id", value)

    @property
    @pulumi.getter
    def duration(self) -> Optional[pulumi.Input[int]]:
        """
        The number of purchase cycles.
        """
        return pulumi.get(self, "duration")

    @duration.setter
    def duration(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "duration", value)

    @property
    @pulumi.getter(name="packageVersion")
    def package_version(self) -> Optional[pulumi.Input[str]]:
        """
        The package version of the market product.
        """
        return pulumi.get(self, "package_version")

    @package_version.setter
    def package_version(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "package_version", value)

    @property
    @pulumi.getter(name="payType")
    def pay_type(self) -> Optional[pulumi.Input[str]]:
        """
        Valid values are `PrePaid`, `PostPaid`,System default to `PostPaid`.
        """
        return pulumi.get(self, "pay_type")

    @pay_type.setter
    def pay_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pay_type", value)

    @property
    @pulumi.getter(name="pricingCycle")
    def pricing_cycle(self) -> Optional[pulumi.Input[str]]:
        """
        The purchase cycle of the product, valid values are `Day`, `Month` and `Year`.
        """
        return pulumi.get(self, "pricing_cycle")

    @pricing_cycle.setter
    def pricing_cycle(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "pricing_cycle", value)

    @property
    @pulumi.getter(name="productCode")
    def product_code(self) -> Optional[pulumi.Input[str]]:
        """
        The product_code of market place product.
        """
        return pulumi.get(self, "product_code")

    @product_code.setter
    def product_code(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "product_code", value)

    @property
    @pulumi.getter
    def quantity(self) -> Optional[pulumi.Input[int]]:
        """
        The quantity of the market product will be purchased.
        """
        return pulumi.get(self, "quantity")

    @quantity.setter
    def quantity(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "quantity", value)
class Order(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
components: Optional[pulumi.Input[Mapping[str, Any]]] = None,
coupon_id: Optional[pulumi.Input[str]] = None,
duration: Optional[pulumi.Input[int]] = None,
package_version: Optional[pulumi.Input[str]] = None,
pay_type: Optional[pulumi.Input[str]] = None,
pricing_cycle: Optional[pulumi.Input[str]] = None,
product_code: Optional[pulumi.Input[str]] = None,
quantity: Optional[pulumi.Input[int]] = None,
__props__=None):
"""
## Import
Market order can be imported using the id, e.g.
```sh
$ pulumi import alicloud:marketplace/order:Order order your-order-id
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Mapping[str, Any]] components: Service providers customize additional components.
:param pulumi.Input[str] coupon_id: The coupon id of the market product.
:param pulumi.Input[int] duration: The number of purchase cycles.
:param pulumi.Input[str] package_version: The package version of the market product.
:param pulumi.Input[str] pay_type: Valid values are `PrePaid`, `PostPaid`,System default to `PostPaid`.
:param pulumi.Input[str] pricing_cycle: The purchase cycle of the product, valid values are `Day`, `Month` and `Year`.
:param pulumi.Input[str] product_code: The product_code of market place product.
:param pulumi.Input[int] quantity: The quantity of the market product will be purchased.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: OrderArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
## Import
Market order can be imported using the id, e.g.
```sh
$ pulumi import alicloud:marketplace/order:Order order your-order-id
```
:param str resource_name: The name of the resource.
:param OrderArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(OrderArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
components: Optional[pulumi.Input[Mapping[str, Any]]] = None,
coupon_id: Optional[pulumi.Input[str]] = None,
duration: Optional[pulumi.Input[int]] = None,
package_version: Optional[pulumi.Input[str]] = None,
pay_type: Optional[pulumi.Input[str]] = None,
pricing_cycle: Optional[pulumi.Input[str]] = None,
product_code: Optional[pulumi.Input[str]] = None,
quantity: Optional[pulumi.Input[int]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = OrderArgs.__new__(OrderArgs)
__props__.__dict__["components"] = components
__props__.__dict__["coupon_id"] = coupon_id
__props__.__dict__["duration"] = duration
if package_version is None and not opts.urn:
raise TypeError("Missing required property 'package_version'")
__props__.__dict__["package_version"] = package_version
__props__.__dict__["pay_type"] = pay_type
if pricing_cycle is None and not opts.urn:
raise TypeError("Missing required property 'pricing_cycle'")
__props__.__dict__["pricing_cycle"] = pricing_cycle
if product_code is None and not opts.urn:
raise TypeError("Missing required property 'product_code'")
__props__.__dict__["product_code"] = product_code
__props__.__dict__["quantity"] = quantity
super(Order, __self__).__init__(
'alicloud:marketplace/order:Order',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
components: Optional[pulumi.Input[Mapping[str, Any]]] = None,
coupon_id: Optional[pulumi.Input[str]] = None,
duration: Optional[pulumi.Input[int]] = None,
package_version: Optional[pulumi.Input[str]] = None,
pay_type: Optional[pulumi.Input[str]] = None,
pricing_cycle: Optional[pulumi.Input[str]] = None,
product_code: Optional[pulumi.Input[str]] = None,
quantity: Optional[pulumi.Input[int]] = None) -> 'Order':
"""
Get an existing Order resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Mapping[str, Any]] components: Service providers customize additional components.
:param pulumi.Input[str] coupon_id: The coupon id of the market product.
:param pulumi.Input[int] duration: The number of purchase cycles.
:param pulumi.Input[str] package_version: The package version of the market product.
:param pulumi.Input[str] pay_type: Valid values are `PrePaid`, `PostPaid`,System default to `PostPaid`.
:param pulumi.Input[str] pricing_cycle: The purchase cycle of the product, valid values are `Day`, `Month` and `Year`.
:param pulumi.Input[str] product_code: The product_code of market place product.
:param pulumi.Input[int] quantity: The quantity of the market product will be purchased.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _OrderState.__new__(_OrderState)
__props__.__dict__["components"] = components
__props__.__dict__["coupon_id"] = coupon_id
__props__.__dict__["duration"] = duration
__props__.__dict__["package_version"] = package_version
__props__.__dict__["pay_type"] = pay_type
__props__.__dict__["pricing_cycle"] = pricing_cycle
__props__.__dict__["product_code"] = product_code
__props__.__dict__["quantity"] = quantity
return Order(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def components(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
        """
        Service providers customize additional components.
        """
        # Resolved output value of the `components` input property.
        return pulumi.get(self, "components")
    @property
    @pulumi.getter(name="couponId")
    def coupon_id(self) -> pulumi.Output[Optional[str]]:
        """
        The coupon id of the market product.
        """
        # Resolved output value of the `coupon_id` input property.
        return pulumi.get(self, "coupon_id")
    @property
    @pulumi.getter
    def duration(self) -> pulumi.Output[Optional[int]]:
        """
        The number of purchase cycles.
        """
        # Resolved output value of the `duration` input property.
        return pulumi.get(self, "duration")
    @property
    @pulumi.getter(name="packageVersion")
    def package_version(self) -> pulumi.Output[str]:
        """
        The package version of the market product.
        """
        # Required property, so the output is non-optional.
        return pulumi.get(self, "package_version")
    @property
    @pulumi.getter(name="payType")
    def pay_type(self) -> pulumi.Output[Optional[str]]:
        """
        Valid values are `PrePaid`, `PostPaid`,System default to `PostPaid`.
        """
        # Resolved output value of the `pay_type` input property.
        return pulumi.get(self, "pay_type")
    @property
    @pulumi.getter(name="pricingCycle")
    def pricing_cycle(self) -> pulumi.Output[str]:
        """
        The purchase cycle of the product, valid values are `Day`, `Month` and `Year`.
        """
        # Required property, so the output is non-optional.
        return pulumi.get(self, "pricing_cycle")
    @property
    @pulumi.getter(name="productCode")
    def product_code(self) -> pulumi.Output[str]:
        """
        The product_code of market place product.
        """
        # Required property, so the output is non-optional.
        return pulumi.get(self, "product_code")
    @property
    @pulumi.getter
    def quantity(self) -> pulumi.Output[Optional[int]]:
        """
        The quantity of the market product will be purchased.
        """
        # Resolved output value of the `quantity` input property.
        return pulumi.get(self, "quantity")
| 40.697154
| 134
| 0.630974
| 2,324
| 20,023
| 5.22031
| 0.071429
| 0.097923
| 0.077316
| 0.065282
| 0.873558
| 0.852456
| 0.830531
| 0.813139
| 0.794016
| 0.781569
| 0
| 0.000067
| 0.258103
| 20,023
| 491
| 135
| 40.780041
| 0.816627
| 0.265844
| 0
| 0.753425
| 1
| 0
| 0.090562
| 0.002347
| 0
| 0
| 0
| 0
| 0
| 1
| 0.160959
| false
| 0.003425
| 0.017123
| 0
| 0.273973
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b7b06155d854f22833de37e86a8c8ea797b857ce
| 18,159
|
py
|
Python
|
unit_tests/test_check_ceph_status.py
|
wolsen/charm-ceph-mon
|
1e148346b7acb35d9ef38e495178b855c352a133
|
[
"ECL-2.0",
"Apache-2.0"
] | 17
|
2016-04-17T04:00:38.000Z
|
2021-01-18T16:09:54.000Z
|
unit_tests/test_check_ceph_status.py
|
wolsen/charm-ceph-mon
|
1e148346b7acb35d9ef38e495178b855c352a133
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2016-07-01T14:08:13.000Z
|
2021-08-20T06:22:20.000Z
|
unit_tests/test_check_ceph_status.py
|
wolsen/charm-ceph-mon
|
1e148346b7acb35d9ef38e495178b855c352a133
|
[
"ECL-2.0",
"Apache-2.0"
] | 19
|
2016-03-07T09:07:39.000Z
|
2021-10-29T18:03:24.000Z
|
# Copyright 2016 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import os
import sys
from mock import patch
# import the module we want to test
os.sys.path.insert(1, os.path.join(sys.path[0], 'files/nagios'))
import check_ceph_status
@patch('subprocess.check_output')
class NagiosTestCase(unittest.TestCase):
    """Unit tests for the check_ceph_status Nagios plugin.

    The class-level patch replaces subprocess.check_output for every test,
    so nothing here shells out to a real `ceph` binary; canned JSON fixtures
    under unit_tests/ stand in for `ceph status` output.
    """

    # Version triples fed to the mocked check_ceph_status.get_ceph_version().
    PRE_LUMINOUS = [10, 2, 9]
    LUMINOUS = [12, 2, 0]

    def _load_fixture(self, mock_subprocess, fixture):
        # Return the fixture file's contents from the mocked check_output,
        # encoded to bytes exactly as the real subprocess call would return.
        with open('unit_tests/' + fixture) as f:
            mock_subprocess.return_value = f.read().encode('UTF-8')

    def _check(self, argv):
        # argparse expects a list of argument strings. (The original tests
        # passed "" in places, which only worked because an empty string
        # iterates to no arguments; use a real list instead.)
        args = check_ceph_status.parse_args(argv)
        return check_ceph_status.check_ceph_status(args)

    def _assert_check_raises(self, error, argv):
        # The plugin signals WARNING/CRITICAL by raising; assert that here.
        self.assertRaises(error, lambda: self._check(argv))

    def test_get_ceph_version(self, mock_subprocess):
        mock_subprocess.return_value = 'ceph version 10.2.9 ' \
            '(2ee413f77150c0f375ff6f10edd6c8f9c7d060d0)'.encode('UTF-8')
        self.assertEqual(check_ceph_status.get_ceph_version(), [10, 2, 9])

    # All OK, pre-luminous
    @patch('check_ceph_status.get_ceph_version')
    def test_health_ok(self, mock_ceph_version, mock_subprocess):
        mock_ceph_version.return_value = self.PRE_LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_ok.json')
        self.assertRegex(self._check(['--degraded_thresh', '1']),
                         r"^All OK$")

    # Warning, pre-luminous
    @patch('check_ceph_status.get_ceph_version')
    def test_health_warn(self, mock_ceph_version, mock_subprocess):
        mock_ceph_version.return_value = self.PRE_LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_warn.json')
        self._assert_check_raises(check_ceph_status.WarnError, [])

    # Error, pre-luminous, health_critical status
    @patch('check_ceph_status.get_ceph_version')
    def test_health_err(self, mock_ceph_version, mock_subprocess):
        mock_ceph_version.return_value = self.PRE_LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_crit.json')
        self._assert_check_raises(check_ceph_status.CriticalError, [])

    # Error, pre-luminous, overall HEALTH_ERR
    @patch('check_ceph_status.get_ceph_version')
    def test_health_crit(self, mock_ceph_version, mock_subprocess):
        mock_ceph_version.return_value = self.PRE_LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_error.json')
        self._assert_check_raises(check_ceph_status.CriticalError, [])

    # Error, pre-luminous, because misplaced ratio is too big
    @patch('check_ceph_status.get_ceph_version')
    def test_health_crit_misplaced(self, mock_ceph_version, mock_subprocess):
        mock_ceph_version.return_value = self.PRE_LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_params.json')
        self._assert_check_raises(check_ceph_status.CriticalError,
                                  ['--misplaced_thresh', '0.1'])

    # Error, pre-luminous, because recovery rate is too low
    @patch('check_ceph_status.get_ceph_version')
    def test_health_crit_recovery(self, mock_ceph_version, mock_subprocess):
        mock_ceph_version.return_value = self.PRE_LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_params.json')
        self._assert_check_raises(check_ceph_status.CriticalError,
                                  ['--recovery_rate', '400'])

    # Warning, pre-luminous, deepscrub
    @patch('check_ceph_status.get_ceph_version')
    def test_health_warn_deepscrub(self, mock_ceph_version, mock_subprocess):
        mock_ceph_version.return_value = self.PRE_LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_nodeepscrub.json')
        self._assert_check_raises(check_ceph_status.WarnError, [])

    # Error, pre-luminous, deepscrub
    @patch('check_ceph_status.get_ceph_version')
    def test_health_crit_deepscrub(self, mock_ceph_version, mock_subprocess):
        mock_ceph_version.return_value = self.PRE_LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_nodeepscrub.json')
        self._assert_check_raises(check_ceph_status.CriticalError,
                                  ['--raise_nodeepscrub'])

    # Error, pre-luminous, noout
    @patch('check_ceph_status.get_ceph_version')
    def test_health_crit_noout(self, mock_ceph_version, mock_subprocess):
        mock_ceph_version.return_value = self.PRE_LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_noout.json')
        self._assert_check_raises(check_ceph_status.CriticalError, [])

    # All OK, luminous
    @patch('check_ceph_status.get_ceph_version')
    def test_health_ok_luminous(self, mock_ceph_version, mock_subprocess):
        mock_ceph_version.return_value = self.LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_ok_luminous.json')
        self.assertRegex(self._check(['--degraded_thresh', '1']),
                         r"^All OK$")

    # Warning, luminous
    @patch('check_ceph_status.get_ceph_version')
    def test_health_warn_luminous(self, mock_ceph_version, mock_subprocess):
        mock_ceph_version.return_value = self.LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_many_warnings_luminous.json')
        self._assert_check_raises(check_ceph_status.WarnError, [])

    # Error, luminous, because of overall status
    # Error, luminous, because misplaced ratio is too big
    @patch('check_ceph_status.get_ceph_version')
    def test_health_critical_misplaced_luminous(self,
                                                mock_ceph_version,
                                                mock_subprocess):
        mock_ceph_version.return_value = self.LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_many_warnings_luminous.json')
        self._assert_check_raises(check_ceph_status.CriticalError,
                                  ['--misplaced_thresh', '0.1'])

    # Error, luminous, because degraded ratio is too big
    @patch('check_ceph_status.get_ceph_version')
    def test_health_critical_degraded_luminous(self,
                                               mock_ceph_version,
                                               mock_subprocess):
        mock_ceph_version.return_value = self.LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_degraded_luminous.json')
        self._assert_check_raises(check_ceph_status.CriticalError,
                                  ['--degraded_thresh', '0.1'])

    # Error, luminous, because recovery rate is too low
    @patch('check_ceph_status.get_ceph_version')
    def test_health_critical_recovery_luminous(self,
                                               mock_ceph_version,
                                               mock_subprocess):
        mock_ceph_version.return_value = self.LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_many_warnings_luminous.json')
        self._assert_check_raises(check_ceph_status.CriticalError,
                                  ['--recovery_rate', '20'])

    # Warning, luminous, deepscrub
    @patch('check_ceph_status.get_ceph_version')
    def test_health_warn_deepscrub_luminous(self,
                                            mock_ceph_version,
                                            mock_subprocess):
        mock_ceph_version.return_value = self.LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_nodeepscrub_luminous.json')
        self._assert_check_raises(check_ceph_status.WarnError, [])

    # Error, luminous, deepscrub
    @patch('check_ceph_status.get_ceph_version')
    def test_health_crit_deepscrub_luminous(self,
                                            mock_ceph_version,
                                            mock_subprocess):
        mock_ceph_version.return_value = self.LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_nodeepscrub_luminous.json')
        self._assert_check_raises(check_ceph_status.CriticalError,
                                  ['--raise_nodeepscrub'])

    # Error, luminous, noout
    @patch('check_ceph_status.get_ceph_version')
    def test_health_crit_noout_luminous(self,
                                        mock_ceph_version,
                                        mock_subprocess):
        mock_ceph_version.return_value = self.LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_noout_luminous.json')
        self._assert_check_raises(check_ceph_status.CriticalError, [])

    # Additional Ok, luminous, deepscrub
    @patch('check_ceph_status.get_ceph_version')
    def test_additional_ok_deepscrub_luminous(self,
                                              mock_ceph_version,
                                              mock_subprocess):
        mock_ceph_version.return_value = self.LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_nodeepscrub_luminous.json')
        self.assertRegex(self._check(['--additional_check', 'osd out']),
                         r"^All OK$")

    # Additional warning, luminous, deepscrub
    @patch('check_ceph_status.get_ceph_version')
    def test_additional_warn_deepscrub_luminous(self,
                                                mock_ceph_version,
                                                mock_subprocess):
        mock_ceph_version.return_value = self.LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_nodeepscrub_luminous.json')
        self._assert_check_raises(check_ceph_status.WarnError,
                                  ['--additional_check', 'deep'])

    # Additional error, luminous, deepscrub
    @patch('check_ceph_status.get_ceph_version')
    def test_additional_error_deepscrub_luminous(self,
                                                 mock_ceph_version,
                                                 mock_subprocess):
        mock_ceph_version.return_value = self.LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_nodeepscrub_luminous.json')
        self._assert_check_raises(check_ceph_status.CriticalError,
                                  ['--additional_check', 'deep',
                                   '--additional_check_critical'])

    # Additional Ok, pre-luminous, deepscrub
    @patch('check_ceph_status.get_ceph_version')
    def test_additional_ok_deepscrub_pre_luminous(self,
                                                  mock_ceph_version,
                                                  mock_subprocess):
        mock_ceph_version.return_value = self.PRE_LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_nodeepscrub.json')
        self.assertRegex(self._check(['--additional_check', 'osd out']),
                         r"^All OK$")

    # Additional warning, pre-luminous, deepscrub
    @patch('check_ceph_status.get_ceph_version')
    def test_additional_warn_deepscrub_pre_luminous(self,
                                                    mock_ceph_version,
                                                    mock_subprocess):
        mock_ceph_version.return_value = self.PRE_LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_nodeepscrub.json')
        self._assert_check_raises(check_ceph_status.WarnError,
                                  ['--additional_check', 'deep'])

    # Additional error, pre-luminous, deepscrub
    @patch('check_ceph_status.get_ceph_version')
    def test_additional_error_deepscrub_pre_luminous(self,
                                                     mock_ceph_version,
                                                     mock_subprocess):
        mock_ceph_version.return_value = self.PRE_LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_nodeepscrub.json')
        self._assert_check_raises(check_ceph_status.CriticalError,
                                  ['--additional_check', 'deep',
                                   '--additional_check_critical'])

    # Num OSD OK, pre-luminous
    @patch('check_ceph_status.get_ceph_version')
    def test_num_osds_ok_pre_luminous(self,
                                      mock_ceph_version,
                                      mock_subprocess):
        mock_ceph_version.return_value = self.PRE_LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_ok.json')
        self.assertRegex(self._check(['--check_num_osds']), r"^OK")

    # Num OSD error, pre-luminous
    @patch('check_ceph_status.get_ceph_version')
    def test_num_osds_error_pre_luminous(self,
                                         mock_ceph_version,
                                         mock_subprocess):
        mock_ceph_version.return_value = self.PRE_LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_warn.json')
        self._assert_check_raises(check_ceph_status.CriticalError,
                                  ['--check_num_osds'])

    # Num OSD OK, luminous
    @patch('check_ceph_status.get_ceph_version')
    def test_num_osds_ok_luminous(self,
                                  mock_ceph_version,
                                  mock_subprocess):
        mock_ceph_version.return_value = self.LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_many_warnings_luminous.json')
        self.assertRegex(self._check(['--check_num_osds']), r"^OK")

    # Num OSD error, luminous
    @patch('check_ceph_status.get_ceph_version')
    def test_num_osds_error_luminous(self,
                                     mock_ceph_version,
                                     mock_subprocess):
        mock_ceph_version.return_value = self.LUMINOUS
        self._load_fixture(mock_subprocess, 'ceph_degraded_luminous.json')
        self._assert_check_raises(check_ceph_status.CriticalError,
                                  ['--check_num_osds'])
| 49.479564
| 78
| 0.63346
| 2,198
| 18,159
| 4.878526
| 0.070064
| 0.109951
| 0.183251
| 0.06528
| 0.913457
| 0.910659
| 0.906836
| 0.906836
| 0.906836
| 0.906836
| 0
| 0.014457
| 0.272427
| 18,159
| 366
| 79
| 49.614754
| 0.797154
| 0.085688
| 0
| 0.834483
| 0
| 0
| 0.152597
| 0.117391
| 0
| 0
| 0
| 0
| 0.096552
| 1
| 0.096552
| false
| 0
| 0.017241
| 0
| 0.117241
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b7d75fae90757586b3adffc30a2209eab76f9862
| 808
|
py
|
Python
|
edm_web1/router.py
|
zhouli121018/nodejsgm
|
0ccbc8acf61badc812f684dd39253d55c99f08eb
|
[
"MIT"
] | null | null | null |
edm_web1/router.py
|
zhouli121018/nodejsgm
|
0ccbc8acf61badc812f684dd39253d55c99f08eb
|
[
"MIT"
] | 18
|
2020-06-05T18:17:40.000Z
|
2022-03-11T23:25:21.000Z
|
edm_web1/router.py
|
zhouli121018/nodejsgm
|
0ccbc8acf61badc812f684dd39253d55c99f08eb
|
[
"MIT"
] | null | null | null |
class MyRouter(object):
    """Database router for Django's multi-database support.

    Routing rule (identical for reads and writes): the ``commontype`` model
    and every model in the ``other`` app go to the ``pgsql-ms`` database;
    all remaining models go to ``mm-ms``.
    """

    def _route(self, model):
        # Shared read/write rule; db_for_read and db_for_write previously
        # duplicated this logic verbatim.
        if model._meta.model_name == 'commontype':
            return 'pgsql-ms'
        if model._meta.app_label == 'other':
            return 'pgsql-ms'
        return 'mm-ms'

    def db_for_read(self, model, **hints):
        """Return the database alias to read *model* from."""
        return self._route(model)

    def db_for_write(self, model, **hints):
        """Return the database alias to write *model* to."""
        return self._route(model)
| 40.4
| 115
| 0.574257
| 93
| 808
| 4.752688
| 0.354839
| 0.122172
| 0.099548
| 0.153846
| 0.871041
| 0.791855
| 0.791855
| 0.791855
| 0.791855
| 0.791855
| 0
| 0
| 0.274752
| 808
| 19
| 116
| 42.526316
| 0.754266
| 0.355198
| 0
| 0.769231
| 0
| 0
| 0.139806
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| false
| 0
| 0
| 0
| 0.692308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
4d33d7e6e7a43d4918d7e81f0ad39703caf62b51
| 35,641
|
py
|
Python
|
egret/model_library/unit_commitment/ramping_limits.py
|
austinshort/Egret
|
e1fe4ece9f524dcd76f77768cf0d8048dc2b9fd7
|
[
"BSD-3-Clause"
] | null | null | null |
egret/model_library/unit_commitment/ramping_limits.py
|
austinshort/Egret
|
e1fe4ece9f524dcd76f77768cf0d8048dc2b9fd7
|
[
"BSD-3-Clause"
] | 1
|
2019-12-11T22:45:12.000Z
|
2019-12-11T22:45:12.000Z
|
egret/model_library/unit_commitment/ramping_limits.py
|
austinshort/Egret
|
e1fe4ece9f524dcd76f77768cf0d8048dc2b9fd7
|
[
"BSD-3-Clause"
] | null | null | null |
# ___________________________________________________________________________
#
# EGRET: Electrical Grid Research and Engineering Tools
# Copyright 2019 National Technology & Engineering Solutions of Sandia, LLC
# (NTESS). Under the terms of Contract DE-NA0003525 with NTESS, the U.S.
# Government retains certain rights in this software.
# This software is distributed under the Revised BSD License.
# ___________________________________________________________________________
## file for ramping constraints
from pyomo.environ import *
import math
from .uc_utils import add_model_attr
# Name under which this formulation registers itself via add_model_attr.
component_name = 'ramping_limits'

## TODO: FIXME: FINISH CONVERTING RAMPING CONSTRAINTS

# Generation-limit formulations that already encode startup/shutdown power
# limits; _ramp_up_not_needed/_ramp_down_not_needed treat some ramping
# constraints as redundant when m.generation_limits is one of these.
generation_limits_w_startup_shutdown = ['MLR_generation_limits',
                                        'gentile_generation_limits',
                                        'pan_guan_gentile_generation_limits',
                                        'pan_guan_gentile_KOW_generation_limits',
                                        ]
## For safety, we will always enforce ramping limits
## at the initial time step
def _ramp_up_not_needed(m,g,t):
    """Return True when the ramp-up constraint at (g, t) is redundant."""
    # Ramping is always enforced at the initial time step.
    if t == m.InitialTime:
        return False
    # Redundant when the nominal ramp-up already spans the whole feasible
    # jump and the chosen generation-limits formulation handles
    # startup/shutdown power itself.
    ramp_spans_range = value(m.ScaledNominalRampUpLimit[g,t]) >= \
        value(m.MaximumPowerOutput[g,t] - m.MinimumPowerOutput[g,t-1])
    return bool(ramp_spans_range and
                m.generation_limits in generation_limits_w_startup_shutdown)
## In the case of ramp down, just if specified to
## by the model options
def _ramp_down_not_needed(m,g,t):
    """Return True when the ramp-down constraint at (g, t) is redundant."""
    # At the initial time step the limit stands unless the model options
    # explicitly waive t1 ramp rates.
    if t == m.InitialTime:
        return not m.enforce_t1_ramp_rates
    # Redundant when the nominal ramp-down already spans the whole feasible
    # drop and the chosen generation-limits formulation handles
    # startup/shutdown power itself.
    ramp_spans_range = value(m.ScaledNominalRampDownLimit[g,t]) >= \
        value(m.MaximumPowerOutput[g,t-1] - m.MinimumPowerOutput[g,t])
    return bool(ramp_spans_range and
                m.generation_limits in generation_limits_w_startup_shutdown)
def _model_time_invariant(m):
    '''
    A test for if certain parameters important to
    ramping constraints are time invariant
    '''
    ft = m.InitialTime
    # Pmax, Pmin, startup/shutdown ramp limits, nominal up/down ramp limits.
    params = (m.MaximumPowerOutput,
              m.MinimumPowerOutput,
              m.ScaledStartupRampLimit,
              m.ScaledShutdownRampLimit,
              m.ScaledNominalRampUpLimit,
              m.ScaledNominalRampDownLimit)
    for g in m.ThermalGenerators:
        # Snapshot each parameter at the initial time step ...
        baseline = [value(p[g,ft]) for p in params]
        # ... and report time-varying data on the first mismatch.
        for t in m.TimePeriods:
            if t == ft:
                continue
            if any(ref != value(p[g,t]) for ref, p in zip(baseline, params)):
                return False
    return True
def _damcikurt_basic_ramping(model):
    """Attach the basic Damci-Kurt ramp-up/ramp-down constraints to *model*.

    Shared implementation used by damcikurt_ramping and
    damcikurt_ramping_2period. Constraints are written in terms of the
    power-above-minimum expressions and are skipped entirely when
    _ramp_up_not_needed/_ramp_down_not_needed show them to be redundant.
    """
    ## NOTE: with the expression MaximumPowerAvailableAboveMinimum and PowerGeneratedAboveMinimum,
    ## these constraints are expressed as needed, there's no cancelation even though we end
    ## up using these expressions
    def enforce_max_available_ramp_up_rates_rule(m, g, t):
        # Skip when the nominal ramp already spans the feasible range.
        if _ramp_up_not_needed(m,g,t):
            return Constraint.Skip
        if t == m.InitialTime:
            # if the unit was on in t0, then it's m.PowerGeneratedT0[g] >= m.MinimumPowerOutput[g], and m.UnitOnT0 == 1
            # if not, then m.UnitOnT0[g] == 0 and so (m.PowerGeneratedT0[g] - m.MinimumPowerOutput[g]) * m.UnitOnT0[g] is 0
            ## assume m.MinimumPowerOutput[g,T0] == 0
            return m.MaximumPowerAvailableAboveMinimum[g, t] - m.PowerGeneratedT0[g] <= \
                (m.ScaledNominalRampUpLimit[g,t] + 0 - m.MinimumPowerOutput[g,t])*m.UnitOn[g,t] + \
                (m.ScaledStartupRampLimit[g,t] - 0 - m.ScaledNominalRampUpLimit[g,t])*m.UnitStart[g,t]
        else:
            return m.MaximumPowerAvailableAboveMinimum[g, t] - m.PowerGeneratedAboveMinimum[g, t-1] <= \
                (m.ScaledNominalRampUpLimit[g,t] + m.MinimumPowerOutput[g,t-1] - m.MinimumPowerOutput[g,t])*m.UnitOn[g,t] + \
                (m.ScaledStartupRampLimit[g,t] - m.MinimumPowerOutput[g,t-1] - m.ScaledNominalRampUpLimit[g,t])*m.UnitStart[g,t]
    model.EnforceMaxAvailableRampUpRates = Constraint(model.ThermalGenerators, model.TimePeriods, rule=enforce_max_available_ramp_up_rates_rule)

    def enforce_ramp_down_limits_rule(m, g, t):
        # Skip when the nominal ramp-down already spans the feasible drop.
        if _ramp_down_not_needed(m,g,t):
            return Constraint.Skip
        if t == m.InitialTime:
            ## assume m.MinimumPowerOutput[g,T0] == 0
            return m.PowerGeneratedT0[g] - m.PowerGeneratedAboveMinimum[g, t] <= \
                (m.ScaledNominalRampDownLimit[g,t] + m.MinimumPowerOutput[g,t] + 0)*m.UnitOnT0[g] + \
                (m.ScaledShutdownRampLimitT0[g] - m.MinimumPowerOutput[g,t] - m.ScaledNominalRampDownLimit[g,t])*m.UnitStop[g,t]
            ## TODO: figure out ScaledShutdownRampLimitT0[g]
        else:
            return m.PowerGeneratedAboveMinimum[g, t-1] - m.PowerGeneratedAboveMinimum[g, t] <= \
                (m.ScaledNominalRampDownLimit[g,t] + m.MinimumPowerOutput[g,t] - m.MinimumPowerOutput[g,t-1])*m.UnitOn[g,t-1] + \
                (m.ScaledShutdownRampLimit[g,t-1] - m.MinimumPowerOutput[g,t] - m.ScaledNominalRampDownLimit[g,t])*m.UnitStop[g,t]
    model.EnforceScaledNominalRampDownLimits = Constraint(model.ThermalGenerators, model.TimePeriods, rule=enforce_ramp_down_limits_rule)

    return
@add_model_attr(component_name, requires = {'data_loader': None,
                                            'status_vars': ['garver_3bin_vars','garver_2bin_vars', 'garver_3bin_relaxed_stop_vars', 'ALS_state_transition_vars'],
                                            'power_vars': None,
                                            'reserve_vars': None,
                                            'generation_limits':None,
                                            })
def damcikurt_ramping(model):
    '''
    Equations (3) and (18) from

    Pelin Damci-Kurt, Simge Kucukyavuz, Deepak Rajan, and Alper Atamturk. A
    polyhedral study of production ramping. Mathematical Programming,
    158(1-2):175-205, 2016.
    '''
    # Delegates entirely to the shared basic Damci-Kurt formulation;
    # damcikurt_ramping_2period adds the extra two-period cuts on top.
    _damcikurt_basic_ramping(model)
@add_model_attr(component_name, requires = {'data_loader': None,
                                            'status_vars': ['garver_3bin_vars','garver_2bin_vars', 'garver_3bin_relaxed_stop_vars', 'ALS_state_transition_vars'],
                                            'power_vars': None,
                                            'reserve_vars': None,
                                            'generation_limits':None,
                                            })
def damcikurt_ramping_2period(model):
    '''
    Equations (3) and (18), plus equations (20) and (23), from

    Pelin Damci-Kurt, Simge Kucukyavuz, Deepak Rajan, and Alper Atamturk. A
    polyhedral study of production ramping. Mathematical Programming,
    158(1-2):175-205, 2016.
    '''
    # Base ramping constraints first; the rules below add the multi-period
    # strengthening cuts.
    _damcikurt_basic_ramping(model)

    # The cut derivation below assumes constant Pmin/Pmax/ramp data.
    if not _model_time_invariant(model):
        raise NotImplementedError("damcikurt_ramping_2period has not be extended to model time-varying minimum or maximum power")
    #TODO: fix for time-varying SU/SD, Pmin, Pmax

    def two_period_ramp_up_rule(m, g, t):
        if value(m.ScaledStartupRampLimit[g,t]) < value(m.MinimumPowerOutput[g,t] + m.ScaledNominalRampUpLimit[g,t]):
            return Constraint.Skip
        # j = number of periods the startup ramp headroom covers, capped by
        # the remaining horizon.
        j = math.floor(min(value(m.NumTimePeriods)-t, value(m.ScaledStartupRampLimit[g,t] - m.MinimumPowerOutput[g,t])/value(m.ScaledNominalRampUpLimit[g,t])))
        if j > 1: ## j == 1 is handled above
            return m.MaximumPowerAvailableAboveMinimum[g,t+j] - m.PowerGeneratedAboveMinimum[g,t] <= j*m.ScaledNominalRampUpLimit[g,t]*m.UnitOn[g,t+j] \
                + sum( min(value(m.ScaledStartupRampLimit[g,t] - m.MinimumPowerOutput[g,t] - i*m.ScaledNominalRampUpLimit[g,t]), \
                           value(m.MaximumPowerOutput[g,t] - m.MinimumPowerOutput[g,t] - j*m.ScaledNominalRampUpLimit[g,t]))*m.UnitStart[g,i] for i in range(1, j+1) )
        return Constraint.Skip
    model.EnforceTwoPeriodRampUpRule = Constraint(model.ThermalGenerators, model.TimePeriods, rule=two_period_ramp_up_rule)

    def two_period_ramp_down_rule(m, g, t):
        if value(m.ScaledShutdownRampLimit[g,t]) < value(m.MinimumPowerOutput[g,t] + m.ScaledNominalRampDownLimit[g,t]):
            return Constraint.Skip
        # j = number of periods the shutdown ramp headroom covers, capped by
        # the remaining horizon.
        j = math.floor(min(value(m.NumTimePeriods)-t, value(m.ScaledShutdownRampLimit[g,t] - m.MinimumPowerOutput[g,t])/value(m.ScaledNominalRampDownLimit[g,t])))
        if j > 1: ## j == 1 is handled above
            return m.PowerGeneratedAboveMinimum[g,t] - m.PowerGeneratedAboveMinimum[g,t+j] <= j*m.ScaledNominalRampDownLimit[g,t]*m.UnitOn[g,t] \
                + sum( min(value(m.ScaledShutdownRampLimit[g,t] - m.MinimumPowerOutput[g,t] - (j-i+1)*m.ScaledNominalRampDownLimit[g,t]), \
                           value(m.MaximumPowerOutput[g,t] - m.MinimumPowerOutput[g,t] - j*m.ScaledNominalRampDownLimit[g,t]))*m.UnitStop[g,t+i] for i in range(1, j+1) )
        return Constraint.Skip
    model.EnforceTwoPeriodRampDownRule = Constraint(model.ThermalGenerators, model.TimePeriods, rule=two_period_ramp_down_rule)
@add_model_attr(component_name, requires = {'data_loader': None,
                                            'status_vars': ['ALS_state_transition_vars'],
                                            'power_vars': None,
                                            'reserve_vars': None,
                                            'generation_limits':None,
                                            })
def ALS_damcikurt_ramping(model):
    '''
    Equations (20a) and (20b) from

    Semih Atakan, Guglielmo Lulli, and Suvrajeet Sen. A state transition MIP
    formulation for the unit commitment problem. IEEE Transactions on Power
    Systems, 33(1):736-748, 2018.

    which are modifications of the damcikurt ramping limits.
    '''
    ## TODO: Check math for these constrains under time-varying pmin/pmax
    # Requires time-invariant data; see TODO above.
    if not _model_time_invariant(model):
        raise NotImplementedError("ALS_damcikurt_ramping has not be extended to model time-varying minimum or maximum power")

    ## NOTE: with the expression MaximumPowerAvailableAboveMinimum and PowerGeneratedAboveMinimum,
    ## these constraints are expressed as needed, there's no cancelation even though we end
    ## up using these expressions
    def enforce_max_available_ramp_up_rates_rule(m, g, t):
        # Skip when the nominal ramp already spans the feasible range.
        if _ramp_up_not_needed(m,g,t):
            return Constraint.Skip
        if t == m.InitialTime:
            # if the unit was on in t0, then it's m.PowerGeneratedT0[g] >= m.MinimumPowerOutput[g], and m.UnitOnT0 == 1
            # if not, then m.UnitOnT0[g] == 0 and so (m.PowerGeneratedT0[g] - m.MinimumPowerOutput[g]) * m.UnitOnT0[g] is 0
            return m.MaximumPowerAvailable[g, t] <= m.PowerGeneratedT0[g] - m.MinimumPowerOutput[g,t]*m.UnitOnT0[g] + \
                (m.ScaledNominalRampUpLimit[g,t]+m.MinimumPowerOutput[g,t])*m.UnitStayOn[g,t] + \
                m.ScaledStartupRampLimit[g,t]*m.UnitStart[g,t]
        else:
            return m.MaximumPowerAvailable[g, t] <= m.PowerGeneratedAboveMinimum[g, t-1] + \
                (m.ScaledNominalRampUpLimit[g,t]+m.MinimumPowerOutput[g,t])*m.UnitStayOn[g,t] + \
                m.ScaledStartupRampLimit[g,t]*m.UnitStart[g,t]
    model.EnforceMaxAvailableRampUpRates = Constraint(model.ThermalGenerators, model.TimePeriods, rule=enforce_max_available_ramp_up_rates_rule)

    def enforce_ramp_down_limits_rule(m, g, t):
        # Skip when the nominal ramp-down already spans the feasible drop.
        if _ramp_down_not_needed(m,g,t):
            return Constraint.Skip
        if t == m.InitialTime:
            return m.PowerGeneratedT0[g] - m.MinimumPowerOutput[g,t]*m.UnitOnT0[g] - m.PowerGeneratedAboveMinimum[g, t] <= \
                m.ScaledNominalRampDownLimit[g,t]*m.UnitStayOn[g,t] + (m.ScaledShutdownRampLimit[g,t] - m.MinimumPowerOutput[g,t])*m.UnitStop[g,t]
        else:
            return m.PowerGeneratedAboveMinimum[g, t-1] - m.PowerGeneratedAboveMinimum[g, t] <= \
                m.ScaledNominalRampDownLimit[g,t]*m.UnitStayOn[g,t] + (m.ScaledShutdownRampLimit[g,t] - m.MinimumPowerOutput[g,t])*m.UnitStop[g,t]
    model.EnforceScaledNominalRampDownLimits = Constraint(model.ThermalGenerators, model.TimePeriods, rule=enforce_ramp_down_limits_rule)

    return
@add_model_attr(component_name, requires = {'data_loader': None,
                                            'power_vars': None,
                                            'reserve_vars': None,
                                            'generation_limits':generation_limits_w_startup_shutdown,
                                            })
def MLR_ramping(model):
    '''
    Ramping limits expressed on the above-minimum power variables.

    Equations (12) and (13) from

    G. Morales-Espana, J. M. Latorre, and A. Ramos. Tight and compact MILP
    formulation for the thermal unit commitment problem. IEEE Transactions on
    Power Systems, 28(4):4897–4908, 2013.

    with T0 ramp-down limit which is required to make this consistent with other
    formulations for ramping.
    '''
    # TODO: ADJUST FOR DIFFERING MINIMUM POWER OUTPUTS
    if not _model_time_invariant(model):
        raise NotImplementedError("MLR_ramping has not be extended to model time-varying minimum or maximum power")

    # the following constraint encodes Constraint 12 defined in ME
    def enforce_max_available_ramp_up_rates_rule(m, g, t):
        # Skip the constraint when the ramp-up limit can never bind for (g, t).
        if _ramp_up_not_needed(m,g,t):
            return Constraint.Skip
        if t == m.InitialTime:
            # if the unit was on in t0, then it's m.PowerGeneratedT0[g] >= m.MinimumPowerOutput[g], and m.UnitOnT0 == 1
            # if not, then m.UnitOnT0[g] == 0 and so (m.PowerGeneratedT0[g] - m.MinimumPowerOutput[g]) * m.UnitOnT0[g] is 0
            return m.MaximumPowerAvailableAboveMinimum[g, t] <= m.PowerGeneratedT0[g] - m.MinimumPowerOutput[g,t]*m.UnitOnT0[g] + \
                                                                m.ScaledNominalRampUpLimit[g,t]
        else:
            return m.MaximumPowerAvailableAboveMinimum[g, t] <= m.PowerGeneratedAboveMinimum[g, t-1] + m.ScaledNominalRampUpLimit[g,t]

    model.EnforceMaxAvailableRampUpRates = Constraint(model.ThermalGenerators, model.TimePeriods, rule=enforce_max_available_ramp_up_rates_rule)

    # the following constraint encodes Constraint 13 defined in ME
    def enforce_ramp_down_limits_rule(m, g, t):
        if _ramp_down_not_needed(m,g,t):
            return Constraint.Skip
        if t == m.InitialTime:
            # initial period: ramp down from the data-supplied T0 power level
            return m.PowerGeneratedT0[g] - m.MinimumPowerOutput[g,t]*m.UnitOnT0[g] - m.PowerGeneratedAboveMinimum[g, t] <= \
                m.ScaledNominalRampDownLimit[g,t]
        else:
            return m.PowerGeneratedAboveMinimum[g, t-1] - m.PowerGeneratedAboveMinimum[g, t] <= \
                m.ScaledNominalRampDownLimit[g,t]

    model.EnforceScaledNominalRampDownLimits = Constraint(model.ThermalGenerators, model.TimePeriods, rule=enforce_ramp_down_limits_rule)

    ## need this so we agree with the other ramping models when using MLR Ramping
    ## (i.e., can't shutdown at t=1 unless we're below ScaledShutdownRampLimit)
    def power_limit_t0_stop_rule(m,g):
        # Only enforced when T0 ramping is enabled on the model.
        if not m.enforce_t1_ramp_rates:
            return Constraint.Skip
        else:
            t = m.InitialTime
            return m.PowerGeneratedT0[g] <= (m.MaximumPowerOutput[g,t])*m.UnitOnT0[g] \
                    - (m.MaximumPowerOutput[g,t] - m.ScaledShutdownRampLimit[g,t])*m.UnitStop[g,t]

    model.power_limit_t0_stop = Constraint(model.ThermalGenerators,rule=power_limit_t0_stop_rule)

    return
@add_model_attr(component_name, requires = {'data_loader': None,
                                            'status_vars': ['garver_3bin_vars','garver_2bin_vars','garver_3bin_relaxed_stop_vars', 'ALS_state_transition_vars'],
                                            'power_vars': None,
                                            'reserve_vars': None,
                                            'generation_limits':None,
                                            })
def arroyo_conejo_ramping(model):
    '''
    Ramp-up/ramp-down limits on total power generated.

    equations (17) and (18) from

    J.M. Arroyo and A.J. Conejo, Optimal Response of a Thermal Unit
    to an Electricity Spot Market, IEEE Transactions on Power Systems
    Vol. 15, No. 3, Aug 2000
    '''

    # impose upper bounds on the maximum power available for each generator in each time period,
    # based on standard and start-up ramp limits.

    # the following constraint encodes Constraint 6 defined in OAV
    def enforce_max_available_ramp_up_rates_rule(m, g, t):
        if _ramp_up_not_needed(m,g,t):
            return Constraint.Skip
        # 4 cases, split by (t-1, t) unit status (RHS is defined as the delta from m.PowerGenerated[g, t-1])
        if t == m.InitialTime:
            # initial period: ramp from the data-supplied T0 power/status
            return m.MaximumPowerAvailable[g, t] <= m.PowerGeneratedT0[g] + \
                                                    m.ScaledNominalRampUpLimit[g,t] * m.UnitOnT0[g] + \
                                                    m.ScaledStartupRampLimit[g,t] * m.UnitStart[g, t]
        else:
            return m.MaximumPowerAvailable[g, t] <= m.PowerGenerated[g, t-1] + \
                                                    m.ScaledNominalRampUpLimit[g,t] * m.UnitOn[g, t-1] + \
                                                    m.ScaledStartupRampLimit[g,t] * m.UnitStart[g,t]

    model.EnforceMaxAvailableRampUpRates = Constraint(model.ThermalGenerators, model.TimePeriods, rule=enforce_max_available_ramp_up_rates_rule)

    # the following constraint encodes Constraint 7 defined in OAV
    def enforce_ramp_down_limits_rule(m, g, t):
        if _ramp_down_not_needed(m,g,t):
            return Constraint.Skip
        if t == m.InitialTime:
            # shutdown from T0 uses the T0-specific shutdown ramp parameter
            return m.PowerGeneratedT0[g] - m.PowerGenerated[g, t] <= \
                m.ScaledNominalRampDownLimit[g,t] * m.UnitOn[g, t] + \
                m.ScaledShutdownRampLimitT0[g] * m.UnitStop[g, t]
        else:
            return m.PowerGenerated[g, t-1] - m.PowerGenerated[g, t] <= \
                m.ScaledNominalRampDownLimit[g,t] * m.UnitOn[g, t] + \
                m.ScaledShutdownRampLimit[g,t-1] * m.UnitStop[g, t]

    model.EnforceScaledNominalRampDownLimits = Constraint(model.ThermalGenerators, model.TimePeriods, rule=enforce_ramp_down_limits_rule)
def _OAV_enhanced(model):
    '''
    baseline for the OAV enhanced formulations

    Adds the strengthened ramp-up constraint (23) and the strongest valid
    ramp-down constraint among (7), (20), (21), selecting per-generator based
    on ramp-rate/minimum-up/down-time data.
    '''
    ## TODO : TIME-VARYING Pmin/Pmax

    # the following constraint encodes Constraint 23 defined in OAV
    def enforce_ramp_up_limits_rule(m, g, t):
        if _ramp_up_not_needed(m,g,t):
            return Constraint.Skip
        # strengthened (23) is only valid when the ramp-up rate exceeds the
        # shutdown headroom and the unit stays up at least two periods;
        # otherwise fall back to the basic (6)-style bound below
        if (value(m.ScaledNominalRampUpLimit[g,t]) > value(m.ScaledShutdownRampLimit[g,t] - m.MinimumPowerOutput[g,t])) \
                and (value(m.ScaledMinimumUpTime[g]) >= 2):
            if t == m.InitialTime:
                # NOTE(review): references m.UnitStop[g,t+1]; assumes t+1 is in
                # the horizon at the initial time — confirm for 1-period models
                return m.MaximumPowerAvailable[g, t] <= m.PowerGeneratedT0[g] \
                        + m.ScaledNominalRampUpLimit[g,t] * m.UnitOn[g,t] \
                        + (m.ScaledStartupRampLimit[g,t]-m.ScaledNominalRampUpLimit[g,t]) * m.UnitStart[g, t] \
                        - m.MinimumPowerOutput[g,t]*m.UnitStop[g,t] \
                        - (m.ScaledNominalRampUpLimit[g,t]- m.ScaledShutdownRampLimit[g,t] + m.MinimumPowerOutput[g,t])*m.UnitStop[g,t+1]
            if t >= value(m.NumTimePeriods):
                # last period: no t+1 shutdown variable exists, so the
                # UnitStop[g,t+1] strengthening term is dropped
                return m.MaximumPowerAvailable[g, t] <= m.PowerGenerated[g,t-1] \
                        + m.ScaledNominalRampUpLimit[g,t] * m.UnitOn[g,t] \
                        + (m.ScaledStartupRampLimit[g,t]-m.ScaledNominalRampUpLimit[g,t]) * m.UnitStart[g, t] \
                        - m.MinimumPowerOutput[g,t]*m.UnitStop[g,t]
            else:
                return m.MaximumPowerAvailable[g, t] <= m.PowerGenerated[g,t-1] \
                        + m.ScaledNominalRampUpLimit[g,t] * m.UnitOn[g,t] \
                        + (m.ScaledStartupRampLimit[g,t]-m.ScaledNominalRampUpLimit[g,t]) * m.UnitStart[g, t] \
                        - m.MinimumPowerOutput[g,t]*m.UnitStop[g,t] \
                        - (m.ScaledNominalRampUpLimit[g,t]- m.ScaledShutdownRampLimit[g,t] + m.MinimumPowerOutput[g,t])*m.UnitStop[g,t+1]
        else:
            if t == m.InitialTime:
                return m.MaximumPowerAvailable[g, t] <= m.PowerGeneratedT0[g] + \
                                                        m.ScaledNominalRampUpLimit[g,t] * m.UnitOnT0[g] + \
                                                        m.ScaledStartupRampLimit[g,t] * m.UnitStart[g, t]
            else:
                return m.MaximumPowerAvailable[g, t] <= m.PowerGenerated[g, t-1] + \
                                                        m.ScaledNominalRampUpLimit[g,t] * m.UnitOn[g, t-1] + \
                                                        m.ScaledStartupRampLimit[g,t] * m.UnitStart[g,t]

    model.EnforceMaxAvailableRampUpRates = Constraint(model.ThermalGenerators, model.TimePeriods, rule=enforce_ramp_up_limits_rule)

    # the following constraint encodes Constraint 7, 20, 21 defined in OAV
    def enforce_ramp_down_limits_rule(m, g, t):
        if _ramp_down_not_needed(m,g,t):
            return Constraint.Skip
        if t == m.InitialTime:
            ## equation 7
            if (value(m.ScaledNominalRampDownLimit[g,t]) <= value(m.ScaledStartupRampLimit[g,t] - m.MinimumPowerOutput[g,t])) \
                    or (value(m.ScaledMinimumUpTime[g]) < 2):
                return m.PowerGeneratedT0[g] - m.PowerGenerated[g, t] <= \
                    m.ScaledNominalRampDownLimit[g,t] * m.UnitOn[g, t] + \
                    m.ScaledShutdownRampLimit[g,t] * m.UnitStop[g, t]
            elif value(m.ScaledMinimumUpTime[g]) < 3 or value(m.ScaledMinimumDownTime[g]) < 2: # now we can use equation 20
                return m.PowerGeneratedT0[g] - m.PowerGenerated[g, t] <= \
                    + m.ScaledNominalRampDownLimit[g,t] * m.UnitOn[g, t] \
                    + m.ScaledShutdownRampLimit[g,t] * m.UnitStop[g, t] \
                    - (m.ScaledNominalRampDownLimit[g,t]+m.MinimumPowerOutput[g,t])*m.UnitStart[g,t]
            else: # we can use equation (21)
                # NOTE(review): references t+1 variables without a horizon
                # guard in the initial-period branch — confirm t+1 exists here
                return m.PowerGeneratedT0[g] - m.PowerGenerated[g, t] <= \
                    + m.ScaledNominalRampDownLimit[g,t] * m.UnitOn[g, t+1] \
                    + m.ScaledShutdownRampLimit[g,t] * m.UnitStop[g, t] \
                    + m.ScaledNominalRampDownLimit[g,t] * m.UnitStop[g,t+1] \
                    -(m.ScaledNominalRampDownLimit[g,t]+m.MinimumPowerOutput[g,t]) * m.UnitStart[g,t] \
                    - m.ScaledNominalRampDownLimit[g,t] * m.UnitStart[g,t+1]
        else:
            ## equation 7
            if (value(m.ScaledNominalRampDownLimit[g,t]) <= value(m.ScaledStartupRampLimit[g,t] - m.MinimumPowerOutput[g,t])) \
                    or (value(m.ScaledMinimumUpTime[g]) < 2):
                return m.PowerGenerated[g,t-1] - m.PowerGenerated[g, t] <= \
                    m.ScaledNominalRampDownLimit[g,t] * m.UnitOn[g, t] + \
                    m.ScaledShutdownRampLimit[g,t] * m.UnitStop[g, t]
            elif value(m.ScaledMinimumUpTime[g]) < 3 or value(m.ScaledMinimumDownTime[g]) < 2 or t >= value(m.NumTimePeriods): # now we can use equation 20
                return m.PowerGenerated[g,t-1] - m.PowerGenerated[g, t] <= \
                    + m.ScaledNominalRampDownLimit[g,t] * m.UnitOn[g, t] \
                    + m.ScaledShutdownRampLimit[g,t] * m.UnitStop[g, t] \
                    -(m.ScaledNominalRampDownLimit[g,t]-m.ScaledStartupRampLimit[g,t]+m.MinimumPowerOutput[g,t])*m.UnitStart[g,t-1] \
                    - (m.ScaledNominalRampDownLimit[g,t]+m.MinimumPowerOutput[g,t])*m.UnitStart[g,t]
            else: # we can use equation (21)
                return m.PowerGenerated[g,t-1] - m.PowerGenerated[g, t] <= \
                    + m.ScaledNominalRampDownLimit[g,t] * m.UnitOn[g, t+1] \
                    + m.ScaledShutdownRampLimit[g,t] * m.UnitStop[g, t] \
                    + m.ScaledNominalRampDownLimit[g,t] * m.UnitStop[g,t+1] \
                    -(m.ScaledNominalRampDownLimit[g,t]-m.ScaledStartupRampLimit[g,t]+m.MinimumPowerOutput[g,t])*m.UnitStart[g,t-1] \
                    -(m.ScaledNominalRampDownLimit[g,t]+m.MinimumPowerOutput[g,t]) * m.UnitStart[g,t] \
                    - m.ScaledNominalRampDownLimit[g,t] * m.UnitStart[g,t+1]

    model.EnforceScaledNominalRampDownLimits = Constraint(model.ThermalGenerators, model.TimePeriods, rule=enforce_ramp_down_limits_rule)
## TODO: These should really be refactored so we don't double- or triple-up on ramping limits
@add_model_attr(component_name, requires = {'data_loader': None,
                                            'status_vars': ['garver_3bin_vars','garver_2bin_vars', 'garver_3bin_relaxed_stop_vars', 'ALS_state_transition_vars'],
                                            'power_vars': None,
                                            'reserve_vars': None,
                                            'generation_limits':None,
                                            })
def OAV_ramping_enhanced(model):
    '''
    Enhanced OAV ramping limits.

    Equations (6),(7),(20),(21),(23) from

    Ostrowski, J., et. al. Tight Mixed Integer Linear Programming Formulations
    for the Unit Commitment Problem. IEEE Transactions on Power Systems,
    Vol. 27, No. 1, Feb 2012.

    We only add the strongest valid ramp-up or ramp-down equality we can,
    and discard the others
    '''
    # BUGFIX: the requires dict previously listed 'status_vars' twice
    # ('status_vars': None followed by the list); Python keeps only the last
    # duplicate key in a dict literal, so the first entry was dead and has
    # been removed. Behavior is unchanged.
    ## TODO: check/figure out math for this case
    if not _model_time_invariant(model):
        raise NotImplementedError("OAV_ramping_enhanced has not be extended to model time-varying minimum or maximum power")
    _OAV_enhanced(model)
@add_model_attr(component_name, requires = {'data_loader': None,
                                            'status_vars': ['garver_3bin_vars','garver_2bin_vars', 'garver_3bin_relaxed_stop_vars', 'ALS_state_transition_vars'],
                                            'power_vars': None,
                                            'reserve_vars': None,
                                            'generation_limits':None,
                                            })
def OAV_ramping_enhanced_2period(model):
    '''
    This is OAV_ramping_enhanced plus the two-period ramping inequalities
    in equations (22) and (24) from

    Ostrowski, J., et. al. Tight Mixed Integer Linear Programming Formulations
    for the Unit Commitment Problem. IEEE Transactions on Power Systems,
    Vol. 27, No. 1, Feb 2012.
    '''
    ## TODO: check/figure out math for this case
    if not _model_time_invariant(model):
        raise NotImplementedError("OAV_ramping_enhanced_2period has not be extended to model time-varying minimum or maximum power")

    #TODO: This isn't quite valid, needs debugging
    _OAV_enhanced(model)

    ## TODO: this shouldn't be necessary, and the MaximumPowerAvailable
    ##       should be on the LHS of these equations
    def OAV_two_period_ramp_up_rule(m,g,t):
        # if two nominal ramps already span the full operating range, the
        # two-period cut can never bind
        if 2*value(m.ScaledNominalRampUpLimit[g,t]) >= value(m.MaximumPowerOutput[g,t] - m.MinimumPowerOutput[g,t]):
            return Constraint.Skip
        # validity conditions for equation (22); otherwise skip
        if (value(m.ScaledNominalRampUpLimit[g,t]) > value(m.ScaledShutdownRampLimit[g,t] - m.MinimumPowerOutput[g,t])) \
                and (value(m.ScaledMinimumUpTime[g]) >= 2) and (value(m.ScaledMinimumDownTime[g]) >= 2) \
                and (t > value(m.InitialTime)):
            if t == value(m.InitialTime) + 1: ## t == 2
                # two-period delta measured from the data-supplied T0 power
                return m.MaximumPowerAvailable[g,t] - m.PowerGeneratedT0[g] <= \
                        2 * m.ScaledNominalRampUpLimit[g,t] * m.UnitOn[g,t] \
                        - m.MinimumPowerOutput[g,t]*(m.UnitStop[g,t-1]+m.UnitStop[g,t]) \
                        + (m.ScaledStartupRampLimit[g,t] - m.ScaledNominalRampUpLimit[g,t])*m.UnitStart[g,t-1] \
                        + (m.ScaledStartupRampLimit[g,t] - 2*m.ScaledNominalRampUpLimit[g,t])*m.UnitStart[g,t]
            else:
                return m.MaximumPowerAvailable[g,t] - m.PowerGenerated[g,t-2] <= \
                        2 * m.ScaledNominalRampUpLimit[g,t] * m.UnitOn[g,t] \
                        - m.MinimumPowerOutput[g,t]*(m.UnitStop[g,t-1]+m.UnitStop[g,t]) \
                        + (m.ScaledStartupRampLimit[g,t] - m.ScaledNominalRampUpLimit[g,t])*m.UnitStart[g,t-1] \
                        + (m.ScaledStartupRampLimit[g,t] - 2*m.ScaledNominalRampUpLimit[g,t])*m.UnitStart[g,t]
        else:
            return Constraint.Skip

    model.OAVTwoPeriodRampUp = Constraint(model.ThermalGenerators, model.TimePeriods, rule=OAV_two_period_ramp_up_rule)

    ## NOTE: in the text this doesn't have any conditions on when it is valid,
    ##       so the valid conditions were inferred
    def OAV_two_period_ramp_down_rule(m,g,t):
        if 2*value(m.ScaledNominalRampDownLimit[g,t]) >= value(m.MaximumPowerOutput[g,t] - m.MinimumPowerOutput[g,t]):
            return Constraint.Skip
        # inferred validity conditions for equation (24); otherwise skip
        if (value(m.ScaledNominalRampDownLimit[g,t]) > value(m.ScaledStartupRampLimit[g,t] - m.MinimumPowerOutput[g,t])) \
                and (value(m.ScaledMinimumUpTime[g]) >= 3) and (value(m.ScaledMinimumDownTime[g]) >= 2) \
                and (t > value(m.InitialTime)):
            if t == value(m.InitialTime) + 1: ## t == 2
                # the T0-anchored version is only meaningful when T0 ramp
                # rates are being enforced
                if not m.enforce_t1_ramp_rates:
                    return Constraint.Skip
                return m.PowerGeneratedT0[g] - m.PowerGenerated[g,t] <= \
                        2*m.ScaledNominalRampDownLimit[g,t]*m.UnitOn[g,t] \
                        + m.ScaledShutdownRampLimit[g,t]*m.UnitStop[g,t-1] \
                        + (m.ScaledShutdownRampLimit[g,t]+m.ScaledNominalRampDownLimit[g,t])*m.UnitStop[g,t] \
                        -(2*m.ScaledNominalRampDownLimit[g,t]+m.MinimumPowerOutput[g,t])*(m.UnitStart[g,t-1]+m.UnitStart[g,t])
            else:
                return m.PowerGenerated[g,t-2] - m.PowerGenerated[g,t] <= \
                        2*m.ScaledNominalRampDownLimit[g,t]*m.UnitOn[g,t] \
                        + m.ScaledShutdownRampLimit[g,t]*m.UnitStop[g,t-1] \
                        + (m.ScaledShutdownRampLimit[g,t]+m.ScaledNominalRampDownLimit[g,t])*m.UnitStop[g,t] \
                        - 2*m.ScaledNominalRampDownLimit[g,t]*m.UnitStart[g,t-2] \
                        -(2*m.ScaledNominalRampDownLimit[g,t]+m.MinimumPowerOutput[g,t])*(m.UnitStart[g,t-1]+m.UnitStart[g,t])
        else:
            return Constraint.Skip

    model.OAVTwoPeriodRampDown = Constraint(model.ThermalGenerators, model.TimePeriods, rule=OAV_two_period_ramp_down_rule)
@add_model_attr(component_name, requires = {'data_loader': None,
                                            'status_vars': None,
                                            'power_vars': None,
                                            'reserve_vars': None,
                                            'generation_limits':None,
                                            })
def CA_ramping_limits(model):
    '''
    Ramping limits on total power generated, UnitOn-only formulation.

    Equations (18),(19) and (20) from

    Carrion, M. and Arroyo, J. (2006) A Computationally Efficient Mixed-Integer
    Linear Formulation for the Thermal Unit Commitment Problem. IEEE Transactions
    on Power Systems, Vol. 21, No. 3, Aug 2006.
    '''

    # impose upper bounds on the maximum power available for each generator in each time period,
    # based on standard and start-up ramp limits.

    # the following constraint encodes Constraint 18 defined in Carrion and Arroyo.
    def enforce_max_available_ramp_up_rates_rule(m, g, t):
        # 4 cases, split by (t-1, t) unit status (RHS is defined as the delta from m.PowerGenerated[g, t-1])
        # (0, 0) - unit staying off:   RHS = maximum generator output (degenerate upper bound due to unit being off)
        # (0, 1) - unit switching on:  RHS = startup ramp limit
        # (1, 0) - unit switching off: RHS = standard ramp limit minus startup ramp limit plus maximum power output (degenerate upper bound due to unit off)
        # (1, 1) - unit staying on:    RHS = standard ramp limit plus power generated in previous time period
        if _ramp_up_not_needed(m,g,t):
            return Constraint.Skip
        if t == m.InitialTime:
            return m.MaximumPowerAvailable[g, t] <= m.PowerGeneratedT0[g] + \
                                                    m.ScaledNominalRampUpLimit[g,t] * m.UnitOnT0[g] + \
                                                    m.ScaledStartupRampLimit[g,t] * (m.UnitOn[g, t] - m.UnitOnT0[g]) + \
                                                    m.MaximumPowerOutput[g,t] * (1 - m.UnitOn[g, t])
        else:
            return m.MaximumPowerAvailable[g, t] <= m.PowerGenerated[g, t-1] + \
                                                    m.ScaledNominalRampUpLimit[g,t] * m.UnitOn[g, t-1] + \
                                                    m.ScaledStartupRampLimit[g,t] * (m.UnitOn[g, t] - m.UnitOn[g, t-1]) + \
                                                    m.MaximumPowerOutput[g,t] * (1 - m.UnitOn[g, t])

    model.EnforceMaxAvailableRampUpRates = Constraint(model.ThermalGenerators, model.TimePeriods, rule=enforce_max_available_ramp_up_rates_rule)

    # the following constraint encodes Constraint 20 defined in Carrion and Arroyo.
    def enforce_ramp_down_limits_rule(m, g, t):
        # 4 cases, split by (t-1, t) unit status:
        # (0, 0) - unit staying off:   RHS = maximum generator output (degenerate upper bound)
        # (0, 1) - unit switching on:  RHS = standard ramp-down limit minus shutdown ramp limit plus maximum generator output - this is the strangest case.
        #NOTE: This may never be physically true, but if a generator has ScaledShutdownRampLimit >> MaximumPowerOutput, this constraint causes problems
        # (1, 0) - unit switching off: RHS = shutdown ramp limit
        # (1, 1) - unit staying on:    RHS = standard ramp-down limit
        if _ramp_down_not_needed(m,g,t):
            return Constraint.Skip
        if t == m.InitialTime:
            # shutdown from T0 uses the T0-specific shutdown ramp parameter
            return m.PowerGeneratedT0[g] - m.PowerGenerated[g, t] <= \
                m.ScaledNominalRampDownLimit[g,t] * m.UnitOn[g, t] + \
                m.ScaledShutdownRampLimitT0[g] * (m.UnitOnT0[g] - m.UnitOn[g, t]) + \
                m.MaximumPowerOutput[g,t] * (1 - m.UnitOnT0[g])
        else:
            return m.PowerGenerated[g, t-1] - m.PowerGenerated[g, t] <= \
                m.ScaledNominalRampDownLimit[g,t] * m.UnitOn[g, t] + \
                m.ScaledShutdownRampLimit[g,t-1] * (m.UnitOn[g, t-1] - m.UnitOn[g, t]) + \
                m.MaximumPowerOutput[g,t] * (1 - m.UnitOn[g, t-1])

    model.EnforceScaledNominalRampDownLimits = Constraint(model.ThermalGenerators, model.TimePeriods, rule=enforce_ramp_down_limits_rule)
| 58.813531
| 178
| 0.598384
| 4,155
| 35,641
| 4.990373
| 0.095307
| 0.036749
| 0.030528
| 0.050639
| 0.858789
| 0.815481
| 0.798698
| 0.788184
| 0.751242
| 0.720087
| 0
| 0.014983
| 0.292135
| 35,641
| 605
| 179
| 58.910744
| 0.80673
| 0.194467
| 0
| 0.702632
| 0
| 0
| 0.053494
| 0.017196
| 0
| 0
| 0
| 0.013223
| 0
| 1
| 0.078947
| false
| 0
| 0.007895
| 0
| 0.273684
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4d545e5f54326d14388a022df7aa12f65f61c3be
| 19,244
|
py
|
Python
|
stage/configuration/test_kafka_multitopic_consumer_origin.py
|
Sentienz/datacollector-tests
|
ca27988351dc3366488098b5db6c85a8be2f7b85
|
[
"Apache-2.0"
] | null | null | null |
stage/configuration/test_kafka_multitopic_consumer_origin.py
|
Sentienz/datacollector-tests
|
ca27988351dc3366488098b5db6c85a8be2f7b85
|
[
"Apache-2.0"
] | 1
|
2019-04-24T11:06:38.000Z
|
2019-04-24T11:06:38.000Z
|
stage/configuration/test_kafka_multitopic_consumer_origin.py
|
anubandhan/datacollector-tests
|
301c024c66d68353735256b262b681dd05ba16cc
|
[
"Apache-2.0"
] | 2
|
2019-05-24T06:34:37.000Z
|
2020-03-30T11:48:18.000Z
|
import pytest
from streamsets.testframework.decorators import stub
# @stub placeholder tests for Kafka Multitopic Consumer origin configuration
# options (allow-extra-columns through delimiter-character). Each test is
# parametrized over the relevant configuration values; bodies are
# intentionally empty (`pass`) until the tests are implemented.
@pytest.mark.parametrize('data_format', ['DELIMITED'])
@pytest.mark.parametrize('header_line', ['WITH_HEADER'])
@pytest.mark.parametrize('allow_extra_columns', [False, True])
@stub
def test_allow_extra_columns(sdc_builder, sdc_executor, data_format, header_line, allow_extra_columns):
    pass

@pytest.mark.parametrize('data_format', ['DATAGRAM'])
@pytest.mark.parametrize('datagram_packet_format', ['COLLECTD'])
@stub
def test_auth_file(sdc_builder, sdc_executor, data_format, datagram_packet_format):
    pass

@pytest.mark.parametrize('auto_offset_reset', ['EARLIEST', 'LATEST', 'NONE', 'TIMESTAMP'])
@stub
def test_auto_offset_reset(sdc_builder, sdc_executor, auto_offset_reset):
    pass

@pytest.mark.parametrize('auto_offset_reset', ['TIMESTAMP'])
@stub
def test_auto_offset_reset_timestamp_in_ms(sdc_builder, sdc_executor, auto_offset_reset):
    pass

@pytest.mark.parametrize('avro_schema_location', ['INLINE'])
@pytest.mark.parametrize('data_format', ['AVRO'])
@stub
def test_avro_schema(sdc_builder, sdc_executor, avro_schema_location, data_format):
    pass

@pytest.mark.parametrize('data_format', ['AVRO'])
@pytest.mark.parametrize('avro_schema_location', ['INLINE', 'REGISTRY', 'SOURCE'])
@stub
def test_avro_schema_location(sdc_builder, sdc_executor, data_format, avro_schema_location):
    pass

@stub
def test_batch_wait_time_in_ms(sdc_builder, sdc_executor):
    pass

@stub
def test_broker_uri(sdc_builder, sdc_executor):
    pass

@pytest.mark.parametrize('data_format', ['WHOLE_FILE'])
@stub
def test_buffer_size_in_bytes(sdc_builder, sdc_executor, data_format):
    pass

@pytest.mark.parametrize('data_format', ['DATAGRAM', 'DELIMITED', 'JSON', 'LOG', 'TEXT', 'XML'])
@stub
def test_charset(sdc_builder, sdc_executor, data_format):
    pass

@pytest.mark.parametrize('data_format', ['DELIMITED'])
@pytest.mark.parametrize('delimiter_format_type', ['CUSTOM'])
@pytest.mark.parametrize('enable_comments', [True])
@stub
def test_comment_marker(sdc_builder, sdc_executor, data_format, delimiter_format_type, enable_comments):
    pass

@pytest.mark.parametrize('data_format', ['BINARY', 'DELIMITED', 'JSON', 'LOG', 'PROTOBUF', 'SDC_JSON', 'TEXT', 'XML'])
@pytest.mark.parametrize('compression_format', ['ARCHIVE', 'COMPRESSED_ARCHIVE', 'COMPRESSED_FILE', 'NONE'])
@stub
def test_compression_format(sdc_builder, sdc_executor, data_format, compression_format):
    pass

@stub
def test_configuration_properties(sdc_builder, sdc_executor):
    pass

@stub
def test_consumer_group(sdc_builder, sdc_executor):
    pass

@pytest.mark.parametrize('data_format', ['DATAGRAM'])
@pytest.mark.parametrize('datagram_packet_format', ['COLLECTD'])
@pytest.mark.parametrize('convert_hi_res_time_and_interval', [False, True])
@stub
def test_convert_hi_res_time_and_interval(sdc_builder, sdc_executor, data_format, datagram_packet_format, convert_hi_res_time_and_interval):
    pass

@pytest.mark.parametrize('data_format', ['TEXT'])
@pytest.mark.parametrize('use_custom_delimiter', [True])
@stub
def test_custom_delimiter(sdc_builder, sdc_executor, data_format, use_custom_delimiter):
    pass

@pytest.mark.parametrize('data_format', ['LOG'])
@pytest.mark.parametrize('log_format', ['LOG4J'])
@pytest.mark.parametrize('use_custom_log_format', [True])
@stub
def test_custom_log4j_format(sdc_builder, sdc_executor, data_format, log_format, use_custom_log_format):
    pass

@pytest.mark.parametrize('data_format', ['LOG'])
@pytest.mark.parametrize('log_format', ['APACHE_CUSTOM_LOG_FORMAT'])
@stub
def test_custom_log_format(sdc_builder, sdc_executor, data_format, log_format):
    pass

@pytest.mark.parametrize('data_format', ['AVRO', 'BINARY', 'DATAGRAM', 'DELIMITED', 'JSON', 'LOG', 'PROTOBUF', 'SDC_JSON', 'TEXT', 'XML'])
@stub
def test_data_format(sdc_builder, sdc_executor, data_format):
    pass

@pytest.mark.parametrize('data_format', ['DATAGRAM'])
@pytest.mark.parametrize('datagram_packet_format', ['COLLECTD', 'NETFLOW', 'RAW_DATA', 'SYSLOG'])
@stub
def test_datagram_packet_format(sdc_builder, sdc_executor, data_format, datagram_packet_format):
    pass

@pytest.mark.parametrize('data_format', ['PROTOBUF'])
@pytest.mark.parametrize('delimited_messages', [False, True])
@stub
def test_delimited_messages(sdc_builder, sdc_executor, data_format, delimited_messages):
    pass

@pytest.mark.parametrize('data_format', ['DELIMITED'])
@pytest.mark.parametrize('delimiter_format_type', ['CUSTOM'])
@stub
def test_delimiter_character(sdc_builder, sdc_executor, data_format, delimiter_format_type):
    pass
# @stub placeholder tests for Kafka Multitopic Consumer origin configuration
# options (delimiter-element through key-capture-mode). Each test is
# parametrized over the relevant configuration values; bodies are
# intentionally empty (`pass`) until the tests are implemented.
@pytest.mark.parametrize('data_format', ['XML'])
@stub
def test_delimiter_element(sdc_builder, sdc_executor, data_format):
    pass

@pytest.mark.parametrize('data_format', ['DELIMITED'])
@pytest.mark.parametrize('delimiter_format_type', ['CSV', 'CUSTOM', 'EXCEL', 'MULTI_CHARACTER', 'MYSQL', 'POSTGRES_CSV', 'POSTGRES_TEXT', 'RFC4180', 'TDF'])
@stub
def test_delimiter_format_type(sdc_builder, sdc_executor, data_format, delimiter_format_type):
    pass

@pytest.mark.parametrize('data_format', ['DELIMITED'])
@pytest.mark.parametrize('delimiter_format_type', ['CUSTOM'])
@pytest.mark.parametrize('enable_comments', [False, True])
@stub
def test_enable_comments(sdc_builder, sdc_executor, data_format, delimiter_format_type, enable_comments):
    pass

@pytest.mark.parametrize('data_format', ['DELIMITED'])
@pytest.mark.parametrize('delimiter_format_type', ['CUSTOM', 'MULTI_CHARACTER'])
@stub
def test_escape_character(sdc_builder, sdc_executor, data_format, delimiter_format_type):
    pass

@pytest.mark.parametrize('data_format', ['EXCEL'])
@pytest.mark.parametrize('excel_header_option', ['IGNORE_HEADER', 'NO_HEADER', 'WITH_HEADER'])
@stub
def test_excel_header_option(sdc_builder, sdc_executor, data_format, excel_header_option):
    pass

@pytest.mark.parametrize('data_format', ['DATAGRAM'])
@pytest.mark.parametrize('datagram_packet_format', ['COLLECTD'])
@pytest.mark.parametrize('exclude_interval', [False, True])
@stub
def test_exclude_interval(sdc_builder, sdc_executor, data_format, datagram_packet_format, exclude_interval):
    pass

@pytest.mark.parametrize('allow_extra_columns', [True])
@pytest.mark.parametrize('data_format', ['DELIMITED'])
@pytest.mark.parametrize('header_line', ['WITH_HEADER'])
@stub
def test_extra_column_prefix(sdc_builder, sdc_executor, allow_extra_columns, data_format, header_line):
    pass

@pytest.mark.parametrize('data_format', ['LOG'])
@pytest.mark.parametrize('log_format', ['REGEX'])
@stub
def test_field_path_to_regex_group_mapping(sdc_builder, sdc_executor, data_format, log_format):
    pass

@pytest.mark.parametrize('compression_format', ['ARCHIVE', 'COMPRESSED_ARCHIVE'])
@pytest.mark.parametrize('data_format', ['BINARY', 'DELIMITED', 'JSON', 'LOG', 'PROTOBUF', 'SDC_JSON', 'TEXT', 'XML'])
@stub
def test_file_name_pattern_within_compressed_directory(sdc_builder, sdc_executor, compression_format, data_format):
    pass

@pytest.mark.parametrize('data_format', ['LOG'])
@pytest.mark.parametrize('log_format', ['GROK'])
@stub
def test_grok_pattern(sdc_builder, sdc_executor, data_format, log_format):
    pass

@pytest.mark.parametrize('data_format', ['LOG'])
@pytest.mark.parametrize('log_format', ['GROK'])
@stub
def test_grok_pattern_definition(sdc_builder, sdc_executor, data_format, log_format):
    pass

@pytest.mark.parametrize('data_format', ['DELIMITED'])
@pytest.mark.parametrize('header_line', ['IGNORE_HEADER', 'NO_HEADER', 'WITH_HEADER'])
@stub
def test_header_line(sdc_builder, sdc_executor, data_format, header_line):
    pass

@pytest.mark.parametrize('data_format', ['DATAGRAM', 'DELIMITED', 'JSON', 'LOG', 'TEXT', 'XML'])
@pytest.mark.parametrize('ignore_control_characters', [False, True])
@stub
def test_ignore_control_characters(sdc_builder, sdc_executor, data_format, ignore_control_characters):
    pass

@pytest.mark.parametrize('data_format', ['DELIMITED'])
@pytest.mark.parametrize('delimiter_format_type', ['CUSTOM'])
@pytest.mark.parametrize('ignore_empty_lines', [False, True])
@stub
def test_ignore_empty_lines(sdc_builder, sdc_executor, data_format, delimiter_format_type, ignore_empty_lines):
    pass

@pytest.mark.parametrize('data_format', ['EXCEL'])
@pytest.mark.parametrize('read_all_sheets', [False])
@stub
def test_import_sheets(sdc_builder, sdc_executor, data_format, read_all_sheets):
    pass

@pytest.mark.parametrize('data_format', ['TEXT'])
@pytest.mark.parametrize('use_custom_delimiter', [True])
@pytest.mark.parametrize('include_custom_delimiter', [False, True])
@stub
def test_include_custom_delimiter(sdc_builder, sdc_executor, data_format, use_custom_delimiter, include_custom_delimiter):
    pass

@pytest.mark.parametrize('data_format', ['XML'])
@pytest.mark.parametrize('include_field_xpaths', [False, True])
@stub
def test_include_field_xpaths(sdc_builder, sdc_executor, data_format, include_field_xpaths):
    pass

@pytest.mark.parametrize('data_format', ['JSON'])
@pytest.mark.parametrize('json_content', ['ARRAY_OBJECTS', 'MULTIPLE_OBJECTS'])
@stub
def test_json_content(sdc_builder, sdc_executor, data_format, json_content):
    pass

@pytest.mark.parametrize('key_capture_mode', ['RECORD_FIELD', 'RECORD_HEADER_AND_FIELD'])
@stub
def test_key_capture_field(sdc_builder, sdc_executor, key_capture_mode):
    pass

@pytest.mark.parametrize('key_capture_mode', ['RECORD_HEADER', 'RECORD_HEADER_AND_FIELD'])
@stub
def test_key_capture_header_attribute(sdc_builder, sdc_executor, key_capture_mode):
    pass

@pytest.mark.parametrize('key_capture_mode', ['NONE', 'RECORD_FIELD', 'RECORD_HEADER', 'RECORD_HEADER_AND_FIELD'])
@stub
def test_key_capture_mode(sdc_builder, sdc_executor, key_capture_mode):
    pass
# @stub placeholder tests for Kafka Multitopic Consumer origin configuration
# options (key-deserializer through regular-expression). Each test is
# parametrized over the relevant configuration values; bodies are
# intentionally empty (`pass`) until the tests are implemented.
@pytest.mark.parametrize('data_format', ['AVRO'])
@pytest.mark.parametrize('key_deserializer', ['CONFLUENT', 'STRING'])
@stub
def test_key_deserializer(sdc_builder, sdc_executor, data_format, key_deserializer):
    pass

@pytest.mark.parametrize('data_format', ['DELIMITED'])
@stub
def test_lines_to_skip(sdc_builder, sdc_executor, data_format):
    pass

@pytest.mark.parametrize('data_format', ['LOG'])
@pytest.mark.parametrize('log_format', ['APACHE_CUSTOM_LOG_FORMAT', 'APACHE_ERROR_LOG_FORMAT', 'CEF', 'COMBINED_LOG_FORMAT', 'COMMON_LOG_FORMAT', 'GROK', 'LEEF', 'LOG4J', 'REGEX'])
@stub
def test_log_format(sdc_builder, sdc_executor, data_format, log_format):
    pass

@pytest.mark.parametrize('avro_schema_location', ['REGISTRY'])
@pytest.mark.parametrize('data_format', ['AVRO'])
@pytest.mark.parametrize('lookup_schema_by', ['AUTO', 'ID', 'SUBJECT'])
@stub
def test_lookup_schema_by(sdc_builder, sdc_executor, avro_schema_location, data_format, lookup_schema_by):
    pass

@stub
def test_max_batch_size_in_records(sdc_builder, sdc_executor):
    pass

@pytest.mark.parametrize('data_format', ['BINARY'])
@stub
def test_max_data_size_in_bytes(sdc_builder, sdc_executor, data_format):
    pass

@pytest.mark.parametrize('data_format', ['LOG', 'TEXT'])
@stub
def test_max_line_length(sdc_builder, sdc_executor, data_format):
    pass

@pytest.mark.parametrize('data_format', ['JSON'])
@stub
def test_max_object_length_in_chars(sdc_builder, sdc_executor, data_format):
    pass

@pytest.mark.parametrize('data_format', ['DELIMITED', 'XML'])
@stub
def test_max_record_length_in_chars(sdc_builder, sdc_executor, data_format):
    pass

@pytest.mark.parametrize('data_format', ['NETFLOW'])
@stub
def test_max_templates_in_cache_for_data_format_netflow(sdc_builder, sdc_executor, data_format):
    pass

@pytest.mark.parametrize('data_format', ['DATAGRAM'])
@pytest.mark.parametrize('datagram_packet_format', ['NETFLOW'])
@stub
def test_max_templates_in_cache(sdc_builder, sdc_executor, data_format, datagram_packet_format):
    pass

@pytest.mark.parametrize('data_format', ['PROTOBUF'])
@stub
def test_message_type(sdc_builder, sdc_executor, data_format):
    pass

@pytest.mark.parametrize('data_format', ['DELIMITED'])
@pytest.mark.parametrize('delimiter_format_type', ['MULTI_CHARACTER'])
@stub
def test_multi_character_field_delimiter(sdc_builder, sdc_executor, data_format, delimiter_format_type):
    pass

@pytest.mark.parametrize('data_format', ['DELIMITED'])
@pytest.mark.parametrize('delimiter_format_type', ['MULTI_CHARACTER'])
@stub
def test_multi_character_line_delimiter(sdc_builder, sdc_executor, data_format, delimiter_format_type):
    pass

@pytest.mark.parametrize('data_format', ['XML'])
@stub
def test_namespaces(sdc_builder, sdc_executor, data_format):
    pass

@pytest.mark.parametrize('data_format', ['DELIMITED'])
@pytest.mark.parametrize('parse_nulls', [True])
@stub
def test_null_constant(sdc_builder, sdc_executor, data_format, parse_nulls):
    pass

@stub
def test_number_of_threads(sdc_builder, sdc_executor):
    pass

@pytest.mark.parametrize('data_format', ['LOG'])
@pytest.mark.parametrize('log_format', ['LOG4J'])
@pytest.mark.parametrize('on_parse_error', ['ERROR', 'IGNORE', 'INCLUDE_AS_STACK_TRACE'])
@stub
def test_on_parse_error(sdc_builder, sdc_executor, data_format, log_format, on_parse_error):
    pass

@pytest.mark.parametrize('on_record_error', ['DISCARD', 'STOP_PIPELINE', 'TO_ERROR'])
@stub
def test_on_record_error(sdc_builder, sdc_executor, on_record_error):
    pass

@pytest.mark.parametrize('data_format', ['XML'])
@pytest.mark.parametrize('output_field_attributes', [False, True])
@stub
def test_output_field_attributes(sdc_builder, sdc_executor, data_format, output_field_attributes):
    pass

@pytest.mark.parametrize('data_format', ['DELIMITED'])
@pytest.mark.parametrize('parse_nulls', [False, True])
@stub
def test_parse_nulls(sdc_builder, sdc_executor, data_format, parse_nulls):
    pass

@pytest.mark.parametrize('produce_single_record', [False, True])
@stub
def test_produce_single_record(sdc_builder, sdc_executor, produce_single_record):
    pass

@pytest.mark.parametrize('data_format', ['PROTOBUF'])
@stub
def test_protobuf_descriptor_file(sdc_builder, sdc_executor, data_format):
    pass

@pytest.mark.parametrize('data_format', ['DELIMITED'])
@pytest.mark.parametrize('delimiter_format_type', ['CUSTOM', 'MULTI_CHARACTER'])
@stub
def test_quote_character(sdc_builder, sdc_executor, data_format, delimiter_format_type):
    pass

@pytest.mark.parametrize('data_format', ['WHOLE_FILE'])
@stub
def test_rate_per_second(sdc_builder, sdc_executor, data_format):
    pass

@pytest.mark.parametrize('data_format', ['EXCEL'])
@pytest.mark.parametrize('read_all_sheets', [False, True])
@stub
def test_read_all_sheets(sdc_builder, sdc_executor, data_format, read_all_sheets):
    pass

@pytest.mark.parametrize('data_format', ['NETFLOW'])
@pytest.mark.parametrize('record_generation_mode', ['INTERPRETED_ONLY', 'RAW_AND_INTERPRETED', 'RAW_ONLY'])
@stub
def test_record_generation_mode_for_data_format_netflow(sdc_builder, sdc_executor, data_format, record_generation_mode):
    pass

@pytest.mark.parametrize('data_format', ['DATAGRAM'])
@pytest.mark.parametrize('datagram_packet_format', ['NETFLOW'])
@pytest.mark.parametrize('record_generation_mode', ['INTERPRETED_ONLY', 'RAW_AND_INTERPRETED', 'RAW_ONLY'])
@stub
def test_record_generation_mode(sdc_builder, sdc_executor, data_format, datagram_packet_format, record_generation_mode):
    pass

@pytest.mark.parametrize('data_format', ['LOG'])
@pytest.mark.parametrize('log_format', ['REGEX'])
@stub
def test_regular_expression(sdc_builder, sdc_executor, data_format, log_format):
    pass
@pytest.mark.parametrize('data_format', ['LOG'])
@pytest.mark.parametrize('retain_original_line', [False, True])
@stub
def test_retain_original_line(sdc_builder, sdc_executor, data_format, retain_original_line):
pass
@pytest.mark.parametrize('data_format', ['DELIMITED'])
@pytest.mark.parametrize('root_field_type', ['LIST', 'LIST_MAP'])
@stub
def test_root_field_type(sdc_builder, sdc_executor, data_format, root_field_type):
pass
@pytest.mark.parametrize('avro_schema_location', ['REGISTRY'])
@pytest.mark.parametrize('data_format', ['AVRO'])
@pytest.mark.parametrize('lookup_schema_by', ['ID'])
@stub
def test_schema_id(sdc_builder, sdc_executor, avro_schema_location, data_format, lookup_schema_by):
pass
@pytest.mark.parametrize('avro_schema_location', ['REGISTRY'])
@pytest.mark.parametrize('data_format', ['AVRO'])
@stub
def test_schema_registry_urls(sdc_builder, sdc_executor, avro_schema_location, data_format):
pass
@pytest.mark.parametrize('avro_schema_location', ['REGISTRY'])
@pytest.mark.parametrize('data_format', ['AVRO'])
@pytest.mark.parametrize('lookup_schema_by', ['SUBJECT'])
@stub
def test_schema_subject(sdc_builder, sdc_executor, avro_schema_location, data_format, lookup_schema_by):
pass
@pytest.mark.parametrize('data_format', ['EXCEL'])
@pytest.mark.parametrize('excel_header_option', ['WITH_HEADER'])
@pytest.mark.parametrize('skip_cells_with_no_header', [False, True])
@stub
def test_skip_cells_with_no_header(sdc_builder, sdc_executor, data_format, excel_header_option, skip_cells_with_no_header):
pass
@pytest.mark.parametrize('data_format', ['AVRO'])
@pytest.mark.parametrize('skip_union_indexes', [False, True])
@stub
def test_skip_union_indexes(sdc_builder, sdc_executor, data_format, skip_union_indexes):
pass
@pytest.mark.parametrize('data_format', ['NETFLOW'])
@stub
def test_template_cache_timeout_in_ms_for_data_format_netflow(sdc_builder, sdc_executor, data_format):
pass
@pytest.mark.parametrize('data_format', ['DATAGRAM'])
@pytest.mark.parametrize('datagram_packet_format', ['NETFLOW'])
@stub
def test_template_cache_timeout_in_ms(sdc_builder, sdc_executor, data_format, datagram_packet_format):
pass
@stub
def test_topic_list(sdc_builder, sdc_executor):
pass
@pytest.mark.parametrize('data_format', ['LOG'])
@pytest.mark.parametrize('log_format', ['LOG4J'])
@pytest.mark.parametrize('on_parse_error', ['INCLUDE_AS_STACK_TRACE'])
@stub
def test_trim_stack_trace_to_length(sdc_builder, sdc_executor, data_format, log_format, on_parse_error):
pass
@pytest.mark.parametrize('data_format', ['DATAGRAM'])
@pytest.mark.parametrize('datagram_packet_format', ['COLLECTD'])
@stub
def test_typesdb_file_path(sdc_builder, sdc_executor, data_format, datagram_packet_format):
pass
@pytest.mark.parametrize('data_format', ['TEXT'])
@pytest.mark.parametrize('use_custom_delimiter', [False, True])
@stub
def test_use_custom_delimiter(sdc_builder, sdc_executor, data_format, use_custom_delimiter):
pass
@pytest.mark.parametrize('data_format', ['LOG'])
@pytest.mark.parametrize('log_format', ['LOG4J'])
@pytest.mark.parametrize('use_custom_log_format', [False, True])
@stub
def test_use_custom_log_format(sdc_builder, sdc_executor, data_format, log_format, use_custom_log_format):
pass
@pytest.mark.parametrize('data_format', ['AVRO'])
@pytest.mark.parametrize('value_deserializer', ['CONFLUENT', 'DEFAULT'])
@stub
def test_value_deserializer(sdc_builder, sdc_executor, data_format, value_deserializer):
pass
@pytest.mark.parametrize('data_format', ['WHOLE_FILE'])
@pytest.mark.parametrize('verify_checksum', [False, True])
@stub
def test_verify_checksum(sdc_builder, sdc_executor, data_format, verify_checksum):
pass
| 32.019967
| 180
| 0.778269
| 2,554
| 19,244
| 5.48473
| 0.079092
| 0.112079
| 0.235366
| 0.131925
| 0.838806
| 0.806396
| 0.764492
| 0.717947
| 0.693604
| 0.657696
| 0
| 0.000565
| 0.0807
| 19,244
| 600
| 181
| 32.073333
| 0.79125
| 0
| 0
| 0.659574
| 0
| 0
| 0.206818
| 0.040792
| 0
| 0
| 0
| 0
| 0
| 1
| 0.208038
| false
| 0.208038
| 0.007092
| 0
| 0.21513
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
4d6bc1143aab985b43d448b2532ebe4708382e63
| 437
|
py
|
Python
|
others/edge/speech_recognition/pytorch/src/deepspeech/logging/__init__.py
|
luluseptember/inference
|
acbc7b0bf288343ed81e62b69dea8afec03d679b
|
[
"Apache-2.0"
] | 49
|
2018-11-02T15:04:40.000Z
|
2021-11-16T18:11:39.000Z
|
others/edge/speech_recognition/pytorch/src/deepspeech/logging/__init__.py
|
luluseptember/inference
|
acbc7b0bf288343ed81e62b69dea8afec03d679b
|
[
"Apache-2.0"
] | 6
|
2018-12-03T19:29:49.000Z
|
2020-05-16T15:34:33.000Z
|
others/edge/speech_recognition/pytorch/src/deepspeech/logging/__init__.py
|
luluseptember/inference
|
acbc7b0bf288343ed81e62b69dea8afec03d679b
|
[
"Apache-2.0"
] | 16
|
2018-11-08T11:52:54.000Z
|
2021-11-16T18:11:28.000Z
|
# flake8: noqa
from deepspeech.logging.log_level_action import LogLevelAction
from deepspeech.logging.mixin import log_call
from deepspeech.logging.mixin import log_call_debug
from deepspeech.logging.mixin import log_call_info
from deepspeech.logging.mixin import log_call_warning
from deepspeech.logging.mixin import log_call_error
from deepspeech.logging.mixin import log_call_critical
from deepspeech.logging.mixin import LoggerMixin
| 43.7
| 62
| 0.878719
| 63
| 437
| 5.888889
| 0.285714
| 0.301887
| 0.45283
| 0.490566
| 0.716981
| 0.630728
| 0.630728
| 0
| 0
| 0
| 0
| 0.002488
| 0.080092
| 437
| 9
| 63
| 48.555556
| 0.920398
| 0.02746
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4db393dc38f84b1fcfe7cd9a1ca245098d17c20d
| 8,107
|
py
|
Python
|
finfast_torch/analyze/metrics.py
|
snugfox/finfast
|
ccfcc67deae24a29d3dfcd5ffa51988d13791b07
|
[
"MIT"
] | null | null | null |
finfast_torch/analyze/metrics.py
|
snugfox/finfast
|
ccfcc67deae24a29d3dfcd5ffa51988d13791b07
|
[
"MIT"
] | null | null | null |
finfast_torch/analyze/metrics.py
|
snugfox/finfast
|
ccfcc67deae24a29d3dfcd5ffa51988d13791b07
|
[
"MIT"
] | null | null | null |
import torch
from . import metrics_kernels as kernels
from typing import Optional
def __expect_rp_rb_rf(
    rp: Optional[torch.Tensor], rb: Optional[torch.Tensor], rf: Optional[torch.Tensor]
) -> None:
    """Validate the dtypes and shapes of rp, rb, and rf.

    Checks performed (each skipped when the argument is None):
    - rp and rb must be 2-D floating-point tensors of shape (P, N) and
      (B, N) respectively, i.e. they must agree along dimension 1
    - rf must be a 0-D (scalar) floating-point tensor

    Args:
        rp (torch.Tensor): Portfolio returns matrix
        rb (torch.Tensor): Benchmark returns matrix
        rf (torch.Tensor): Scalar risk-free rate

    Raises:
        ValueError: If any supplied argument violates the above.
    """
    # Same per-tensor checks, driven by a table instead of repeated blocks.
    for tensor, name, want_ndim in ((rp, "rp", 2), (rb, "rb", 2), (rf, "rf", 0)):
        if tensor is None:
            continue
        if tensor.ndim != want_ndim:
            raise ValueError("{} must be a {}-D tensor".format(name, want_ndim))
        if not torch.is_floating_point(tensor):
            raise ValueError("{} must be a floating dtype".format(name))
    # Cross-check: rp and rb must share the same number of return samples.
    if (rp is not None) and (rb is not None) and rp.shape[1] != rb.shape[1]:
        raise ValueError("dimension 1 of rp and rb must be equal")
def beta(rp: torch.Tensor, rb: torch.Tensor) -> torch.Tensor:
    """Compute the beta of every portfolio against every benchmark.

    Args:
        rp (torch.Tensor): Portfolio returns of shape (p, n); entry (i, j)
            is the j-th return of the i-th portfolio
        rb (torch.Tensor): Benchmark returns of shape (b, n); entry (i, j)
            is the j-th return of the i-th benchmark

    Returns:
        torch.Tensor: Matrix of shape (p, b) whose (i, j) entry is the beta
            of portfolio i relative to benchmark j
    """
    __expect_rp_rb_rf(rp, rb, None)
    result = kernels.beta(rp, rb)
    return result
def alpha(rp: torch.Tensor, rb: torch.Tensor, rf: torch.Tensor) -> torch.Tensor:
    """Compute the alpha of every portfolio against every benchmark.

    Args:
        rp (torch.Tensor): Portfolio returns of shape (p, n); entry (i, j)
            is the j-th return of the i-th portfolio
        rb (torch.Tensor): Benchmark returns of shape (b, n); entry (i, j)
            is the j-th return of the i-th benchmark
        rf (torch.Tensor): Risk-free rate as a 0-D (scalar) tensor

    Returns:
        torch.Tensor: Matrix of shape (p, b) whose (i, j) entry is the alpha
            of portfolio i relative to benchmark j
    """
    __expect_rp_rb_rf(rp, rb, rf)
    result = kernels.alpha(rp, rb, rf)
    return result
def sharpe(rp: torch.Tensor, rf: torch.Tensor) -> torch.Tensor:
    """Compute the Sharpe ratio of each portfolio.

    Args:
        rp (torch.Tensor): Portfolio returns of shape (p, n); entry (i, j)
            is the j-th return of the i-th portfolio
        rf (torch.Tensor): Risk-free rate as a 0-D (scalar) tensor

    Returns:
        torch.Tensor: Column vector of shape (p, 1); the i-th entry is the
            Sharpe ratio of portfolio i
    """
    __expect_rp_rb_rf(rp, None, rf)
    result = kernels.sharpe(rp, rf)
    return result
def treynor(rp: torch.Tensor, rb: torch.Tensor, rf: torch.Tensor) -> torch.Tensor:
    """Compute the Treynor ratio for every portfolio/benchmark pair.

    Args:
        rp (torch.Tensor): Portfolio returns of shape (p, n); entry (i, j)
            is the j-th return of the i-th portfolio
        rb (torch.Tensor): Benchmark returns of shape (b, n); entry (i, j)
            is the j-th return of the i-th benchmark
        rf (torch.Tensor): Risk-free rate as a 0-D (scalar) tensor

    Returns:
        torch.Tensor: Matrix of shape (p, b) whose (i, j) entry is the
            Treynor ratio of portfolio i relative to benchmark j
    """
    __expect_rp_rb_rf(rp, rb, rf)
    result = kernels.treynor(rp, rb, rf)
    return result
def sortino(rp: torch.Tensor, rf: torch.Tensor) -> torch.Tensor:
    """Compute the Sortino ratio of each portfolio.

    Args:
        rp (torch.Tensor): Portfolio returns of shape (p, n); entry (i, j)
            is the j-th return of the i-th portfolio
        rf (torch.Tensor): Risk-free rate as a 0-D (scalar) tensor

    Returns:
        torch.Tensor: Column vector of shape (p, 1); the i-th entry is the
            Sortino ratio of portfolio i
    """
    __expect_rp_rb_rf(rp, None, rf)
    result = kernels.sortino(rp, rf)
    return result
def tracking_error(rp: torch.Tensor, rb: torch.Tensor) -> torch.Tensor:
    """Compute the tracking error for every portfolio/benchmark pair.

    Args:
        rp (torch.Tensor): Portfolio returns of shape (p, n); entry (i, j)
            is the j-th return of the i-th portfolio
        rb (torch.Tensor): Benchmark returns of shape (b, n); entry (i, j)
            is the j-th return of the i-th benchmark

    Returns:
        torch.Tensor: Matrix of shape (p, b) whose (i, j) entry is the
            tracking error of portfolio i relative to benchmark j
    """
    __expect_rp_rb_rf(rp, rb, None)
    result = kernels.tracking_error(rp, rb)
    return result
def information(rp: torch.Tensor, rb: torch.Tensor) -> torch.Tensor:
    """Returns the information ratios for all pairs of p portfolios and b
    benchmarks

    Args:
        rp (torch.Tensor): p-by-n matrix where the (i, j) entry corresponds to
            the j-th return of the i-th portfolio
        rb (torch.Tensor): b-by-n matrix where the (i, j) entry corresponds to
            the j-th return of the i-th benchmark

    Returns:
        torch.Tensor: p-by-b matrix where the (i, j) entry corresponds to the
            information ratio for the i-th portfolio and j-th benchmark
    """
    # Fix: return annotation was `-> float`, contradicting the docstring and
    # every sibling metric in this module, which all return a torch.Tensor.
    __expect_rp_rb_rf(rp, rb, None)
    return kernels.information(rp, rb)
def up_capture(rp: torch.Tensor, rb: torch.Tensor) -> torch.Tensor:
    """Compute the up-market capture ratio for every portfolio/benchmark pair.

    Args:
        rp (torch.Tensor): Portfolio returns of shape (p, n); entry (i, j)
            is the j-th return of the i-th portfolio
        rb (torch.Tensor): Benchmark returns of shape (b, n); entry (i, j)
            is the j-th return of the i-th benchmark

    Returns:
        torch.Tensor: Matrix of shape (p, b) whose (i, j) entry is the
            up-market capture ratio of portfolio i relative to benchmark j
    """
    __expect_rp_rb_rf(rp, rb, None)
    result = kernels.up_capture(rp, rb)
    return result
def down_capture(rp: torch.Tensor, rb: torch.Tensor) -> torch.Tensor:
    """Compute the down-market capture ratio for every portfolio/benchmark pair.

    Args:
        rp (torch.Tensor): Portfolio returns of shape (p, n); entry (i, j)
            is the j-th return of the i-th portfolio
        rb (torch.Tensor): Benchmark returns of shape (b, n); entry (i, j)
            is the j-th return of the i-th benchmark

    Returns:
        torch.Tensor: Matrix of shape (p, b) whose (i, j) entry is the
            down-market capture ratio of portfolio i relative to benchmark j
    """
    __expect_rp_rb_rf(rp, rb, None)
    result = kernels.down_capture(rp, rb)
    return result
def capture(rp: torch.Tensor, rb: torch.Tensor) -> torch.Tensor:
    """Compute the capture ratio for every portfolio/benchmark pair.

    Args:
        rp (torch.Tensor): Portfolio returns of shape (p, n); entry (i, j)
            is the j-th return of the i-th portfolio
        rb (torch.Tensor): Benchmark returns of shape (b, n); entry (i, j)
            is the j-th return of the i-th benchmark

    Returns:
        torch.Tensor: Matrix of shape (p, b) whose (i, j) entry is the
            capture ratio of portfolio i relative to benchmark j
    """
    __expect_rp_rb_rf(rp, rb, None)
    result = kernels.capture(rp, rb)
    return result
| 37.188073
| 86
| 0.649932
| 1,335
| 8,107
| 3.89588
| 0.08015
| 0.145933
| 0.034609
| 0.113055
| 0.813882
| 0.801192
| 0.762546
| 0.756585
| 0.750433
| 0.750433
| 0
| 0.002835
| 0.260392
| 8,107
| 217
| 87
| 37.359447
| 0.864576
| 0.634883
| 0
| 0.188679
| 0
| 0
| 0.076144
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.207547
| false
| 0
| 0.056604
| 0
| 0.45283
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4df92cea20e604187f12842fb39ff420ceba1430
| 23,840
|
py
|
Python
|
paper/plot_fisher.py
|
AdriJD/cmb_sst_ksw
|
635f0627a3c8a36c743cdf8955e3671352ab6d90
|
[
"MIT"
] | null | null | null |
paper/plot_fisher.py
|
AdriJD/cmb_sst_ksw
|
635f0627a3c8a36c743cdf8955e3671352ab6d90
|
[
"MIT"
] | null | null | null |
paper/plot_fisher.py
|
AdriJD/cmb_sst_ksw
|
635f0627a3c8a36c743cdf8955e3671352ab6d90
|
[
"MIT"
] | null | null | null |
'''
Plot output from calc_fisher.
'''
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
from scipy.interpolate import CubicSpline, PchipInterpolator
import numpy as np
import os
matplotlib.rcParams['mathtext.fontset'] = 'cm'
matplotlib.rcParams['font.family'] = 'STIXGeneral'
opj = os.path.join
def plot_cv_scaling(fisher_dir, out_dir, prim_template='local'):
    '''
    Plot sigma(fnl) vs lmax in the cosmic-variance-limited case, for a grid
    of r values and B-mode lmin choices (one panel per r).

    Arguments
    ---------
    fisher_dir : str
        Directory containing fisher pkl files.
    out_dir : str
        Directory for output figures.
    prim_template : str, optional
        Primordial template name used in the fisher file tags.
    '''
    lmax_start = 500
    lmax_end = 4900
    lmax_steps = 10
    lmax_arr = np.logspace(np.log10(lmax_start), np.log10(lmax_end), lmax_steps)
    lmax_arr = lmax_arr.astype(int)
    lmin_b_arr = np.asarray([2, 20, 30, 50, 80])
    pol_opts_arr = [dict(no_ee=False, no_tt=False),
                    dict(no_ee=True, no_tt=False),
                    dict(no_ee=False, no_tt=True)]
    r_arr = [0, 0.001, 0.01, 0.1]
    noise_amp_temp = 0
    noise_amp_e = 0
    noise_amp_b = 0
    lmin_e = 2
    A_lens = 0.1
    # Array to fill with loaded fisher values.
    fnl_arr = np.ones((len(r_arr), lmin_b_arr.size, len(pol_opts_arr), lmax_steps))
    fnl_arr *= np.nan
    # Load pickle files.  Missing files are reported and left as NaN.
    for ridx, r in enumerate(r_arr):
        for lidx, lmax in enumerate(lmax_arr):
            for lidx_b, lmin_b in enumerate(lmin_b_arr):
                for pidx, pol_opts in enumerate(pol_opts_arr):
                    no_ee = pol_opts['no_ee']
                    no_tt = pol_opts['no_tt']
                    tag = ('{}_nt{:.4f}_ne{:.4f}_nb{:.4f}_lb{:d}_le{:d}_nee{:d}'
                           '_ntt{:d}_a{:.4f}_r{:.4f}_l{:d}'.format(prim_template,
                            noise_amp_temp, noise_amp_e, noise_amp_b, lmin_b,
                            lmin_e, int(no_ee), int(no_tt), A_lens, r, lmax))
                    try:
                        fisher_file = opj(fisher_dir, 'f_{}.pkl'.format(tag))
                        fisher_opts = np.load(fisher_file)
                    except IOError:
                        print('{} not found'.format(fisher_file))
                        continue
                    fnl_arr[ridx, lidx_b, pidx, lidx] = fisher_opts['sigma_fnl']
    # r, lmin_b, pol, lmax
    # Fix: was a Python-2-only print statement (`print fnl_arr[...]`).
    print(fnl_arr[:, 0, 0, 0])
    # Interpolate onto a denser lmax grid for smooth curves.
    i_fact = 20
    lmax_arr_i = np.logspace(np.log10(lmax_start), np.log10(lmax_end), i_fact * lmax_steps)
    fnl_arr_i = np.ones((len(r_arr), lmin_b_arr.size, len(pol_opts_arr), i_fact * lmax_steps))
    # Fix: `xrange` replaced with `range` (version-agnostic).
    for i in range(fnl_arr.shape[0]):
        for j in range(fnl_arr.shape[1]):
            for k in range(fnl_arr.shape[2]):
                cs = PchipInterpolator(lmax_arr, fnl_arr[i, j, k, :])
                fnl_arr_i[i, j, k, :] = cs(lmax_arr_i)
    fnl_arr = fnl_arr_i
    lmax_arr = lmax_arr_i
    # Plot.
    font = {'size' : 12}
    matplotlib.rc('font', **font)
    fig, axs = plt.subplots(ncols=2, nrows=2, figsize=(4, 4), sharey=True, sharex=False)
    plot_opts = dict(color='black')
    lstyles = ['-', '-.', '', '--', ':']
    for lidx_b, lmin_b in enumerate(lmin_b_arr):
        for pidx, pol_opts in enumerate(pol_opts_arr):
            # Only the full-polarization combination (pidx == 0) is drawn.
            if pidx > 0:
                alpha = 0.5
                continue
            else:
                alpha = 1
            if lmin_b == 30:
                continue
            label = r'$\ell_{\mathrm{min}}^B = '+str(lmin_b)+'$'
            # Split legend entries between the two bottom panels.
            if lidx_b < 2:
                label_a = label
                label_b = None
            else:
                label_a = None
                label_b = label
            axs[0,0].plot(lmax_arr, fnl_arr[0,lidx_b,pidx,:], alpha=alpha,
                          ls=lstyles[lidx_b], **plot_opts)
            axs[0,1].plot(lmax_arr, fnl_arr[1,lidx_b,pidx,:], alpha=alpha,
                          ls=lstyles[lidx_b], **plot_opts)
            axs[1,0].plot(lmax_arr, fnl_arr[2,lidx_b,pidx,:], alpha=alpha,
                          ls=lstyles[lidx_b], label=label_a, **plot_opts)
            axs[1,1].plot(lmax_arr, fnl_arr[3,lidx_b,pidx,:], alpha=alpha,
                          ls=lstyles[lidx_b], label=label_b, **plot_opts)
    fig.text(0.001, 0.5, r'$\sigma(\hat{f}_{\mathrm{NL}}^{\, \mathrm{tot}})$',
             ha='center', va='center', rotation='vertical')
    fig.text(0.5, 0.03, r'harmonic band-limit $\ell_{\mathrm{max}}$',
             ha='center', va='center', rotation='horizontal')
    fig.suptitle(r'Cosmic variance only, $A^{BB}_{\mathrm{lens}} = 0.1$', y=.95)
    # Fix: removed redundant in-function `import matplotlib.ticker as ticker`
    # (already imported at module level and unused here).
    for i, ax in enumerate(axs.ravel()):
        ax.set_yscale('log')
        ax.set_xscale('log')
        ax.tick_params(axis='both', direction='in', top=True, right=True, which='major')
        ax.tick_params(axis='both', direction='in', top=True, right=True, which='minor',
                       labelsize=10)
        ax.set_xlim(400, 6000)
        ax.set_ylim(4e-3, 7e0)
        ax.text(0.9, 0.85, r'$r='+str(r_arr[i])+'$', transform=ax.transAxes,
                horizontalalignment='right')
    axs[1,1].xaxis.set_ticklabels(['','','','','','','','',r'$5\times10^3$'],
                                  minor=True)
    axs[1,1].set_xticks([500, 600, 700, 800, 900, 2000, 3000, 4000, 5000], minor=True)
    axs[1,0].xaxis.set_ticklabels(['','','','','','','','',r'$5\times10^3$'],
                                  minor=True)
    axs[1,0].set_xticks([500, 600, 700, 800, 900, 2000, 3000, 4000, 5000], minor=True)
    axs[1,1].legend(ncol=1, frameon=False, loc=(0.05, 0.001),
                    markerscale=.1, handletextpad=0.3, handlelength=1.3,
                    prop={'size': 12})
    axs[1,0].legend(ncol=1, frameon=False, loc=(0.05, 0.001),
                    markerscale=.1, handletextpad=0.3, handlelength=1.3,
                    prop={'size': 12})
    fig.subplots_adjust(hspace=0., wspace=0.)
    fig.savefig(opj(out_dir, 'cv_lim_{}.pdf'.format(prim_template)),
                dpi=300, bbox_inches='tight')
def plot_cv_scaling_A_lens(fisher_dir, out_dir, prim_template='local'):
    '''
    Plot sigma(fnl) vs lmax in the cosmic-variance-limited case at fixed
    r = 0.001, for a grid of A_lens values (one panel per A_lens).

    Arguments
    ---------
    fisher_dir : str
        Directory containing fisher pkl files.
    out_dir : str
        Directory for output figures.
    prim_template : str, optional
        Primordial template name used in the fisher file tags.
    '''
    lmax_start = 500
    lmax_end = 4900
    lmax_steps = 10
    lmax_arr = np.logspace(np.log10(lmax_start), np.log10(lmax_end), lmax_steps)
    lmax_arr = lmax_arr.astype(int)
    lmin_b_arr = np.asarray([2, 20, 30, 50, 80])
    pol_opts_arr = [dict(no_ee=False, no_tt=False),
                    dict(no_ee=True, no_tt=False),
                    dict(no_ee=False, no_tt=True)]
    A_lens_arr = [0, 0.1, 0.5, 1]
    noise_amp_temp = 0
    noise_amp_e = 0
    noise_amp_b = 0
    lmin_e = 2
    r = 0.001
    # Array to fill with loaded fisher values.
    fnl_arr = np.ones((len(A_lens_arr), lmin_b_arr.size, len(pol_opts_arr), lmax_steps))
    fnl_arr *= np.nan
    # Load pickle files.  Missing files are reported and left as NaN.
    for aidx, A_lens in enumerate(A_lens_arr):
        for lidx, lmax in enumerate(lmax_arr):
            for lidx_b, lmin_b in enumerate(lmin_b_arr):
                for pidx, pol_opts in enumerate(pol_opts_arr):
                    no_ee = pol_opts['no_ee']
                    no_tt = pol_opts['no_tt']
                    tag = ('{}_nt{:.4f}_ne{:.4f}_nb{:.4f}_lb{:d}_le{:d}_nee{:d}'
                           '_ntt{:d}_a{:.4f}_r{:.4f}_l{:d}'.format(prim_template,
                            noise_amp_temp, noise_amp_e, noise_amp_b, lmin_b,
                            lmin_e, int(no_ee), int(no_tt), A_lens, r, lmax))
                    try:
                        fisher_file = opj(fisher_dir, 'f_{}.pkl'.format(tag))
                        fisher_opts = np.load(fisher_file)
                    except IOError:
                        print('{} not found'.format(fisher_file))
                        continue
                    fnl_arr[aidx, lidx_b, pidx, lidx] = fisher_opts['sigma_fnl']
    # r, lmin_b, pol, lmax
    # Fix: was a Python-2-only print statement (`print fnl_arr[...]`).
    print(fnl_arr[:, 0, 0, 0])
    # Interpolate onto a denser lmax grid for smooth curves.
    i_fact = 20
    lmax_arr_i = np.logspace(np.log10(lmax_start), np.log10(lmax_end), i_fact * lmax_steps)
    fnl_arr_i = np.ones((len(A_lens_arr), lmin_b_arr.size, len(pol_opts_arr), i_fact * lmax_steps))
    # Fix: `xrange` replaced with `range` (version-agnostic).
    for i in range(fnl_arr.shape[0]):
        for j in range(fnl_arr.shape[1]):
            for k in range(fnl_arr.shape[2]):
                cs = PchipInterpolator(lmax_arr, fnl_arr[i, j, k, :])
                fnl_arr_i[i, j, k, :] = cs(lmax_arr_i)
    fnl_arr = fnl_arr_i
    lmax_arr = lmax_arr_i
    # Plot.
    font = {'size' : 12}
    matplotlib.rc('font', **font)
    fig, axs = plt.subplots(ncols=2, nrows=2, figsize=(4, 4), sharey=True, sharex=False)
    plot_opts = dict(color='black')
    lstyles = ['-', '-.', '', '--', ':']
    for lidx_b, lmin_b in enumerate(lmin_b_arr):
        for pidx, pol_opts in enumerate(pol_opts_arr):
            # Only the full-polarization combination (pidx == 0) is drawn.
            if pidx > 0:
                alpha = 0.5
                continue
            else:
                alpha = 1
            if lmin_b == 30:
                continue
            label = r'$\ell_{\mathrm{min}}^B = '+str(lmin_b)+'$'
            # Split legend entries between the two bottom panels.
            if lidx_b < 2:
                label_a = label
                label_b = None
            else:
                label_a = None
                label_b = label
            axs[0,0].plot(lmax_arr, fnl_arr[0,lidx_b,pidx,:], alpha=alpha,
                          ls=lstyles[lidx_b], **plot_opts)
            axs[0,1].plot(lmax_arr, fnl_arr[1,lidx_b,pidx,:], alpha=alpha,
                          ls=lstyles[lidx_b], **plot_opts)
            axs[1,0].plot(lmax_arr, fnl_arr[2,lidx_b,pidx,:], alpha=alpha,
                          ls=lstyles[lidx_b], label=label_a, **plot_opts)
            axs[1,1].plot(lmax_arr, fnl_arr[3,lidx_b,pidx,:], alpha=alpha,
                          ls=lstyles[lidx_b], label=label_b, **plot_opts)
    fig.text(0.001, 0.5, r'$\sigma(\hat{f}_{\mathrm{NL}}^{\, \mathrm{tot}})$',
             ha='center', va='center', rotation='vertical')
    fig.text(0.5, 0.03, r'harmonic band-limit $\ell_{\mathrm{max}}$',
             ha='center', va='center', rotation='horizontal')
    fig.suptitle(r'Cosmic variance only, $r= 0.001$', y=.95)
    for i, ax in enumerate(axs.ravel()):
        ax.set_yscale('log')
        ax.set_xscale('log')
        ax.tick_params(axis='both', direction='in', top=True, right=True, which='major')
        ax.tick_params(axis='both', direction='in', top=True, right=True, which='minor',
                       labelsize=10)
        ax.set_xlim(400, 6000)
        ax.set_ylim(4e-3, 7e0)
        ax.text(0.9, 0.85, r'$A^{BB}_{\mathrm{lens}}='+str(A_lens_arr[i])+'$',
                transform=ax.transAxes, horizontalalignment='right')
    axs[1,1].xaxis.set_ticklabels(['','','','','','','','',r'$5\times10^3$'],
                                  minor=True)
    axs[1,1].set_xticks([500, 600, 700, 800, 900, 2000, 3000, 4000, 5000], minor=True)
    axs[1,0].xaxis.set_ticklabels(['','','','','','','','',r'$5\times10^3$'],
                                  minor=True)
    axs[1,0].set_xticks([500, 600, 700, 800, 900, 2000, 3000, 4000, 5000], minor=True)
    axs[1,1].legend(ncol=1, frameon=False, loc=(0.05, 0.001),
                    markerscale=.1, handletextpad=0.3, handlelength=1.3,
                    prop={'size': 12})
    axs[1,0].legend(ncol=1, frameon=False, loc=(0.05, 0.001),
                    markerscale=.1, handletextpad=0.3, handlelength=1.3,
                    prop={'size': 12})
    fig.subplots_adjust(hspace=0., wspace=0.)
    fig.savefig(opj(out_dir, 'cv_lim_alens.pdf'), dpi=300, bbox_inches='tight')
def plot_pol(fisher_dir, out_dir, prim_template='local', plot_invcov=False):
    '''
    Plot sigma(fnl) vs lmax for different polarization combinations
    (B+T, B+E, B+T+E), noiseless vs noisy (one panel each).

    Arguments
    ---------
    fisher_dir : str
        Directory containing fisher pkl files.
    out_dir : str
        Directory for output figures.
    prim_template : str, optional
        Primordial template name used in the fisher file tags.
    plot_invcov : bool, optional
        Also plot the (inverse) covariance for each loaded tag.
    '''
    # BUG FIX: the `plot_invcov` parameter shadows the module-level
    # plot_invcov() function; the original code called the boolean itself,
    # raising TypeError whenever plot_invcov=True.  The parameter name is
    # public API, so grab the module-level function via globals() instead.
    _render_invcov = globals()['plot_invcov']
    lmax_start = 500
    lmax_end = 4900
    lmax_steps = 10
    lmax_arr = np.logspace(np.log10(lmax_start), np.log10(lmax_end), lmax_steps)
    lmax_arr = lmax_arr.astype(int)
    lmin_b = 2
    pol_opts_arr = [dict(no_ee=True, no_tt=False),
                    dict(no_ee=False, no_tt=True),
                    dict(no_ee=False, no_tt=False)]
    pol_opts_names = [r'$B+T$', r'$B+E$', r'$B+T + E$']
    r = 0.001
    lmin_e = 2
    # Add noise array.
    noise_opts_arr = [dict(noise_amp_temp=0, noise_amp_e=0, noise_amp_b=0),
                      dict(noise_amp_temp=4, noise_amp_e=4*np.sqrt(2),
                           noise_amp_b=4*np.sqrt(2))]
    n_ell_arr = ['0', '4']
    A_lens = 0.5
    # Array to fill with loaded fisher values.
    fnl_arr = np.ones((len(noise_opts_arr), len(pol_opts_arr), lmax_steps))
    fnl_arr *= np.nan
    # Load pickle files.  Missing files are reported and left as NaN.
    for nidx, noise_opts in enumerate(noise_opts_arr):
        for lidx, lmax in enumerate(lmax_arr):
            for pidx, pol_opts in enumerate(pol_opts_arr):
                no_ee = pol_opts['no_ee']
                no_tt = pol_opts['no_tt']
                noise_amp_temp = noise_opts['noise_amp_temp']
                noise_amp_e = noise_opts['noise_amp_e']
                noise_amp_b = noise_opts['noise_amp_b']
                tag = ('{}_nt{:.4f}_ne{:.4f}_nb{:.4f}_lb{:d}_le{:d}_nee{:d}'
                       '_ntt{:d}_a{:.4f}_r{:.4f}_l{:d}'.format(prim_template,
                        noise_amp_temp, noise_amp_e, noise_amp_b, lmin_b,
                        lmin_e, int(no_ee), int(no_tt), A_lens, r, lmax))
                # Load fisher.
                try:
                    fisher_file = opj(fisher_dir, 'f_{}.pkl'.format(tag))
                    fisher_opts = np.load(fisher_file)
                except IOError:
                    print('{} not found'.format(fisher_file))
                    continue
                fnl_arr[nidx, pidx, lidx] = fisher_opts['sigma_fnl']
                if plot_invcov:
                    # Load invcov and cov, plot right away.
                    try:
                        invcov_file = opj(fisher_dir, 'invcov_{}.pkl'.format(tag))
                        invcov_opts = np.load(invcov_file)
                    except IOError:
                        print('{} not found'.format(invcov_file))
                        continue
                    invcov_name = opj(out_dir, 'invcov', 'invcov_{}.png'.format(tag))
                    cov_name = opj(out_dir, 'invcov', 'cov_{}.png'.format(tag))
                    ells = invcov_opts['ells']
                    invcov = invcov_opts['invcov']
                    cov = invcov_opts['cov']
                    _render_invcov(ells, invcov, invcov_name, dell=False)
                    _render_invcov(ells, cov, cov_name)
    # pol, lmax
    # Fix: was a Python-2-only print statement (`print fnl_arr.shape`).
    print(fnl_arr.shape)
    # Interpolate onto a denser lmax grid for smooth curves.
    i_fact = 20
    lmax_arr_i = np.logspace(np.log10(lmax_start), np.log10(lmax_end), i_fact * lmax_steps)
    fnl_arr_i = np.ones((len(noise_opts_arr), len(pol_opts_arr), i_fact * lmax_steps))
    # Fix: `xrange` replaced with `range` (version-agnostic).
    for i in range(fnl_arr.shape[0]):
        for j in range(fnl_arr.shape[1]):
            cs = PchipInterpolator(lmax_arr, fnl_arr[i, j, :])
            fnl_arr_i[i, j, :] = cs(lmax_arr_i)
    fnl_arr = fnl_arr_i
    lmax_arr = lmax_arr_i
    # Plot.
    font = {'size' : 12}
    matplotlib.rc('font', **font)
    fig, axs = plt.subplots(ncols=2, nrows=1, figsize=(4, 2), sharey=True, sharex=False)
    plot_opts = dict(color='black')
    lstyles = ['--', '-.', '-', ':']
    for pidx, pol_opts in enumerate(pol_opts_arr):
        label = pol_opts_names[pidx]
        # Split legend entries between the two panels.
        if pidx < 2:
            label_a = label
            label_b = None
        else:
            label_a = None
            label_b = label
        axs[0].plot(lmax_arr, fnl_arr[0,pidx,:],
                    ls=lstyles[pidx], label=label_a, **plot_opts)
        axs[1].plot(lmax_arr, fnl_arr[1,pidx,:],
                    ls=lstyles[pidx], label=label_b, **plot_opts)
    fig.text(0.001, 0.5, r'$\sigma(\hat{f}_{\mathrm{NL}}^{\, \mathrm{tot}})$',
             ha='center', va='center', rotation='vertical')
    fig.text(0.5, -0.05, r'harmonic band-limit $\ell_{\mathrm{max}}$',
             ha='center', va='center', rotation='horizontal')
    fig.suptitle(r'$A^{BB}_{\mathrm{lens}} = '+str(A_lens)+'$, $r='+str(r)+'$', y=1.03)
    for i, ax in enumerate(axs.ravel()):
        ax.set_yscale('log')
        ax.set_xscale('log')
        ax.tick_params(axis='both', direction='in', top=True, right=True, which='major')
        ax.tick_params(axis='both', direction='in', top=True, right=True, which='minor',
                       labelsize=10)
        ax.set_xlim(400, 6000)
        ax.set_ylim(4e-3, 7e0)
        ax.text(0.9, 0.85,
                r"$"+str(n_ell_arr[i])+"\ \mu \mathrm{K}$-$\mathrm{arcmin}$",
                transform=ax.transAxes, horizontalalignment='right')
    axs[1].xaxis.set_ticklabels(['','','','','','','','',r'$5\times10^3$'],
                                minor=True)
    axs[1].set_xticks([500, 600, 700, 800, 900, 2000, 3000, 4000, 5000], minor=True)
    axs[0].xaxis.set_ticklabels(['','','','','','','','',r'$5\times10^3$'],
                                minor=True)
    axs[0].set_xticks([500, 600, 700, 800, 900, 2000, 3000, 4000, 5000], minor=True)
    axs[0].legend(ncol=1, frameon=False, loc=(0.05, 0.001),
                  markerscale=.1, handletextpad=0.3, handlelength=1.3,
                  prop={'size': 12})
    axs[1].legend(ncol=1, frameon=False, loc=(0.05, 0.001),
                  markerscale=.1, handletextpad=0.3, handlelength=1.3,
                  prop={'size': 12})
    fig.subplots_adjust(hspace=0., wspace=0.)
    fig.savefig(opj(out_dir, 'pol.pdf'), dpi=300, bbox_inches='tight')
def plot_noise(fisher_dir, out_dir, prim_template='local'):
    '''
    Plot sigma as function of B noise for several T/E noise curves and two
    choices of A_lens.

    Arguments
    ---------
    fisher_dir : str
        Directory containing fisher pkl files.
    out_dir : str
        Directory for output figures.
    prim_template : str, optional
        Primordial template name used in the fisher file tags.
    '''
    r = 0.001
    lmax = 4900
    lmin_b = 50
    pol_opts = dict(no_ee=False, no_tt=False)
    lmin_e = 2
    # noise_i_arr = [0.3, 1, 3, 10]
    noise_i_arr = [10, 1]
    noise_b_arr = np.logspace(np.log10(0.3), np.log10(50), 10)
    A_lens_arr = [0.1, 1]
    fnl_arr = np.ones((len(A_lens_arr), len(noise_i_arr), len(noise_b_arr)))
    fnl_arr *= np.nan
    # Load pickle files.  Missing files are reported and left as NaN.
    for aidx, A_lens in enumerate(A_lens_arr):
        for ni_idx, n_i in enumerate(noise_i_arr):
            for nb_idx, n_b in enumerate(noise_b_arr):
                no_ee = pol_opts['no_ee']
                no_tt = pol_opts['no_tt']
                noise_amp_temp = n_i
                noise_amp_e = n_i * np.sqrt(2)
                noise_amp_b = n_b
                tag = ('{}_nt{:.4f}_ne{:.4f}_nb{:.4f}_lb{:d}_le{:d}_nee{:d}'
                       '_ntt{:d}_a{:.4f}_r{:.4f}_l{:d}'.format(prim_template,
                        noise_amp_temp, noise_amp_e, noise_amp_b, lmin_b,
                        lmin_e, int(no_ee), int(no_tt), A_lens, r, lmax))
                # Load fisher.
                try:
                    fisher_file = opj(fisher_dir, 'f_{}.pkl'.format(tag))
                    fisher_opts = np.load(fisher_file)
                except IOError:
                    print('{} not found'.format(fisher_file))
                    continue
                fnl_arr[aidx, ni_idx, nb_idx] = fisher_opts['sigma_fnl']
    # pol, lmax
    # Fix: was a Python-2-only print statement (`print fnl_arr.shape`).
    print(fnl_arr.shape)
    # Interpolate.  NOTE(review): the interpolated arrays are computed but
    # deliberately not used below (the re-assignment is commented out).
    i_fact = 20
    noise_b_arr_i = np.logspace(np.log10(0.3), np.log10(50), i_fact * 10)
    fnl_arr_i = np.ones((len(A_lens_arr), len(noise_i_arr), i_fact * len(noise_b_arr)))
    # Fix: `xrange` replaced with `range` (version-agnostic).
    for i in range(fnl_arr.shape[0]):
        for j in range(fnl_arr.shape[1]):
            cs = PchipInterpolator(noise_b_arr, fnl_arr[i, j, :])
            fnl_arr_i[i, j, :] = cs(noise_b_arr_i)
    # fnl_arr = fnl_arr_i
    # noise_b_arr = noise_b_arr_i
    # Plot.
    font = {'size' : 12}
    matplotlib.rc('font', **font)
    fig, axs = plt.subplots(ncols=2, nrows=1, figsize=(4, 2), sharey=True, sharex=False)
    plot_opts = dict(color='black')
    lstyles = ['-', ':', '--', '-.']
    for ni_idx, n_i in enumerate(noise_i_arr):
        label = r"$"+str(n_i)+"\ \mu \mathrm{K}$-$'$"
        axs[0].plot(noise_b_arr, fnl_arr[0,ni_idx,:],
                    ls=lstyles[ni_idx], label=None, **plot_opts)
        axs[1].plot(noise_b_arr, fnl_arr[1,ni_idx,:],
                    ls=lstyles[ni_idx], label=label, **plot_opts)
    fig.text(0.001, 0.5, r'$\sigma(\hat{f}_{\mathrm{NL}}^{\, \mathrm{tot}})$',
             ha='center', va='center', rotation='vertical')
    fig.text(0.5, -0.05, r'$B$-mode noise [$\mu \mathrm{K}$-$\mathrm{arcmin}$]',
             ha='center', va='center', rotation='horizontal')
    fig.suptitle(r'$\ell_{\mathrm{min}}^B='+str(lmin_b)+'$, $r='+str(r)+'$', y=1.03)
    for i, ax in enumerate(axs.ravel()):
        ax.set_yscale('log')
        ax.set_xscale('log')
        ax.tick_params(axis='both', direction='in', top=True, right=True, which='major')
        ax.tick_params(axis='both', direction='in', top=True, right=True, which='minor',
                       labelsize=10)
        ax.set_xlim(0.2, 70)
        # ax.set_ylim(4e-3, 7e0)
        ax.text(0.1, 0.85,
                r"$A^{BB}_{\mathrm{lens}} = "+str(A_lens_arr[i])+"$",
                transform=ax.transAxes, horizontalalignment='left')
    axs[1].legend(ncol=1, frameon=False, loc=(0.42, 0.0001),
                  markerscale=.1, handletextpad=0.3, handlelength=1.3,
                  prop={'size': 12})
    fig.subplots_adjust(hspace=0., wspace=0.)
    fig.savefig(opj(out_dir, 'noise.pdf'), dpi=300, bbox_inches='tight')
def plot_invcov(ells, invcov, filename, dell=True):
    ''' Plot the 3x3 (T, E, B) inverse-covariance elements per multipole.

    Arguments
    ---------
    ells : array-like
        Multipoles for the x-axis.
    invcov : array-like, shape (nell, 3, 3)
        Inverse covariance; axes 1 and 2 index (T, E, B).
    filename : str
        Output path passed to fig.savefig.
    dell : bool, optional
        If True, scale each element by ell(ell+1)/(2 pi).
    '''
    # NOTE: an unused TT/EE/... -> index lookup table was removed here;
    # the plot indexes invcov directly by (pidx1, pidx2).
    if dell:
        dells = ells * (ells + 1) / 2. / np.pi
    else:
        dells = 1.

    fig, axs = plt.subplots(nrows=3, ncols=3, figsize=(9,7))
    for pidx1, pol1 in enumerate(['T', 'E', 'B']):
        for pidx2, pol2 in enumerate(['T', 'E', 'B']):
            axs[pidx1,pidx2].plot(ells, dells*invcov[:,pidx1,pidx2],
                                  label=pol1+pol2)
    # Label every panel with its polarization pair.
    for ax in axs.reshape(-1):
        ax.legend()
    plt.tight_layout()
    fig.savefig(filename)
    plt.close(fig)
if __name__ == '__main__':
    # Site-specific analysis locations (absolute paths on the analysis host).
    base_dir = '/mn/stornext/d8/ITA/spider/adri/analysis/'
    ana_dir = opj(base_dir, '20190411_beta')
    # Alternative output directory, kept for quick switching:
    # out_dir = opj(ana_dir, 'img/fisher')
    out_dir = opj(ana_dir, 'img/img_temperature')
    fisher_dir = opj(ana_dir, 'fisher')

    # One plotting routine is enabled at a time; toggle the comments to
    # produce the other figures.
    #plot_cv_scaling(fisher_dir, out_dir, prim_template='local')
    #plot_cv_scaling_A_lens(fisher_dir, out_dir)
    plot_pol(fisher_dir, out_dir)
    #plot_noise(fisher_dir, out_dir)
| 35.266272
| 99
| 0.533977
| 3,382
| 23,840
| 3.53696
| 0.089592
| 0.029092
| 0.0158
| 0.014128
| 0.86223
| 0.845678
| 0.824946
| 0.800368
| 0.782227
| 0.761913
| 0
| 0.054229
| 0.304614
| 23,840
| 675
| 100
| 35.318519
| 0.66733
| 0.060193
| 0
| 0.687943
| 0
| 0.009456
| 0.093476
| 0.036174
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.016548
| null | null | 0.021277
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
12989e1bd475acdc3cceb23c4a424ba0563ded10
| 144
|
py
|
Python
|
nitorch/plot/__init__.py
|
balbasty/nitorch
|
d30c3125a8a66ea1434f2b39ed03338afd9724b4
|
[
"MIT"
] | 46
|
2020-07-31T10:14:05.000Z
|
2022-03-24T12:51:46.000Z
|
nitorch/plot/__init__.py
|
balbasty/nitorch
|
d30c3125a8a66ea1434f2b39ed03338afd9724b4
|
[
"MIT"
] | 36
|
2020-10-06T19:01:38.000Z
|
2022-02-03T18:07:35.000Z
|
nitorch/plot/__init__.py
|
balbasty/nitorch
|
d30c3125a8a66ea1434f2b39ed03338afd9724b4
|
[
"MIT"
] | 6
|
2021-01-05T14:59:05.000Z
|
2021-11-18T18:26:45.000Z
|
"""Plotting utilities."""
from . import gui
from .gui import *
from . import volumes
from .volumes import *
from . import vb
from .vb import *
| 16
| 25
| 0.701389
| 20
| 144
| 5.05
| 0.35
| 0.29703
| 0.316832
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 144
| 8
| 26
| 18
| 0.863248
| 0.131944
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
12fa078aca887a11c958e1c30416828f00106a30
| 141
|
py
|
Python
|
src/services/__init__.py
|
ToucanBran/crypto-buyer
|
8cf8a03e4e8631499c1f2a5b1cceb2521d76d513
|
[
"MIT"
] | 1
|
2021-11-25T11:34:23.000Z
|
2021-11-25T11:34:23.000Z
|
src/services/__init__.py
|
ToucanBran/crypto-buyer
|
8cf8a03e4e8631499c1f2a5b1cceb2521d76d513
|
[
"MIT"
] | 1
|
2021-11-25T01:54:22.000Z
|
2021-11-26T22:23:10.000Z
|
src/services/__init__.py
|
ToucanBran/crypto-buyer
|
8cf8a03e4e8631499c1f2a5b1cceb2521d76d513
|
[
"MIT"
] | null | null | null |
from .buyer_logger import *
from .rabbitmq_wrapper import *
from .coin_service import *
from .spot_api_wrapper import *
from .buyer import *
| 23.5
| 31
| 0.787234
| 20
| 141
| 5.3
| 0.5
| 0.377358
| 0.320755
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141844
| 141
| 5
| 32
| 28.2
| 0.876033
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4272e3afff834b7913d9e4abb09be00794d0f815
| 7,087
|
py
|
Python
|
attention.py
|
Zhiquan-Wen/D-VQA
|
688c4dcc811f49b431daea81406e628ec71a7247
|
[
"BSD-3-Clause"
] | 25
|
2021-11-09T07:05:18.000Z
|
2022-03-30T19:37:49.000Z
|
attention.py
|
Zhiquan-Wen/D-VQA
|
688c4dcc811f49b431daea81406e628ec71a7247
|
[
"BSD-3-Clause"
] | 7
|
2021-11-16T15:53:18.000Z
|
2022-03-29T03:46:35.000Z
|
attention.py
|
Zhiquan-Wen/D-VQA
|
688c4dcc811f49b431daea81406e628ec71a7247
|
[
"BSD-3-Clause"
] | 2
|
2021-12-16T04:35:57.000Z
|
2022-03-06T06:08:06.000Z
|
import torch
import torch.nn as nn
from torch.nn.utils.weight_norm import weight_norm
from fc import FCNet, GTH, get_norm
# Default concat, 1 layer, output layer
class Att_0(nn.Module):
    """Attention via concat(v, q) -> 1-layer FCNet -> linear score -> softmax."""

    def __init__(self, v_dim, q_dim, num_hid, norm, act, dropout=0.0):
        super(Att_0, self).__init__()
        norm_layer = get_norm(norm)
        self.nonlinear = FCNet([v_dim + q_dim, num_hid], dropout=dropout, norm=norm, act=act)
        self.linear = norm_layer(nn.Linear(num_hid, 1), dim=None)

    def forward(self, v, q):
        """
        v: [batch, k, vdim]
        q: [batch, qdim]
        Returns softmax attention weights over the k objects.
        """
        return nn.functional.softmax(self.logits(v, q), 1)

    def logits(self, v, q):
        # Tile the question over the k object slots and fuse by concatenation.
        k = v.size(1)
        tiled_q = q.unsqueeze(1).repeat(1, k, 1)
        fused = self.nonlinear(torch.cat((v, tiled_q), 2))
        return self.linear(fused)
# concat, 2 layer, output layer
class Att_1(nn.Module):
    """Attention via concat(v, q) -> 2-layer FCNet -> linear score -> softmax."""

    def __init__(self, v_dim, q_dim, num_hid, norm, act, dropout=0.0):
        super(Att_1, self).__init__()
        norm_layer = get_norm(norm)
        self.nonlinear = FCNet([v_dim + q_dim, num_hid, num_hid], dropout=dropout, norm=norm, act=act)
        self.linear = norm_layer(nn.Linear(num_hid, 1), dim=None)

    def forward(self, v, q):
        """
        v: [batch, k, vdim]
        q: [batch, qdim]
        Returns softmax attention weights over the k objects.
        """
        return nn.functional.softmax(self.logits(v, q), 1)

    def logits(self, v, q):
        # Tile the question over the k object slots and fuse by concatenation.
        k = v.size(1)
        tiled_q = q.unsqueeze(1).repeat(1, k, 1)
        fused = self.nonlinear(torch.cat((v, tiled_q), 2))
        return self.linear(fused)
# 1 layer separate, element-wise *, output layer
class Att_2(nn.Module):
    """Separate 1-layer projections of v and q, element-wise product, linear score; softmax weights."""

    def __init__(self, v_dim, q_dim, num_hid, norm, act, dropout=0.0):
        super(Att_2, self).__init__()
        norm_layer = get_norm(norm)
        self.v_proj = FCNet([v_dim, num_hid], dropout=dropout, norm=norm, act=act)
        self.q_proj = FCNet([q_dim, num_hid], dropout=dropout, norm=norm, act=act)
        # BUG FIX: the joint representation (v_proj * q_proj) has num_hid
        # features, so the scoring layer must take num_hid inputs.  The
        # original nn.Linear(q_dim, 1) only worked when q_dim == num_hid
        # and crashed otherwise; all sibling classes use num_hid here.
        self.linear = norm_layer(nn.Linear(num_hid, 1), dim=None)

    def forward(self, v, q):
        """
        v: [batch, k, vdim]
        q: [batch, qdim]
        Returns softmax attention weights over the k objects.
        """
        logits = self.logits(v, q)
        w = nn.functional.softmax(logits, 1)
        return w

    def logits(self, v, q):
        batch, k, _ = v.size()
        v_proj = self.v_proj(v)                               # [batch, k, num_hid]
        q_proj = self.q_proj(q).unsqueeze(1).repeat(1, k, 1)  # [batch, k, num_hid]
        joint_repr = v_proj * q_proj
        logits = self.linear(joint_repr)
        return logits
# 1 layer seperate, element-wise *, 1 layer seperate, output layer
class Att_3(nn.Module):
    """Separate projections, element-wise product, one more FC layer, linear score; softmax weights."""

    def __init__(self, v_dim, q_dim, num_hid, norm, act, dropout=0.0):
        super(Att_3, self).__init__()
        norm_layer = get_norm(norm)
        self.v_proj = FCNet([v_dim, num_hid], dropout=dropout, norm=norm, act=act)
        self.q_proj = FCNet([q_dim, num_hid], dropout=dropout, norm=norm, act=act)
        self.nonlinear = FCNet([num_hid, num_hid], dropout=dropout, norm=norm, act=act)
        self.linear = norm_layer(nn.Linear(num_hid, 1), dim=None)

    def forward(self, v, q):
        """
        v: [batch, k, vdim]
        q: [batch, qdim]
        Returns softmax attention weights over the k objects.
        """
        return nn.functional.softmax(self.logits(v, q), 1)

    def logits(self, v, q):
        k = v.size(1)
        # Project both inputs to num_hid, fuse multiplicatively, refine, score.
        joint = self.v_proj(v) * self.q_proj(q).unsqueeze(1).repeat(1, k, 1)
        return self.linear(self.nonlinear(joint))
# 1 layer separate, element-wise *, 1 layer separate, output layer
class Att_3S(nn.Module):
    """Same fusion as Att_3, but squashes each logit with an independent sigmoid
    instead of a softmax over the k objects."""

    def __init__(self, v_dim, q_dim, num_hid, norm, act, dropout=0.0):
        super(Att_3S, self).__init__()
        norm_layer = get_norm(norm)
        self.v_proj = FCNet([v_dim, num_hid], dropout=dropout, norm=norm, act=act)
        self.q_proj = FCNet([q_dim, num_hid], dropout=dropout, norm=norm, act=act)
        self.nonlinear = FCNet([num_hid, num_hid], dropout=dropout, norm=norm, act=act)
        self.linear = norm_layer(nn.Linear(num_hid, 1), dim=None)

    def forward(self, v, q):
        """
        v: [batch, k, vdim]
        q: [batch, qdim]
        Returns per-object gates in (0, 1).
        """
        logits = self.logits(v, q)
        # FIX: nn.functional.sigmoid is deprecated; torch.sigmoid is the
        # supported equivalent and computes identical values.
        w = torch.sigmoid(logits)
        #w = nn.functional.leaky_relu(logits)
        return w

    def logits(self, v, q):
        batch, k, _ = v.size()
        v_proj = self.v_proj(v)                               # [batch, k, num_hid]
        q_proj = self.q_proj(q).unsqueeze(1).repeat(1, k, 1)  # [batch, k, num_hid]
        joint_repr = v_proj * q_proj
        joint_repr = self.nonlinear(joint_repr)
        logits = self.linear(joint_repr)
        return logits
# concat w/ 2 layer seperate, element-wise *, output layer
class Att_PD(nn.Module):
    """Concat(v, q) fed through a 2-layer FCNet and a parallel sigmoid gate;
    gated product is scored by a linear layer, weights via softmax."""

    def __init__(self, v_dim, q_dim, num_hid, norm, act, dropout=0.0):
        super(Att_PD, self).__init__()
        norm_layer = get_norm(norm)
        self.nonlinear = FCNet([v_dim + q_dim, num_hid, num_hid], dropout=dropout, norm=norm, act=act)
        self.nonlinear_gate = FCNet([v_dim + q_dim, num_hid, num_hid], dropout=dropout, norm=norm, act='Sigmoid')
        self.linear = norm_layer(nn.Linear(num_hid, 1), dim=None)

    def forward(self, v, q):
        """
        v: [batch, k, vdim]
        q: [batch, qdim]
        Returns softmax attention weights over the k objects.
        """
        return nn.functional.softmax(self.logits(v, q), 1)

    def logits(self, v, q):
        k = v.size(1)
        vq = torch.cat((v, q.unsqueeze(1).repeat(1, k, 1)), 2)
        # Gate the joint representation element-wise before scoring.
        gated = self.nonlinear(vq) * self.nonlinear_gate(vq)
        return self.linear(gated)
# concat w/ 1 layer seperate, element-wise *, output layer
class Att_P(nn.Module):
    """Gated-tanh (GTH) fusion of concat(v, q) followed by a linear score; softmax weights."""

    def __init__(self, v_dim, q_dim, num_hid, norm, act, dropout=0.0):
        super(Att_P, self).__init__()
        norm_layer = get_norm(norm)
        self.gated_tanh = GTH(in_dim=v_dim + q_dim, out_dim=num_hid, dropout=dropout, norm=norm, act=act)
        self.linear = norm_layer(nn.Linear(num_hid, 1), dim=None)

    def forward(self, v, q):
        """
        v: [batch, k, vdim]
        q: [batch, qdim]
        Returns softmax attention weights over the k objects.
        """
        return nn.functional.softmax(self.logits(v, q), 1)

    def logits(self, v, q):
        k = v.size(1)
        vq = torch.cat((v, q.unsqueeze(1).repeat(1, k, 1)), 2)
        return self.linear(self.gated_tanh(vq))
| 35.974619
| 117
| 0.570481
| 1,049
| 7,087
| 3.642517
| 0.06673
| 0.0581
| 0.042397
| 0.03664
| 0.919393
| 0.906831
| 0.906831
| 0.906831
| 0.873855
| 0.857367
| 0
| 0.014994
| 0.294201
| 7,087
| 197
| 118
| 35.974619
| 0.7489
| 0.109214
| 0
| 0.796992
| 0
| 0
| 0.001189
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157895
| false
| 0
| 0.030075
| 0
| 0.345865
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c43f484ae7c8f6413879a9c2c5a438d2e8659f0b
| 10,764
|
py
|
Python
|
src/preprocess/main_preprocess.py
|
cfont03/Anomaly-breast-cancer-detection
|
d0fca05747b30afb546fe55ba3851883d99eb34f
|
[
"MIT"
] | null | null | null |
src/preprocess/main_preprocess.py
|
cfont03/Anomaly-breast-cancer-detection
|
d0fca05747b30afb546fe55ba3851883d99eb34f
|
[
"MIT"
] | null | null | null |
src/preprocess/main_preprocess.py
|
cfont03/Anomaly-breast-cancer-detection
|
d0fca05747b30afb546fe55ba3851883d99eb34f
|
[
"MIT"
] | null | null | null |
import pandas as pd
import numpy as np
from PIL import Image

# Project helpers are only imported when this file runs as a script.
if __name__ == '__main__':
    from utils.generate_mask import generate_mask
    from utils.horizontal_flip import horizontal_flip
    from utils.map_img_txt import map_img_txt
    from utils.noise import remove_noise
    from utils.rotation import rotate_images
    from utils.train_test_split import train_test_split
    from utils.vertical_flip import vertical_flip
    from utils.txt_csv import txt_to_csv
else:
    # NOTE(review): when imported as a module, none of the helpers above are
    # bound and the statements below would raise NameError — confirm intent.
    pass

# Convert the raw MIAS metadata to CSV and map image files onto its rows.
df_info = pd.read_csv("res/Info.txt", sep = " ", header = 'infer')
df0 = txt_to_csv(path_in="res/Info.txt", path_out="outputs/images_info_0.csv")
df1 = map_img_txt(path_in='outputs/images_info_0.csv', path_out='outputs/images_info_1.csv')
train, test = train_test_split(df1)

### Data Augmentation Techniques
# The flip/rotate routines return, per row: the transformed (x, y) centre and
# the min/max extrema of the bounding box.  remove_noise and generate_mask
# return nothing here; presumably all routines write augmented image files
# under res/all-mias/ (the mapping loops below open them) — TODO confirm.
x_y_hf_train, x_min_max_hf_train, y_min_max_hf_train = horizontal_flip(train)
x_y_hf_test, x_min_max_hf_test, y_min_max_hf_test = horizontal_flip(test)

remove_noise(train)
remove_noise(test)

x_y_train, x_min_max_train, y_min_max_train = rotate_images(train)
x_y_test, x_min_max_test, y_min_max_test = rotate_images(test)

generate_mask(train)
generate_mask(test)

x_y_vf_train, x_min_max_vf_train, y_min_max_vf_train = vertical_flip(train)
x_y_vf_test, x_min_max_vf_test, y_min_max_vf_test = vertical_flip(test)
### Map training dataset
# Build one record per (image, augmentation) pair for the training split.
# For rotate/hflip/vflip the bounding boxes were recomputed by the
# augmentation routines above; for noise/mask they are taken verbatim from
# the CSV columns of the line.
data = []
x_coord = []
y_coord = []
categ = []
xmin = []
ymin = []
xmax = []
ymax = []
size = []
names = []
radius = []
image_path = []

def _append_train_example(fields, suffix, ext, box):
    """Load res/all-mias/<name><suffix>.<ext> and append its record.

    fields -- comma-split columns of the current train.csv line
    suffix -- augmentation tag appended to the image stem (e.g. '_rotate')
    ext    -- image file extension ('jpeg' or 'pgm')
    box    -- (x, y, xmin, xmax, ymin, ymax) precomputed coordinates, or
              None to read them from columns 6, 7 and 10-13 of the line
    """
    stem = fields[1]
    path = 'res/all-mias/{:}{:}.{:}'.format(stem, suffix, ext)
    img = Image.open(path)
    data.append(np.array(img.getdata()))
    width, height = np.array(img.size)
    size.append((width, height))
    names.append(np.array('{:}{:}'.format(stem, suffix)))
    categ.append(np.array(fields[5]))
    if box is None:
        x_coord.append(np.array(fields[6]))
        y_coord.append(np.array(fields[7]))
        xmin.append(np.array(fields[10]))
        xmax.append(np.array(fields[11]))
        ymin.append(np.array(fields[12]))
        # Last column: strip via int() as in the original parsing.
        ymax.append(int(np.array(fields[13])))
    else:
        x_coord.append(box[0])
        y_coord.append(box[1])
        xmin.append(box[2])
        xmax.append(box[3])
        ymin.append(box[4])
        ymax.append(box[5])
    radius.append(np.array(fields[8].rstrip()))
    image_path.append(path)

with open('outputs/train.csv') as file:
    next(file)  # skip header
    for z, l in enumerate(file):
        fields = l.split(",")
        _append_train_example(fields, '_rotate', 'jpeg',
                              (x_y_train[z][0][0], x_y_train[z][0][1],
                               x_min_max_train[z][0][0], x_min_max_train[z][0][1],
                               y_min_max_train[z][0][0], y_min_max_train[z][0][1]))
        _append_train_example(fields, '_hflip', 'jpeg',
                              (x_y_hf_train[z][0][0], x_y_hf_train[z][0][1],
                               x_min_max_hf_train[z][0][0], x_min_max_hf_train[z][0][1],
                               y_min_max_hf_train[z][0][0], y_min_max_hf_train[z][0][1]))
        _append_train_example(fields, '_vflip', 'jpeg',
                              (x_y_vf_train[z][0][0], x_y_vf_train[z][0][1],
                               x_min_max_vf_train[z][0][0], x_min_max_vf_train[z][0][1],
                               y_min_max_vf_train[z][0][0], y_min_max_vf_train[z][0][1]))
        _append_train_example(fields, '_noise', 'jpeg', None)
        _append_train_example(fields, '_mask', 'pgm', None)

arr = np.array([names, data, size, categ, x_coord, y_coord, xmin, xmax, ymin,
                ymax, radius, image_path], dtype = object).T.tolist()
train_ = pd.DataFrame(data = arr, columns = ['name', 'features', 'size', 'cat',
                                             'x_coord', 'y_coord', 'xmin', 'xmax',
                                             'ymin', 'ymax', 'radius', 'image path'])
train_pp = pd.concat([train, train_])

# NOTE(review): the two statements below discard their results; as written
# they only act as a numeric-convertibility check (they raise on bad values).
# Confirm whether an assignment was intended.
train_pp['radius'].apply(float).apply(int) + train_pp['x_coord'].apply(float).apply(int) + train_pp['y_coord'].apply(float).apply(int)
+ train_pp['xmin'].apply(int)+ train_pp['xmax'].apply(int) + train_pp['ymin'].apply(int) + train_pp['ymax'].apply(int)

train_pp.to_csv("outputs/images_preprocess_train.csv", sep = ";")
### Map testing dataset
# Same record-building as the training split, reading outputs/test.csv and
# the *_test coordinate arrays.
data = []
x_coord = []
y_coord = []
categ = []
xmin = []
ymin = []
xmax = []
ymax = []
size = []
names = []
radius = []
image_path = []

def _append_test_example(fields, suffix, ext, box):
    """Load res/all-mias/<name><suffix>.<ext> and append its record.

    fields -- comma-split columns of the current test.csv line
    suffix -- augmentation tag appended to the image stem (e.g. '_rotate')
    ext    -- image file extension ('jpeg' or 'pgm')
    box    -- (x, y, xmin, xmax, ymin, ymax) precomputed coordinates, or
              None to read them from columns 6, 7 and 10-13 of the line
    """
    stem = fields[1]
    path = 'res/all-mias/{:}{:}.{:}'.format(stem, suffix, ext)
    img = Image.open(path)
    data.append(np.array(img.getdata()))
    width, height = np.array(img.size)
    size.append((width, height))
    names.append(np.array('{:}{:}'.format(stem, suffix)))
    categ.append(np.array(fields[5]))
    if box is None:
        x_coord.append(np.array(fields[6]))
        y_coord.append(np.array(fields[7]))
        xmin.append(np.array(fields[10]))
        xmax.append(np.array(fields[11]))
        ymin.append(np.array(fields[12]))
        ymax.append(int(np.array(fields[13])))
    else:
        x_coord.append(box[0])
        y_coord.append(box[1])
        xmin.append(box[2])
        xmax.append(box[3])
        ymin.append(box[4])
        ymax.append(box[5])
    radius.append(np.array(fields[8].rstrip()))
    image_path.append(path)

with open('outputs/test.csv') as file:
    next(file)  # skip header
    for z, l in enumerate(file):
        fields = l.split(",")
        _append_test_example(fields, '_rotate', 'jpeg',
                             (x_y_test[z][0][0], x_y_test[z][0][1],
                              x_min_max_test[z][0][0], x_min_max_test[z][0][1],
                              y_min_max_test[z][0][0], y_min_max_test[z][0][1]))
        _append_test_example(fields, '_hflip', 'jpeg',
                             (x_y_hf_test[z][0][0], x_y_hf_test[z][0][1],
                              x_min_max_hf_test[z][0][0], x_min_max_hf_test[z][0][1],
                              y_min_max_hf_test[z][0][0], y_min_max_hf_test[z][0][1]))
        _append_test_example(fields, '_vflip', 'jpeg',
                             (x_y_vf_test[z][0][0], x_y_vf_test[z][0][1],
                              x_min_max_vf_test[z][0][0], x_min_max_vf_test[z][0][1],
                              y_min_max_vf_test[z][0][0], y_min_max_vf_test[z][0][1]))
        _append_test_example(fields, '_noise', 'jpeg', None)
        _append_test_example(fields, '_mask', 'pgm', None)

arr = np.array([names, data, size, categ, x_coord, y_coord, xmin, xmax, ymin,
                ymax, radius, image_path], dtype = object).T.tolist()
test_ = pd.DataFrame(data = arr, columns = ['name', 'features', 'size', 'cat',
                                            'x_coord', 'y_coord', 'xmin', 'xmax',
                                            'ymin', 'ymax', 'radius', 'image path'])
test_pp = pd.concat([test, test_])

# NOTE(review): the two statements below discard their results; as written
# they only act as a numeric-convertibility check (they raise on bad values).
# Confirm whether an assignment was intended.
test_pp['radius'].apply(float).apply(int) + test_pp['x_coord'].apply(float).apply(int) + test_pp['y_coord'].apply(float).apply(int)
+ test_pp['xmin'].apply(int)+ test_pp['xmax'].apply(int) + test_pp['ymin'].apply(int) + test_pp['ymax'].apply(int)

test_pp.to_csv("outputs/images_preprocess_test.csv", sep = ";")
| 36.488136
| 135
| 0.605537
| 1,724
| 10,764
| 3.581787
| 0.071346
| 0.086154
| 0.105263
| 0.092632
| 0.83498
| 0.805668
| 0.786073
| 0.765668
| 0.765668
| 0.643077
| 0
| 0.017243
| 0.181067
| 10,764
| 295
| 136
| 36.488136
| 0.683267
| 0.00864
| 0
| 0.714286
| 1
| 0
| 0.077955
| 0.035835
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.003759
| 0.041353
| 0
| 0.041353
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6746aed80884ac7bc216835df751a53dbb1a637d
| 172
|
py
|
Python
|
tests/python_print_width/print_width.py
|
hixio-mh/plugin-python
|
d59dca4b6166dc20eec3e7aa57b0649c072507ce
|
[
"MIT"
] | 362
|
2018-02-17T10:25:11.000Z
|
2022-03-30T21:04:59.000Z
|
tests/python_print_width/print_width.py
|
hixio-mh/plugin-python
|
d59dca4b6166dc20eec3e7aa57b0649c072507ce
|
[
"MIT"
] | 70
|
2018-02-17T04:00:14.000Z
|
2019-08-21T18:01:52.000Z
|
tests/python_print_width/print_width.py
|
hixio-mh/plugin-python
|
d59dca4b6166dc20eec3e7aa57b0649c072507ce
|
[
"MIT"
] | 36
|
2018-02-18T23:11:25.000Z
|
2021-09-20T07:19:36.000Z
|
def foo():
    # Formatter print-width fixture: the first string fits in a 79-column
    # line, the second makes an 80-column line.  The second return is
    # intentionally unreachable — do not "fix" it; the file exists to test
    # line-width handling, not to run.
    return "line_with_79_chars_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
    return "line_with_80_chars_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
| 43
| 80
| 0.877907
| 14
| 172
| 10.214286
| 0.714286
| 0.13986
| 0.195804
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025316
| 0.081395
| 172
| 3
| 81
| 57.333333
| 0.879747
| 0
| 0
| 0
| 0
| 0
| 0.773256
| 0.773256
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
674e223aa9c9a3f3c8aa39a658b579ea26b1dce8
| 9,754
|
py
|
Python
|
DOT_assignment/run.py
|
kachark/FormFlight
|
94189581ecd28ab5d9d30e2b171a3fa3296029a7
|
[
"MIT"
] | 5
|
2019-11-03T06:35:28.000Z
|
2021-05-25T16:21:28.000Z
|
DOT_assignment/run.py
|
kachark/FormFlight
|
94189581ecd28ab5d9d30e2b171a3fa3296029a7
|
[
"MIT"
] | null | null | null |
DOT_assignment/run.py
|
kachark/FormFlight
|
94189581ecd28ab5d9d30e2b171a3fa3296029a7
|
[
"MIT"
] | null | null | null |
""" @file run.py
"""
from time import time, process_time
import pandas as pd
from DOT_assignment import agents as ag
from DOT_assignment import controls
from DOT_assignment import dynamics
from DOT_assignment import engine
from DOT_assignment import systems
from DOT_assignment import post_process
#######################
# These functions perform the individual simulations. They organize the intial conditions into
# appropriate data structures (ie. Agents, Points), setup the simulation engine, simulation scenario
# (ie. formations) and return the results and diagnostics
# TODO update function naming convention
######################
def run_identical_doubleint_2D(dx, du, statespace, x0, ltidyn, poltrack, apol,
        assignment_epoch, nagents, ntargets, collisions, collision_tol, dt=0.01, maxtime=10):
    """ Setup the engine and run a 2D double-integrator one-vs-one scenario.

    Input:
       - dx: agent statesize
       - du: agent control input size
       - statespace: dict describing agent position, velocity etc. components
       - x0: initial agent, target, target terminal states
       - ltidyn: agent dynamics model (homogeneous across agent swarm)
       - poltrack: agent control policies, indexed per agent (poltrack[ii])
       - apol: assignment policy
       - assignment_epoch: number of ticks at which to perform assignment
       - nagents: number of agents
       - ntargets: number of targets
       - collisions: collisions on/off
       - collision_tol: absolute distance below which a collision is counted
       - dt: engine tick size
       - maxtime: simulation time

    Output: Returns simulation results (output list) and diagnostics,
    where diagnostics include the measured CPU run time.
    """

    dim = 2  # planar scenario

    # One tracking agent per policy; targets are passive ag.Point instances.
    agents = [ag.TrackingAgent(dx, du, statespace, dim, ltidyn, poltrack[ii]) for ii in range(nagents)]
    targets = [ag.Point(dx, du, statespace, dim) for ii in range(ntargets)]

    # setup the scenario and engine
    sys = systems.OneVOneFormation(agents, targets, apol, assignment_epoch)
    eng = engine.Engine(dim=dim, dt=dt, maxtime=maxtime, collisions=collisions, collision_tol=collision_tol)

    # CPU time of the integration itself.
    start_run_time = process_time()
    eng.run(x0, sys)
    elapsed_run_time = process_time() - start_run_time

    opt_asst = sys.optimal_assignment

    # post processing
    polagents = [agent.pol for agent in agents]

    # TODO - clarify what each of these fields is (kept from original).
    output = [agents, targets, eng.df, poltrack, nagents, ntargets, sys.costs, polagents, opt_asst, apol]

    ### diagnostics
    runtime_diagnostics = eng.diagnostics
    runtime = pd.DataFrame([elapsed_run_time])
    runtime_diagnostics = pd.concat([runtime_diagnostics, runtime], axis=1, ignore_index=True)
    diagnostics = [runtime_diagnostics]

    return output, diagnostics
def run_identical_doubleint_3D(dx, du, statespace, x0, ltidyn, poltrack, apol,
        assignment_epoch, nagents, ntargets, collisions, collision_tol, dt=0.01, maxtime=10):
    """ Setup the engine and run a 3D double-integrator one-vs-one scenario.

    Input:
       - dx: agent statesize
       - du: agent control input size
       - statespace: dict describing agent position, velocity etc. components
       - x0: initial agent, target, target terminal states
       - ltidyn: agent dynamics model (homogeneous across agent swarm)
       - poltrack: agent control policies, indexed per agent (poltrack[ii])
       - apol: assignment policy
       - assignment_epoch: number of ticks at which to perform assignment
       - nagents: number of agents
       - ntargets: number of targets
       - collisions: collisions on/off
       - collision_tol: absolute distance below which a collision is counted
       - dt: engine tick size
       - maxtime: simulation time

    Output: Returns simulation results (output list) and diagnostics,
    where diagnostics include the measured CPU run time.
    """

    dim = 3  # 3D scenario

    # One tracking agent per policy; targets are passive ag.Point instances.
    agents = [ag.TrackingAgent(dx, du, statespace, dim, ltidyn, poltrack[ii]) for ii in range(nagents)]
    targets = [ag.Point(dx, du, statespace, dim) for ii in range(ntargets)]

    # setup the scenario and engine
    sys = systems.OneVOneFormation(agents, targets, apol, assignment_epoch)
    eng = engine.Engine(dim=dim, dt=dt, maxtime=maxtime, collisions=collisions, collision_tol=collision_tol)

    # CPU time of the integration itself.
    start_run_time = process_time()
    eng.run(x0, sys)
    elapsed_run_time = process_time() - start_run_time

    opt_asst = sys.optimal_assignment

    # post processing
    polagents = [agent.pol for agent in agents]
    output = [agents, targets, eng.df, poltrack, nagents, ntargets, sys.costs, polagents, opt_asst, apol]

    ### diagnostics
    runtime_diagnostics = eng.diagnostics
    runtime = pd.DataFrame([elapsed_run_time])
    runtime_diagnostics = pd.concat([runtime_diagnostics, runtime], axis=1, ignore_index=True)
    diagnostics = [runtime_diagnostics]

    return output, diagnostics
def run_identical_linearized_quadcopter_2D(dx, du, statespace, x0, ltidyn, poltrack, apol,
        assignment_epoch, nagents, ntargets, collisions, collision_tol, dt=0.01, maxtime=10):
    """ Setup the engine and run a 2D linearized-quadcopter one-vs-one scenario.

    Input:
       - dx: agent statesize
       - du: agent control input size
       - statespace: dict describing agent position, velocity etc. components
       - x0: initial agent, target, target terminal states
       - ltidyn: agent dynamics model (homogeneous across agent swarm)
       - poltrack: agent control policies, indexed per agent (poltrack[ii])
       - apol: assignment policy
       - assignment_epoch: number of ticks at which to perform assignment
       - nagents: number of agents
       - ntargets: number of targets
       - collisions: collisions on/off
       - collision_tol: absolute distance below which a collision is counted
       - dt: engine tick size
       - maxtime: simulation time

    Output: Returns simulation results (output list) and diagnostics,
    where diagnostics include the measured CPU run time.
    """

    dim = 2  # planar scenario

    # One tracking agent per policy; targets are passive ag.Point instances.
    agents = [ag.TrackingAgent(dx, du, statespace, dim, ltidyn, poltrack[ii]) for ii in range(nagents)]
    targets = [ag.Point(dx, du, statespace, dim) for ii in range(ntargets)]

    # setup the scenario and engine
    sys = systems.OneVOneFormation(agents, targets, apol, assignment_epoch)
    eng = engine.Engine(dim=dim, dt=dt, maxtime=maxtime, collisions=collisions, collision_tol=collision_tol)

    # CPU time of the integration itself.
    start_run_time = process_time()
    eng.run(x0, sys)
    elapsed_run_time = process_time() - start_run_time

    opt_asst = sys.optimal_assignment

    # post processing
    polagents = [agent.pol for agent in agents]
    output = [agents, targets, eng.df, poltrack, nagents, ntargets, sys.costs, polagents, opt_asst, apol]

    ### diagnostics
    runtime_diagnostics = eng.diagnostics
    runtime = pd.DataFrame([elapsed_run_time])
    runtime_diagnostics = pd.concat([runtime_diagnostics, runtime], axis=1, ignore_index=True)
    diagnostics = [runtime_diagnostics]

    return output, diagnostics
def run_identical_linearized_quadcopter_3D(dx, du, statespace, x0, ltidyn, poltrack, apol,
        assignment_epoch, nagents, ntargets, collisions, collision_tol, dt=0.01, maxtime=10):
    """ Setup the engine and run a 3D linearized-quadcopter one-vs-one scenario.

    Input:
       - dx: agent statesize
       - du: agent control input size
       - statespace: dict describing agent position, velocity etc. components
       - x0: initial agent, target, target terminal states
       - ltidyn: agent dynamics model (homogeneous across agent swarm)
       - poltrack: agent control policies, indexed per agent (poltrack[ii])
       - apol: assignment policy
       - assignment_epoch: number of ticks at which to perform assignment
       - nagents: number of agents
       - ntargets: number of targets
       - collisions: collisions on/off
       - collision_tol: absolute distance below which a collision is counted
       - dt: engine tick size
       - maxtime: simulation time

    Output: Returns simulation results (output list) and diagnostics,
    where diagnostics include the measured CPU run time.
    """

    dim = 3  # 3D scenario

    # One tracking agent per policy; targets are passive ag.Point instances.
    agents = [ag.TrackingAgent(dx, du, statespace, dim, ltidyn, poltrack[ii]) for ii in range(nagents)]
    targets = [ag.Point(dx, du, statespace, dim) for ii in range(ntargets)]

    # setup the scenario and engine
    sys = systems.OneVOneFormation(agents, targets, apol, assignment_epoch)
    eng = engine.Engine(dim=dim, dt=dt, maxtime=maxtime, collisions=collisions, collision_tol=collision_tol)

    # time the simulation
    start_run_time = process_time()
    eng.run(x0, sys)
    elapsed_run_time = process_time() - start_run_time

    opt_asst = sys.optimal_assignment

    # post processing
    polagents = [agent.pol for agent in agents]

    # TODO - need to clarify what each of these fields are
    output = [agents, targets, eng.df, poltrack, nagents, ntargets, sys.costs, polagents, opt_asst, apol]

    ### diagnostics
    runtime_diagnostics = eng.diagnostics
    runtime = pd.DataFrame([elapsed_run_time])
    runtime_diagnostics = pd.concat([runtime_diagnostics, runtime], axis=1, ignore_index=True)
    diagnostics = [runtime_diagnostics]

    return output, diagnostics
| 38.25098
| 108
| 0.679516
| 1,158
| 9,754
| 5.615717
| 0.132988
| 0.029525
| 0.025834
| 0.033215
| 0.913886
| 0.913886
| 0.913886
| 0.913886
| 0.905736
| 0.905736
| 0
| 0.00595
| 0.24185
| 9,754
| 254
| 109
| 38.401575
| 0.873428
| 0.454378
| 0
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019685
| 0
| 1
| 0.05
| false
| 0
| 0.1
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6768afc7ce023fbaabbc6a068abbbbbb4f7ac02a
| 8,212
|
py
|
Python
|
tests/test_mrs/test_fitting.py
|
hjiang1/suspect
|
f8b320b16bbd73a95d58eea1660921d6cad16f36
|
[
"MIT"
] | null | null | null |
tests/test_mrs/test_fitting.py
|
hjiang1/suspect
|
f8b320b16bbd73a95d58eea1660921d6cad16f36
|
[
"MIT"
] | null | null | null |
tests/test_mrs/test_fitting.py
|
hjiang1/suspect
|
f8b320b16bbd73a95d58eea1660921d6cad16f36
|
[
"MIT"
] | null | null | null |
from suspect.fitting import singlet
from suspect import basis, MRSData
import numpy
import pytest
import random
# Seed the global RNG so the noise added in the fixtures below is reproducible.
numpy.random.seed(1024)
@pytest.fixture
def fixed_fid():
    """Synthetic FID: one gaussian peak (0 Hz, 50 Hz width) plus tiny uniform noise."""
    t = numpy.arange(0, 0.512, 5e-4)
    signal = basis.gaussian(t, 0, 0, 50.0)
    signal = signal + 0.00001 * (numpy.random.rand(1024) - 0.5)
    return signal
@pytest.fixture
def fixed_fid_sum():
    """Synthetic FID with two gaussian peaks (0 Hz noisy, 200 Hz clean)."""
    t = numpy.arange(0, 0.512, 5e-4)
    noisy_peak = basis.gaussian(t, 0, 0, 50.0) + 0.00001 * (numpy.random.rand(1024) - 0.5)
    shifted_peak = basis.gaussian(t, 200, 0, 50.0)
    return noisy_peak + shifted_peak
def test_gaussian(fixed_fid):
    """Fit a single noisy gaussian with a fully specified model.

    The model deliberately mixes integer, float and string values to check
    that the fitter accepts all of these representations.
    """
    data = MRSData(fixed_fid, 5e-4, 123)
    model = {
        "phase0": 0,
        "phase1": 0,
        "pcr": {
            "amplitude": 1,
            "fwhm": {"value": 45, "min": 42.0, "max": 55},
            "phase": "0",
            "frequency": 0.0,
        },
    }
    result = singlet.fit(data, model)
    fitted_pcr = result["model"]["pcr"]
    # Recovered parameters should match the fixture's true peak (fwhm 50, amp 1, freq 0).
    numpy.testing.assert_allclose(fitted_pcr["fwhm"], 50.0, rtol=1e-2)
    numpy.testing.assert_allclose(fitted_pcr["amplitude"], 1.0, rtol=2e-2)
    numpy.testing.assert_allclose(fitted_pcr["frequency"], 0.0, atol=1e-1)
    numpy.testing.assert_allclose(result["fit"], fixed_fid, atol=0.001)
    # The fitted curve should keep the MRSData type of the input.
    assert isinstance(result["fit"], MRSData)
def test_bad_param(fixed_fid):
    """An unrecognised key inside the fwhm spec must raise KeyError."""
    data = MRSData(fixed_fid, 5e-4, 123)
    fwhm_spec = {
        "value": 45.0,
        "min": 42.0,
        "max": 55.0,
        "avg": 47,  # "avg" is not a recognised fwhm key
    }
    model = {
        "phase0": 0.0,
        "phase1": 0.0,
        "pcr": {
            "amplitude": 1.0,
            "fwhm": fwhm_spec,
            "phase": "0",
            "frequency": 0.0,
        },
    }
    with pytest.raises(KeyError):
        singlet.fit(data, model)
def test_missing_param(fixed_fid):
    """Omitting the required fwhm "value" entry must raise KeyError."""
    data = MRSData(fixed_fid, 5e-4, 123)
    model = {
        "phase0": 0,
        "phase1": 0,
        "pcr": {
            "amplitude": 1,
            "fwhm": {"min": 42, "max": 55},  # "value" deliberately omitted
            "phase": "0",
            "frequency": 0,
        },
    }
    with pytest.raises(KeyError):
        singlet.fit(data, model)
def test_missing_peak_phase(fixed_fid):
    """With no per-peak phase supplied, the fit should still succeed.

    Per the original test's intent, a missing "phase" entry is treated as a
    phase fixed to 0, so the recovered parameters match the fixture's peak.
    """
    data = MRSData(fixed_fid, 5e-4, 123)
    model = {
        "phase0": 0,
        "phase1": 0,
        "pcr": {
            "amplitude": 1,
            "fwhm": {"value": 45, "min": 42, "max": 55},
            # no "phase" entry on purpose
            "frequency": 0,
        },
    }
    result = singlet.fit(data, model)
    fitted_pcr = result["model"]["pcr"]
    numpy.testing.assert_allclose(fitted_pcr["fwhm"], 50.0, rtol=5e-2)
    numpy.testing.assert_allclose(fitted_pcr["amplitude"], 1.0, rtol=5e-2)
    numpy.testing.assert_allclose(fitted_pcr["frequency"], 0.0, atol=1e-1)
    numpy.testing.assert_allclose(result["fit"], fixed_fid, atol=0.001)
def test_missing_global_phase(fixed_fid):
    """None supplied for the global phase0/phase1 must raise TypeError."""
    data = MRSData(fixed_fid, 5e-4, 123)
    model = {
        "phase0": None,
        "phase1": None,
        "pcr": {
            "amplitude": 1.0,
            "fwhm": {"value": 45.0, "min": 42.0, "max": 55.0},
            "phase": "0",
            "frequency": 0.0,
        },
    }
    with pytest.raises(TypeError):
        singlet.fit(data, model)
def test_bad_param_value(fixed_fid):
    """None supplied for a peak amplitude must raise TypeError."""
    data = MRSData(fixed_fid, 5e-4, 123)
    model = {
        "phase0": 0.0,
        "phase1": 0.0,
        "pcr": {
            "amplitude": None,  # invalid value type
            "fwhm": {"value": 45.0, "min": 42.0, "max": 55.0},
            "phase": "0",
            "frequency": 0.0,
        },
    }
    with pytest.raises(TypeError):
        singlet.fit(data, model)
def test_circular_dependencies(fixed_fid):
    """Mutually dependent peak frequencies must raise ReferenceError.

    pcr depends on pcr2 and pcr2 depends on pcr, so the dependency graph
    contains a cycle that the fitter has to detect.
    """
    data = MRSData(fixed_fid, 5e-4, 123)
    fwhm_spec = {"value": 45.0, "min": 42.0, "max": 55.0}
    model = {
        "phase0": 0.0,
        "phase1": 0.0,
        "pcr": {
            "amplitude": 1.0,
            "fwhm": dict(fwhm_spec),
            "phase": "0",
            "frequency": "pcr2_frequency+200",
        },
        "pcr2": {
            "amplitude": 1.0,
            "fwhm": dict(fwhm_spec),
            "phase": "0",
            "frequency": "pcr_frequency-200",
        },
    }
    with pytest.raises(ReferenceError):
        singlet.fit(data, model)
def test_dependencies(fixed_fid_sum):
    """Fit two peaks where pcr2's frequency is expressed relative to pcr's."""
    data = MRSData(fixed_fid_sum, 5e-4, 123)
    fwhm_spec = {"value": 45.0, "min": 42.0, "max": 55.0}
    model = {
        "phase0": 0.0,
        "phase1": 0.0,
        "pcr": {
            "amplitude": 1.0,
            "fwhm": dict(fwhm_spec),
            "phase": "0",
            "frequency": 0,
        },
        "pcr2": {
            "amplitude": 1.0,
            "fwhm": dict(fwhm_spec),
            "phase": "0",
            # dependent parameter: always 200 Hz above pcr
            "frequency": "pcr_frequency+200",
        },
    }
    result = singlet.fit(data, model)
    fitted_pcr = result["model"]["pcr"]
    numpy.testing.assert_allclose(fitted_pcr["fwhm"], 50.0, rtol=1e-2)
    numpy.testing.assert_allclose(fitted_pcr["amplitude"], 1.0, rtol=2e-2)
    numpy.testing.assert_allclose(fitted_pcr["frequency"], 0.0, atol=1e-1)
    numpy.testing.assert_allclose(result["fit"], fixed_fid_sum, atol=0.001)
def test_reordered_dependencies(fixed_fid_sum):
    """Dependent peak declared before the peak it depends on.

    Here pcr references pcr2, which appears later in the model dict, so the
    fitter must resolve dependencies regardless of declaration order.
    """
    data = MRSData(fixed_fid_sum, 5e-4, 123)
    fwhm_spec = {"value": 45.0, "min": 42.0, "max": 55.0}
    model = {
        "phase0": 0.0,
        "phase1": 0.0,
        "pcr": {
            "amplitude": 1.0,
            "fwhm": dict(fwhm_spec),
            "phase": "0",
            "frequency": "pcr2_frequency+200",
        },
        "pcr2": {
            "amplitude": 1.0,
            "fwhm": dict(fwhm_spec),
            "phase": "0",
            "frequency": 0,
        },
    }
    result = singlet.fit(data, model)
    fitted_pcr = result["model"]["pcr"]
    numpy.testing.assert_allclose(fitted_pcr["fwhm"], 50.0, rtol=1e-2)
    numpy.testing.assert_allclose(fitted_pcr["amplitude"], 1.0, rtol=2e-2)
    # pcr sits 200 Hz above pcr2 (which fits the 0 Hz peak).
    numpy.testing.assert_allclose(fitted_pcr["frequency"], 200.0, atol=1e-1)
    numpy.testing.assert_allclose(result["fit"], fixed_fid_sum, atol=0.001)
def test_missing_dependencies(fixed_fid_sum):
    """A dependency on an undefined peak ("pcr3") must raise NameError."""
    data = MRSData(fixed_fid_sum, 5e-4, 123)
    fwhm_spec = {"value": 45.0, "min": 42.0, "max": 55.0}
    model = {
        "phase0": 0.0,
        "phase1": 0.0,
        "pcr2": {
            "amplitude": 1.0,
            "frequency": "pcr3_frequency+200",  # pcr3 does not exist
            "fwhm": dict(fwhm_spec),
            "phase": "0",
        },
        "pcr": {
            "amplitude": 1.0,
            "fwhm": dict(fwhm_spec),
            "phase": "0",
            "frequency": 0,
        },
    }
    with pytest.raises(NameError):
        singlet.fit(data, model)
| 25.582555
| 96
| 0.4849
| 939
| 8,212
| 4.124601
| 0.113951
| 0.090627
| 0.074361
| 0.10741
| 0.860057
| 0.843532
| 0.814356
| 0.810225
| 0.779757
| 0.751356
| 0
| 0.081205
| 0.361179
| 8,212
| 320
| 97
| 25.6625
| 0.657072
| 0.058208
| 0
| 0.706827
| 0
| 0
| 0.128301
| 0
| 0
| 0
| 0
| 0
| 0.068273
| 1
| 0.048193
| false
| 0
| 0.02008
| 0
| 0.076305
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
67973c9851a83cf477042a8dc4e7a0048a4ca985
| 239
|
py
|
Python
|
jsonschema_extended/__init__.py
|
janw/jsonschema-extended
|
31f19106f3b3a9b90c7e0cdbdc079c13f295f43e
|
[
"Apache-2.0"
] | null | null | null |
jsonschema_extended/__init__.py
|
janw/jsonschema-extended
|
31f19106f3b3a9b90c7e0cdbdc079c13f295f43e
|
[
"Apache-2.0"
] | null | null | null |
jsonschema_extended/__init__.py
|
janw/jsonschema-extended
|
31f19106f3b3a9b90c7e0cdbdc079c13f295f43e
|
[
"Apache-2.0"
] | null | null | null |
from .format_checker import create_format_checker # noqa: F401
from .format_checker import extended_format_checker # noqa: F401
from .validator import create_validator # noqa: F401
from .validator import ExtendedValidator # noqa: F401
| 47.8
| 65
| 0.8159
| 31
| 239
| 6.064516
| 0.322581
| 0.276596
| 0.191489
| 0.244681
| 0.489362
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057971
| 0.133891
| 239
| 4
| 66
| 59.75
| 0.850242
| 0.179916
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
67fd7dd0998911a5651eebd362ee965855f7c2a3
| 81
|
py
|
Python
|
tests/gource/test_log.py
|
wraithy/gource-summary
|
33f6946f7a19d7b26f9291a72816557dad37882a
|
[
"MIT"
] | null | null | null |
tests/gource/test_log.py
|
wraithy/gource-summary
|
33f6946f7a19d7b26f9291a72816557dad37882a
|
[
"MIT"
] | null | null | null |
tests/gource/test_log.py
|
wraithy/gource-summary
|
33f6946f7a19d7b26f9291a72816557dad37882a
|
[
"MIT"
] | null | null | null |
from ..context import remote_gource
def test_log_lines_from_commit():
    # TODO: placeholder test — no assertions implemented yet; currently only
    # verifies that the module under test imports without error.
    pass
| 13.5
| 35
| 0.777778
| 12
| 81
| 4.833333
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.160494
| 81
| 5
| 36
| 16.2
| 0.852941
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
67fecdd82f36fb69dc13b658463471139dd49b4e
| 167
|
py
|
Python
|
gen3_augur_pyutils/__init__.py
|
qiongl-CTDS/gen3-augur
|
c7eb2c5fe6b048924308cf3c91f8a82b3f7bbfd9
|
[
"Apache-2.0"
] | null | null | null |
gen3_augur_pyutils/__init__.py
|
qiongl-CTDS/gen3-augur
|
c7eb2c5fe6b048924308cf3c91f8a82b3f7bbfd9
|
[
"Apache-2.0"
] | null | null | null |
gen3_augur_pyutils/__init__.py
|
qiongl-CTDS/gen3-augur
|
c7eb2c5fe6b048924308cf3c91f8a82b3f7bbfd9
|
[
"Apache-2.0"
] | 1
|
2021-09-15T22:28:08.000Z
|
2021-09-15T22:28:08.000Z
|
from __future__ import absolute_import
from gen3_augur_pyutils.subcommands.base import Subcommand
from gen3_augur_pyutils.subcommands.parse_genbank import ParseGenBank
| 55.666667
| 69
| 0.91018
| 22
| 167
| 6.454545
| 0.590909
| 0.112676
| 0.183099
| 0.28169
| 0.43662
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012821
| 0.065868
| 167
| 3
| 69
| 55.666667
| 0.897436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
db1251321e57c3c41657fe103d13549f824ee63c
| 132,893
|
py
|
Python
|
mysite/patterns/66.py
|
BioinfoNet/prepub
|
e19c48cabf8bd22736dcef9308a5e196cfd8119a
|
[
"MIT"
] | 19
|
2016-06-17T23:36:27.000Z
|
2020-01-13T16:41:55.000Z
|
mysite/patterns/66.py
|
BioinfoNet/prepub
|
e19c48cabf8bd22736dcef9308a5e196cfd8119a
|
[
"MIT"
] | 13
|
2016-06-06T12:57:05.000Z
|
2019-02-05T02:21:00.000Z
|
patterns/66.py
|
OmnesRes/GRIMMER
|
173c99ebdb6a9edb1242d24a791d0c5d778ff643
|
[
"MIT"
] | 7
|
2017-03-28T18:12:22.000Z
|
2021-06-16T09:32:59.000Z
|
pattern_zero=[0.0, 0.01492194674, 0.02938475666, 0.0303030303, 0.04338842975, 0.04522497704, 0.05693296602, 0.05968778696, 0.06060606061, 0.07001836547, 0.07369146006, 0.07552800735, 0.0826446281, 0.08723599633, 0.08999081726, 0.09090909091, 0.0948117539, 0.10032139578, 0.10399449036, 0.10583103765, 0.10651974288, 0.1129476584, 0.11753902663, 0.11776859504, 0.12029384757, 0.12121212121, 0.12511478421, 0.12855831038, 0.13062442608, 0.13429752066, 0.13613406795, 0.13682277319, 0.13888888889, 0.14325068871, 0.14784205693, 0.14807162534, 0.14876033058, 0.15059687787, 0.15151515152, 0.15541781451, 0.15817263545, 0.15886134068, 0.16092745638, 0.16460055096, 0.16643709826, 0.16712580349, 0.16919191919, 0.17355371901, 0.17561983471, 0.17814508724, 0.17837465565, 0.17906336088, 0.18089990817, 0.18181818182, 0.18365472911, 0.18572084481, 0.18847566575, 0.18916437098, 0.19123048669, 0.19490358127, 0.19674012856, 0.19742883379, 0.19834710744, 0.1994949495, 0.20385674931, 0.20500459137, 0.20592286501, 0.20844811754, 0.20867768595, 0.20936639119, 0.21120293848, 0.21212121212, 0.21395775941, 0.21602387512, 0.21694214876, 0.21877869605, 0.21946740129, 0.22153351699, 0.22222222222, 0.22520661157, 0.22704315886, 0.2277318641, 0.22865013774, 0.2297979798, 0.23140495868, 0.23415977961, 0.23530762167, 0.23622589532, 0.23875114784, 0.23898071625, 0.23966942149, 0.24150596878, 0.24173553719, 0.24242424242, 0.24426078972, 0.24632690542, 0.24724517906, 0.2479338843, 0.24908172635, 0.24977043159, 0.25, 0.25183654729, 0.25252525253, 0.25550964187, 0.25734618916, 0.2580348944, 0.25895316804, 0.2601010101, 0.26170798898, 0.26446280992, 0.26561065197, 0.26652892562, 0.26905417815, 0.26928374656, 0.26997245179, 0.27180899908, 0.27203856749, 0.27272727273, 0.27456382002, 0.27662993572, 0.27754820937, 0.2782369146, 0.27938475666, 0.28007346189, 0.2803030303, 0.28213957759, 0.28282828283, 0.28581267218, 0.28764921947, 0.2883379247, 0.28925619835, 0.2904040404, 0.29201101928, 0.29476584022, 
0.29591368228, 0.29683195592, 0.29935720845, 0.29958677686, 0.30027548209, 0.30211202939, 0.3023415978, 0.30303030303, 0.30486685032, 0.30693296602, 0.30785123967, 0.3085399449, 0.30968778696, 0.3103764922, 0.31060606061, 0.3124426079, 0.31313131313, 0.31611570248, 0.31795224977, 0.31864095501, 0.31955922865, 0.32070707071, 0.32231404959, 0.32506887052, 0.32621671258, 0.32713498623, 0.32966023875, 0.32988980716, 0.3305785124, 0.33241505969, 0.3326446281, 0.33333333333, 0.33516988062, 0.33723599633, 0.33815426997, 0.33884297521, 0.33999081726, 0.3406795225, 0.34090909091, 0.3427456382, 0.34343434343, 0.34641873278, 0.34825528007, 0.34894398531, 0.34986225895, 0.35101010101, 0.35261707989, 0.35537190083, 0.35651974288, 0.35743801653, 0.35996326905, 0.36019283747, 0.3608815427, 0.36271808999, 0.3629476584, 0.36363636364, 0.36547291093, 0.36753902663, 0.36845730028, 0.36914600551, 0.37029384757, 0.3709825528, 0.37121212121, 0.3730486685, 0.37373737374, 0.37672176309, 0.37855831038, 0.37924701561, 0.38016528926, 0.38131313131, 0.38292011019, 0.38567493113, 0.38682277319, 0.38774104683, 0.39026629936, 0.39049586777, 0.391184573, 0.39302112029, 0.39325068871, 0.39393939394, 0.39577594123, 0.39784205693, 0.39876033058, 0.39944903581, 0.40059687787, 0.4012855831, 0.40151515152, 0.40335169881, 0.40404040404, 0.40702479339, 0.40886134068, 0.40955004591, 0.41046831956, 0.41161616162, 0.4132231405, 0.41597796143, 0.41712580349, 0.41804407714, 0.42056932966, 0.42079889807, 0.42148760331, 0.4233241506, 0.42355371901, 0.42424242424, 0.42607897153, 0.42814508724, 0.42906336088, 0.42975206612, 0.43089990817, 0.43158861341, 0.43181818182, 0.43365472911, 0.43434343434, 0.43732782369, 0.43916437098, 0.43985307622, 0.44077134986, 0.44191919192, 0.4435261708, 0.44628099174, 0.44742883379, 0.44834710744, 0.45087235996, 0.45110192838, 0.45179063361, 0.4536271809, 0.45385674931, 0.45454545455, 0.45638200184, 0.45844811754, 0.45936639119, 0.46005509642, 0.46120293848, 0.46189164371, 
0.46212121212, 0.46395775941, 0.46464646465, 0.46763085399, 0.46946740129, 0.47015610652, 0.47107438017, 0.47222222222, 0.4738292011, 0.47658402204, 0.4777318641, 0.47865013774, 0.48117539027, 0.48140495868, 0.48209366391, 0.4839302112, 0.48415977961, 0.48484848485, 0.48668503214, 0.48875114784, 0.48966942149, 0.49035812672, 0.49150596878, 0.49219467401, 0.49242424242, 0.49426078972, 0.49494949495, 0.4979338843, 0.49977043159, 0.50045913682, 0.50137741047, 0.50252525253, 0.50413223141, 0.50688705234, 0.5080348944, 0.50895316804, 0.51147842057, 0.51170798898, 0.51239669422, 0.51423324151, 0.51446280992, 0.51515151515, 0.51698806244, 0.51905417815, 0.51997245179, 0.52066115703, 0.52180899908, 0.52249770432, 0.52272727273, 0.52456382002, 0.52525252525, 0.5282369146, 0.53007346189, 0.53076216713, 0.53168044077, 0.53282828283, 0.53443526171, 0.53719008265, 0.5383379247, 0.53925619835, 0.54178145087, 0.54201101928, 0.54269972452, 0.54453627181, 0.54476584022, 0.54545454546, 0.54729109275, 0.54935720845, 0.55027548209, 0.55096418733, 0.55211202939, 0.55280073462, 0.55303030303, 0.55486685032, 0.55555555556, 0.5585399449, 0.5603764922, 0.56106519743, 0.56198347107, 0.56313131313, 0.56473829201, 0.56749311295, 0.56864095501, 0.56955922865, 0.57208448118, 0.57231404959, 0.57300275482, 0.57483930211, 0.57506887052, 0.57575757576, 0.57759412305, 0.57966023875, 0.5805785124, 0.58126721763, 0.58241505969, 0.58310376492, 0.58333333333, 0.58516988062, 0.58585858586, 0.58884297521, 0.5906795225, 0.59136822773, 0.59228650138, 0.59343434343, 0.59504132231, 0.59779614325, 0.59894398531, 0.59986225895, 0.60238751148, 0.60261707989, 0.60330578512, 0.60514233242, 0.60537190083, 0.60606060606, 0.60789715335, 0.60996326905, 0.6108815427, 0.61157024793, 0.61271808999, 0.61340679523, 0.61363636364, 0.61547291093, 0.61616161616, 0.61914600551, 0.6209825528, 0.62167125804, 0.62258953168, 0.62373737374, 0.62534435262, 0.62809917355, 0.62924701561, 0.63016528926, 0.63269054178, 0.63292011019, 
0.63360881543, 0.63544536272, 0.63567493113, 0.63636363636, 0.63820018366, 0.64026629936, 0.641184573, 0.64187327824, 0.64302112029, 0.64370982553, 0.64393939394, 0.64577594123, 0.64646464647, 0.64944903581, 0.6512855831, 0.65197428834, 0.65289256198, 0.65404040404, 0.65564738292, 0.65840220386, 0.65955004591, 0.66046831956, 0.66299357208, 0.6632231405, 0.66391184573, 0.66574839302, 0.66597796143, 0.66666666667, 0.66850321396, 0.67056932966, 0.67148760331, 0.67217630854, 0.6733241506, 0.67401285583, 0.67424242424, 0.67607897153, 0.67676767677, 0.67975206612, 0.68158861341, 0.68227731864, 0.68319559229, 0.68434343434, 0.68595041322, 0.68870523416, 0.68985307622, 0.69077134986, 0.69329660239, 0.6935261708, 0.69421487603, 0.69605142332, 0.69628099174, 0.69696969697, 0.69880624426, 0.70087235996, 0.70179063361, 0.70247933884, 0.7036271809, 0.70431588613, 0.70454545455, 0.70638200184, 0.70707070707, 0.71005509642, 0.71189164371, 0.71258034894, 0.71349862259, 0.71464646465, 0.71625344353, 0.71900826446, 0.72015610652, 0.72107438017, 0.72359963269, 0.7238292011, 0.72451790634, 0.72635445363, 0.72658402204, 0.72727272727, 0.72910927456, 0.73117539027, 0.73209366391, 0.73278236915, 0.7339302112, 0.73461891644, 0.73484848485, 0.73668503214, 0.73737373737, 0.74035812672, 0.74219467401, 0.74288337925, 0.74380165289, 0.74494949495, 0.74655647383, 0.74931129477, 0.75045913682, 0.75137741047, 0.75390266299, 0.75413223141, 0.75482093664, 0.75665748393, 0.75688705234, 0.75757575758, 0.75941230487, 0.76147842057, 0.76239669422, 0.76308539945, 0.76423324151, 0.76492194674, 0.76515151515, 0.76698806244, 0.76767676768, 0.77066115703, 0.77249770432, 0.77318640955, 0.7741046832, 0.77525252525, 0.77685950413, 0.77961432507, 0.78076216713, 0.78168044077, 0.7842056933, 0.78443526171, 0.78512396694, 0.78696051423, 0.78719008265, 0.78787878788, 0.78971533517, 0.79178145087, 0.79269972452, 0.79338842975, 0.79453627181, 0.79522497704, 0.79545454546, 0.79729109275, 0.79797979798, 0.80096418733, 
0.80280073462, 0.80348943985, 0.8044077135, 0.80555555556, 0.80716253444, 0.80991735537, 0.81106519743, 0.81198347107, 0.8145087236, 0.81473829201, 0.81542699725, 0.81726354454, 0.81749311295, 0.81818181818, 0.82001836547, 0.82208448118, 0.82300275482, 0.82369146006, 0.82483930211, 0.82552800735, 0.82575757576, 0.82759412305, 0.82828282828, 0.83126721763, 0.83310376492, 0.83379247016, 0.8347107438, 0.83585858586, 0.83746556474, 0.84022038568, 0.84136822773, 0.84228650138, 0.8448117539, 0.84504132231, 0.84573002755, 0.84756657484, 0.84779614325, 0.84848484849, 0.85032139578, 0.85238751148, 0.85330578512, 0.85399449036, 0.85514233242, 0.85583103765, 0.85606060606, 0.85789715335, 0.85858585859, 0.86157024793, 0.86340679523, 0.86409550046, 0.86501377411, 0.86616161616, 0.86776859504, 0.87052341598, 0.87167125804, 0.87258953168, 0.87511478421, 0.87534435262, 0.87603305785, 0.87786960514, 0.87809917355, 0.87878787879, 0.88062442608, 0.88269054178, 0.88360881543, 0.88429752066, 0.88544536272, 0.88613406795, 0.88636363636, 0.88820018366, 0.88888888889, 0.89187327824, 0.89370982553, 0.89439853076, 0.89531680441, 0.89646464647, 0.89807162534, 0.90082644628, 0.90197428834, 0.90289256198, 0.90541781451, 0.90564738292, 0.90633608815, 0.90817263545, 0.90840220386, 0.90909090909, 0.91092745638, 0.91299357208, 0.91391184573, 0.91460055096, 0.91574839302, 0.91643709826, 0.91666666667, 0.91850321396, 0.91919191919, 0.92217630854, 0.92401285583, 0.92470156107, 0.92561983471, 0.92676767677, 0.92837465565, 0.93112947658, 0.93227731864, 0.93319559229, 0.93572084481, 0.93595041322, 0.93663911846, 0.93847566575, 0.93870523416, 0.93939393939, 0.94123048669, 0.94329660239, 0.94421487603, 0.94490358127, 0.94605142332, 0.94674012856, 0.94696969697, 0.94880624426, 0.9494949495, 0.95247933884, 0.95431588613, 0.95500459137, 0.95592286501, 0.95707070707, 0.95867768595, 0.96143250689, 0.96258034894, 0.96349862259, 0.96602387512, 0.96625344353, 0.96694214876, 0.96877869605, 0.96900826446, 
0.9696969697, 0.97153351699, 0.97359963269, 0.97451790634, 0.97520661157, 0.97635445363, 0.97704315886, 0.97727272727, 0.97910927456, 0.9797979798, 0.98278236915, 0.98461891644, 0.98530762167, 0.98622589532, 0.98737373737, 0.98898071625, 0.99173553719, 0.99288337925, 0.99380165289, 0.99632690542, 0.99655647383, 0.99724517906, 0.99908172635, 0.99931129477]
pattern_odd=[0.0, 0.00183654729, 0.00390266299, 0.00482093664, 0.00550964187, 0.00665748393, 0.00734618916, 0.00757575758, 0.00941230487, 0.0101010101, 0.01308539945, 0.01492194674, 0.01561065197, 0.01652892562, 0.01767676768, 0.01928374656, 0.02203856749, 0.02318640955, 0.0241046832, 0.02662993572, 0.02685950413, 0.02754820937, 0.02938475666, 0.02961432507, 0.0303030303, 0.03213957759, 0.0342056933, 0.03512396694, 0.03581267218, 0.03696051423, 0.03764921947, 0.03787878788, 0.03971533517, 0.0404040404, 0.04338842975, 0.04522497704, 0.04591368228, 0.04683195592, 0.04797979798, 0.04958677686, 0.0523415978, 0.05348943985, 0.0544077135, 0.05693296602, 0.05716253444, 0.05785123967, 0.05968778696, 0.05991735537, 0.06060606061, 0.0624426079, 0.0645087236, 0.06542699725, 0.06611570248, 0.06726354454, 0.06795224977, 0.06818181818, 0.07001836547, 0.07070707071, 0.07369146006, 0.07552800735, 0.07621671258, 0.07713498623, 0.07828282828, 0.07988980716, 0.0826446281, 0.08379247016, 0.0847107438, 0.08723599633, 0.08746556474, 0.08815426997, 0.08999081726, 0.09022038568, 0.09090909091, 0.0927456382, 0.0948117539, 0.09573002755, 0.09641873278, 0.09756657484, 0.09825528007, 0.09848484849, 0.10032139578, 0.10101010101, 0.10399449036, 0.10583103765, 0.10651974288, 0.10743801653, 0.10858585859, 0.11019283747, 0.1129476584, 0.11409550046, 0.11501377411, 0.11753902663, 0.11776859504, 0.11845730028, 0.12029384757, 0.12052341598, 0.12121212121, 0.1230486685, 0.12511478421, 0.12603305785, 0.12672176309, 0.12786960514, 0.12855831038, 0.12878787879, 0.13062442608, 0.13131313131, 0.13429752066, 0.13613406795, 0.13682277319, 0.13774104683, 0.13888888889, 0.14049586777, 0.14325068871, 0.14439853076, 0.14531680441, 0.14784205693, 0.14807162534, 0.14876033058, 0.15059687787, 0.15082644628, 0.15151515152, 0.15335169881, 0.15541781451, 0.15633608815, 0.15702479339, 0.15817263545, 0.15886134068, 0.15909090909, 0.16092745638, 0.16161616162, 0.16460055096, 0.16643709826, 0.16712580349, 0.16804407714, 
0.16919191919, 0.17079889807, 0.17355371901, 0.17470156107, 0.17561983471, 0.17814508724, 0.17837465565, 0.17906336088, 0.18089990817, 0.18112947658, 0.18181818182, 0.18365472911, 0.18572084481, 0.18663911846, 0.18732782369, 0.18847566575, 0.18916437098, 0.18939393939, 0.19123048669, 0.19191919192, 0.19490358127, 0.19674012856, 0.19742883379, 0.19834710744, 0.1994949495, 0.20110192838, 0.20385674931, 0.20500459137, 0.20592286501, 0.20844811754, 0.20867768595, 0.20936639119, 0.21120293848, 0.21143250689, 0.21212121212, 0.21395775941, 0.21602387512, 0.21694214876, 0.21763085399, 0.21877869605, 0.21946740129, 0.2196969697, 0.22153351699, 0.22222222222, 0.22520661157, 0.22704315886, 0.2277318641, 0.22865013774, 0.2297979798, 0.23140495868, 0.23415977961, 0.23530762167, 0.23622589532, 0.23875114784, 0.23898071625, 0.23966942149, 0.24150596878, 0.24173553719, 0.24242424242, 0.24426078972, 0.24632690542, 0.24724517906, 0.2479338843, 0.24908172635, 0.24977043159, 0.25, 0.25183654729, 0.25252525253, 0.25550964187, 0.25734618916, 0.2580348944, 0.25895316804, 0.2601010101, 0.26170798898, 0.26446280992, 0.26561065197, 0.26652892562, 0.26905417815, 0.26928374656, 0.26997245179, 0.27180899908, 0.27203856749, 0.27272727273, 0.27456382002, 0.27662993572, 0.27754820937, 0.2782369146, 0.27938475666, 0.28007346189, 0.2803030303, 0.28213957759, 0.28282828283, 0.28581267218, 0.28764921947, 0.2883379247, 0.28925619835, 0.2904040404, 0.29201101928, 0.29476584022, 0.29591368228, 0.29683195592, 0.29935720845, 0.29958677686, 0.30027548209, 0.30211202939, 0.3023415978, 0.30303030303, 0.30486685032, 0.30693296602, 0.30785123967, 0.3085399449, 0.30968778696, 0.3103764922, 0.31060606061, 0.3124426079, 0.31313131313, 0.31611570248, 0.31795224977, 0.31864095501, 0.31955922865, 0.32070707071, 0.32231404959, 0.32506887052, 0.32621671258, 0.32713498623, 0.32966023875, 0.32988980716, 0.3305785124, 0.33241505969, 0.3326446281, 0.33333333333, 0.33516988062, 0.33723599633, 0.33815426997, 0.33884297521, 
0.33999081726, 0.3406795225, 0.34090909091, 0.3427456382, 0.34343434343, 0.34641873278, 0.34825528007, 0.34894398531, 0.34986225895, 0.35101010101, 0.35261707989, 0.35537190083, 0.35651974288, 0.35743801653, 0.35996326905, 0.36019283747, 0.3608815427, 0.36271808999, 0.3629476584, 0.36363636364, 0.36547291093, 0.36753902663, 0.36845730028, 0.36914600551, 0.37029384757, 0.3709825528, 0.37121212121, 0.3730486685, 0.37373737374, 0.37672176309, 0.37855831038, 0.37924701561, 0.38016528926, 0.38131313131, 0.38292011019, 0.38567493113, 0.38682277319, 0.38774104683, 0.39026629936, 0.39049586777, 0.391184573, 0.39302112029, 0.39325068871, 0.39393939394, 0.39577594123, 0.39784205693, 0.39876033058, 0.39944903581, 0.40059687787, 0.4012855831, 0.40151515152, 0.40335169881, 0.40404040404, 0.40702479339, 0.40886134068, 0.40955004591, 0.41046831956, 0.41161616162, 0.4132231405, 0.41597796143, 0.41712580349, 0.41804407714, 0.42056932966, 0.42079889807, 0.42148760331, 0.4233241506, 0.42355371901, 0.42424242424, 0.42607897153, 0.42814508724, 0.42906336088, 0.42975206612, 0.43089990817, 0.43158861341, 0.43181818182, 0.43365472911, 0.43434343434, 0.43732782369, 0.43916437098, 0.43985307622, 0.44077134986, 0.44191919192, 0.4435261708, 0.44628099174, 0.44742883379, 0.44834710744, 0.45087235996, 0.45110192838, 0.45179063361, 0.4536271809, 0.45385674931, 0.45454545455, 0.45638200184, 0.45844811754, 0.45936639119, 0.46005509642, 0.46120293848, 0.46189164371, 0.46212121212, 0.46395775941, 0.46464646465, 0.46763085399, 0.46946740129, 0.47015610652, 0.47107438017, 0.47222222222, 0.4738292011, 0.47658402204, 0.4777318641, 0.47865013774, 0.48117539027, 0.48140495868, 0.48209366391, 0.4839302112, 0.48415977961, 0.48484848485, 0.48668503214, 0.48875114784, 0.48966942149, 0.49035812672, 0.49150596878, 0.49219467401, 0.49242424242, 0.49426078972, 0.49494949495, 0.4979338843, 0.49977043159, 0.50045913682, 0.50137741047, 0.50252525253, 0.50413223141, 0.50688705234, 0.5080348944, 0.50895316804, 
0.51147842057, 0.51170798898, 0.51239669422, 0.51423324151, 0.51446280992, 0.51515151515, 0.51698806244, 0.51905417815, 0.51997245179, 0.52066115703, 0.52180899908, 0.52249770432, 0.52272727273, 0.52456382002, 0.52525252525, 0.5282369146, 0.53007346189, 0.53076216713, 0.53168044077, 0.53282828283, 0.53443526171, 0.53719008265, 0.5383379247, 0.53925619835, 0.54178145087, 0.54201101928, 0.54269972452, 0.54453627181, 0.54476584022, 0.54545454546, 0.54729109275, 0.54935720845, 0.55027548209, 0.55096418733, 0.55211202939, 0.55280073462, 0.55303030303, 0.55486685032, 0.55555555556, 0.5585399449, 0.5603764922, 0.56106519743, 0.56198347107, 0.56313131313, 0.56473829201, 0.56749311295, 0.56864095501, 0.56955922865, 0.57208448118, 0.57231404959, 0.57300275482, 0.57483930211, 0.57506887052, 0.57575757576, 0.57759412305, 0.57966023875, 0.5805785124, 0.58126721763, 0.58241505969, 0.58310376492, 0.58333333333, 0.58516988062, 0.58585858586, 0.58884297521, 0.5906795225, 0.59136822773, 0.59228650138, 0.59343434343, 0.59504132231, 0.59779614325, 0.59894398531, 0.59986225895, 0.60238751148, 0.60261707989, 0.60330578512, 0.60514233242, 0.60537190083, 0.60606060606, 0.60789715335, 0.60996326905, 0.6108815427, 0.61157024793, 0.61271808999, 0.61340679523, 0.61363636364, 0.61547291093, 0.61616161616, 0.61914600551, 0.6209825528, 0.62167125804, 0.62258953168, 0.62373737374, 0.62534435262, 0.62809917355, 0.62924701561, 0.63016528926, 0.63269054178, 0.63292011019, 0.63360881543, 0.63544536272, 0.63567493113, 0.63636363636, 0.63820018366, 0.64026629936, 0.641184573, 0.64187327824, 0.64302112029, 0.64370982553, 0.64393939394, 0.64577594123, 0.64646464647, 0.64944903581, 0.6512855831, 0.65197428834, 0.65289256198, 0.65404040404, 0.65564738292, 0.65840220386, 0.65955004591, 0.66046831956, 0.66299357208, 0.6632231405, 0.66391184573, 0.66574839302, 0.66597796143, 0.66666666667, 0.66850321396, 0.67056932966, 0.67148760331, 0.67217630854, 0.6733241506, 0.67401285583, 0.67424242424, 0.67607897153, 
0.67676767677, 0.67975206612, 0.68158861341, 0.68227731864, 0.68319559229, 0.68434343434, 0.68595041322, 0.68870523416, 0.68985307622, 0.69077134986, 0.69329660239, 0.6935261708, 0.69421487603, 0.69605142332, 0.69628099174, 0.69696969697, 0.69880624426, 0.70087235996, 0.70179063361, 0.70247933884, 0.7036271809, 0.70431588613, 0.70454545455, 0.70638200184, 0.70707070707, 0.71005509642, 0.71189164371, 0.71258034894, 0.71349862259, 0.71464646465, 0.71625344353, 0.71900826446, 0.72015610652, 0.72107438017, 0.72359963269, 0.7238292011, 0.72451790634, 0.72635445363, 0.72658402204, 0.72727272727, 0.72910927456, 0.73117539027, 0.73209366391, 0.73278236915, 0.7339302112, 0.73461891644, 0.73484848485, 0.73668503214, 0.73737373737, 0.74035812672, 0.74219467401, 0.74288337925, 0.74380165289, 0.74494949495, 0.74655647383, 0.74931129477, 0.75045913682, 0.75137741047, 0.75390266299, 0.75413223141, 0.75482093664, 0.75665748393, 0.75688705234, 0.75757575758, 0.75941230487, 0.76147842057, 0.76239669422, 0.76308539945, 0.76423324151, 0.76492194674, 0.76515151515, 0.76698806244, 0.76767676768, 0.77066115703, 0.77249770432, 0.77318640955, 0.7741046832, 0.77525252525, 0.77685950413, 0.77961432507, 0.78076216713, 0.78168044077, 0.7842056933, 0.78443526171, 0.78512396694, 0.78696051423, 0.78719008265, 0.78787878788, 0.78971533517, 0.79178145087, 0.79269972452, 0.79338842975, 0.79453627181, 0.79522497704, 0.79545454546, 0.79729109275, 0.79797979798, 0.80096418733, 0.80280073462, 0.80348943985, 0.8044077135, 0.80555555556, 0.80716253444, 0.80991735537, 0.81106519743, 0.81198347107, 0.8145087236, 0.81473829201, 0.81542699725, 0.81726354454, 0.81749311295, 0.81818181818, 0.82001836547, 0.82208448118, 0.82300275482, 0.82369146006, 0.82483930211, 0.82552800735, 0.82575757576, 0.82759412305, 0.82828282828, 0.83126721763, 0.83310376492, 0.83379247016, 0.8347107438, 0.83585858586, 0.83746556474, 0.84022038568, 0.84136822773, 0.84228650138, 0.8448117539, 0.84504132231, 0.84573002755, 0.84756657484, 
0.84779614325, 0.84848484849, 0.85032139578, 0.85238751148, 0.85330578512, 0.85399449036, 0.85514233242, 0.85583103765, 0.85606060606, 0.85789715335, 0.85858585859, 0.86157024793, 0.86340679523, 0.86409550046, 0.86501377411, 0.86616161616, 0.86776859504, 0.87052341598, 0.87167125804, 0.87258953168, 0.87511478421, 0.87534435262, 0.87603305785, 0.87786960514, 0.87809917355, 0.87878787879, 0.88062442608, 0.88269054178, 0.88360881543, 0.88429752066, 0.88544536272, 0.88613406795, 0.88636363636, 0.88820018366, 0.88888888889, 0.89187327824, 0.89370982553, 0.89439853076, 0.89531680441, 0.89646464647, 0.89807162534, 0.90082644628, 0.90197428834, 0.90289256198, 0.90541781451, 0.90564738292, 0.90633608815, 0.90817263545, 0.90840220386, 0.90909090909, 0.91092745638, 0.91299357208, 0.91391184573, 0.91460055096, 0.91574839302, 0.91643709826, 0.91666666667, 0.91850321396, 0.91919191919, 0.92217630854, 0.92401285583, 0.92470156107, 0.92561983471, 0.92676767677, 0.92837465565, 0.93112947658, 0.93227731864, 0.93319559229, 0.93572084481, 0.93595041322, 0.93663911846, 0.93847566575, 0.93870523416, 0.93939393939, 0.94123048669, 0.94329660239, 0.94421487603, 0.94490358127, 0.94605142332, 0.94674012856, 0.94696969697, 0.94880624426, 0.9494949495, 0.95247933884, 0.95431588613, 0.95500459137, 0.95592286501, 0.95707070707, 0.95867768595, 0.96143250689, 0.96258034894, 0.96349862259, 0.96602387512, 0.96625344353, 0.96694214876, 0.96877869605, 0.96900826446, 0.9696969697, 0.97153351699, 0.97359963269, 0.97451790634, 0.97520661157, 0.97635445363, 0.97704315886, 0.97727272727, 0.97910927456, 0.9797979798, 0.98278236915, 0.98461891644, 0.98530762167, 0.98622589532, 0.98737373737, 0.98898071625, 0.99173553719, 0.99288337925, 0.99380165289, 0.99632690542, 0.99655647383, 0.99724517906, 0.99908172635, 0.99931129477]
pattern_even=[0.0, 0.00183654729, 0.00390266299, 0.00482093664, 0.00550964187, 0.00665748393, 0.00734618916, 0.00757575758, 0.00941230487, 0.0101010101, 0.01308539945, 0.01492194674, 0.01561065197, 0.01652892562, 0.01767676768, 0.01928374656, 0.02203856749, 0.02318640955, 0.0241046832, 0.02662993572, 0.02685950413, 0.02754820937, 0.02938475666, 0.02961432507, 0.0303030303, 0.03213957759, 0.0342056933, 0.03512396694, 0.03581267218, 0.03696051423, 0.03764921947, 0.03787878788, 0.03971533517, 0.0404040404, 0.04338842975, 0.04522497704, 0.04591368228, 0.04683195592, 0.04797979798, 0.04958677686, 0.0523415978, 0.05348943985, 0.0544077135, 0.05693296602, 0.05716253444, 0.05785123967, 0.05968778696, 0.05991735537, 0.06060606061, 0.0624426079, 0.0645087236, 0.06542699725, 0.06611570248, 0.06726354454, 0.06795224977, 0.06818181818, 0.07001836547, 0.07070707071, 0.07369146006, 0.07552800735, 0.07621671258, 0.07713498623, 0.07828282828, 0.07988980716, 0.0826446281, 0.08379247016, 0.0847107438, 0.08723599633, 0.08746556474, 0.08815426997, 0.08999081726, 0.09022038568, 0.09090909091, 0.0927456382, 0.0948117539, 0.09573002755, 0.09641873278, 0.09756657484, 0.09825528007, 0.09848484849, 0.10032139578, 0.10101010101, 0.10399449036, 0.10583103765, 0.10651974288, 0.10743801653, 0.10858585859, 0.11019283747, 0.1129476584, 0.11409550046, 0.11501377411, 0.11753902663, 0.11776859504, 0.11845730028, 0.12029384757, 0.12052341598, 0.12121212121, 0.1230486685, 0.12511478421, 0.12603305785, 0.12672176309, 0.12786960514, 0.12855831038, 0.12878787879, 0.13062442608, 0.13131313131, 0.13429752066, 0.13613406795, 0.13682277319, 0.13774104683, 0.13888888889, 0.14049586777, 0.14325068871, 0.14439853076, 0.14531680441, 0.14784205693, 0.14807162534, 0.14876033058, 0.15059687787, 0.15082644628, 0.15151515152, 0.15335169881, 0.15541781451, 0.15633608815, 0.15702479339, 0.15817263545, 0.15886134068, 0.15909090909, 0.16092745638, 0.16161616162, 0.16460055096, 0.16643709826, 0.16712580349, 0.16804407714, 
0.16919191919, 0.17079889807, 0.17355371901, 0.17470156107, 0.17561983471, 0.17814508724, 0.17837465565, 0.17906336088, 0.18089990817, 0.18112947658, 0.18181818182, 0.18365472911, 0.18572084481, 0.18663911846, 0.18732782369, 0.18847566575, 0.18916437098, 0.18939393939, 0.19123048669, 0.19191919192, 0.19490358127, 0.19674012856, 0.19742883379, 0.19834710744, 0.1994949495, 0.20110192838, 0.20385674931, 0.20500459137, 0.20592286501, 0.20844811754, 0.20867768595, 0.20936639119, 0.21120293848, 0.21143250689, 0.21212121212, 0.21395775941, 0.21602387512, 0.21694214876, 0.21763085399, 0.21877869605, 0.21946740129, 0.2196969697, 0.22153351699, 0.22222222222, 0.22520661157, 0.22704315886, 0.2277318641, 0.22865013774, 0.2297979798, 0.23140495868, 0.23415977961, 0.23530762167, 0.23622589532, 0.23875114784, 0.23898071625, 0.23966942149, 0.24150596878, 0.24173553719, 0.24242424242, 0.24426078972, 0.24632690542, 0.24724517906, 0.2479338843, 0.24908172635, 0.24977043159, 0.25, 0.25183654729, 0.25252525253, 0.25550964187, 0.25734618916, 0.2580348944, 0.25895316804, 0.2601010101, 0.26170798898, 0.26446280992, 0.26561065197, 0.26652892562, 0.26905417815, 0.26928374656, 0.26997245179, 0.27180899908, 0.27203856749, 0.27272727273, 0.27456382002, 0.27662993572, 0.27754820937, 0.2782369146, 0.27938475666, 0.28007346189, 0.2803030303, 0.28213957759, 0.28282828283, 0.28581267218, 0.28764921947, 0.2883379247, 0.28925619835, 0.2904040404, 0.29201101928, 0.29476584022, 0.29591368228, 0.29683195592, 0.29935720845, 0.29958677686, 0.30027548209, 0.30211202939, 0.3023415978, 0.30303030303, 0.30486685032, 0.30693296602, 0.30785123967, 0.3085399449, 0.30968778696, 0.3103764922, 0.31060606061, 0.3124426079, 0.31313131313, 0.31611570248, 0.31795224977, 0.31864095501, 0.31955922865, 0.32070707071, 0.32231404959, 0.32506887052, 0.32621671258, 0.32713498623, 0.32966023875, 0.32988980716, 0.3305785124, 0.33241505969, 0.3326446281, 0.33333333333, 0.33516988062, 0.33723599633, 0.33815426997, 0.33884297521, 
0.33999081726, 0.3406795225, 0.34090909091, 0.3427456382, 0.34343434343, 0.34641873278, 0.34825528007, 0.34894398531, 0.34986225895, 0.35101010101, 0.35261707989, 0.35537190083, 0.35651974288, 0.35743801653, 0.35996326905, 0.36019283747, 0.3608815427, 0.36271808999, 0.3629476584, 0.36363636364, 0.36547291093, 0.36753902663, 0.36845730028, 0.36914600551, 0.37029384757, 0.3709825528, 0.37121212121, 0.3730486685, 0.37373737374, 0.37672176309, 0.37855831038, 0.37924701561, 0.38016528926, 0.38131313131, 0.38292011019, 0.38567493113, 0.38682277319, 0.38774104683, 0.39026629936, 0.39049586777, 0.391184573, 0.39302112029, 0.39325068871, 0.39393939394, 0.39577594123, 0.39784205693, 0.39876033058, 0.39944903581, 0.40059687787, 0.4012855831, 0.40151515152, 0.40335169881, 0.40404040404, 0.40702479339, 0.40886134068, 0.40955004591, 0.41046831956, 0.41161616162, 0.4132231405, 0.41597796143, 0.41712580349, 0.41804407714, 0.42056932966, 0.42079889807, 0.42148760331, 0.4233241506, 0.42355371901, 0.42424242424, 0.42607897153, 0.42814508724, 0.42906336088, 0.42975206612, 0.43089990817, 0.43158861341, 0.43181818182, 0.43365472911, 0.43434343434, 0.43732782369, 0.43916437098, 0.43985307622, 0.44077134986, 0.44191919192, 0.4435261708, 0.44628099174, 0.44742883379, 0.44834710744, 0.45087235996, 0.45110192838, 0.45179063361, 0.4536271809, 0.45385674931, 0.45454545455, 0.45638200184, 0.45844811754, 0.45936639119, 0.46005509642, 0.46120293848, 0.46189164371, 0.46212121212, 0.46395775941, 0.46464646465, 0.46763085399, 0.46946740129, 0.47015610652, 0.47107438017, 0.47222222222, 0.4738292011, 0.47658402204, 0.4777318641, 0.47865013774, 0.48117539027, 0.48140495868, 0.48209366391, 0.4839302112, 0.48415977961, 0.48484848485, 0.48668503214, 0.48875114784, 0.48966942149, 0.49035812672, 0.49150596878, 0.49219467401, 0.49242424242, 0.49426078972, 0.49494949495, 0.4979338843, 0.49977043159, 0.50045913682, 0.50137741047, 0.50252525253, 0.50413223141, 0.50688705234, 0.5080348944, 0.50895316804, 
0.51147842057, 0.51170798898, 0.51239669422, 0.51423324151, 0.51446280992, 0.51515151515, 0.51698806244, 0.51905417815, 0.51997245179, 0.52066115703, 0.52180899908, 0.52249770432, 0.52272727273, 0.52456382002, 0.52525252525, 0.5282369146, 0.53007346189, 0.53076216713, 0.53168044077, 0.53282828283, 0.53443526171, 0.53719008265, 0.5383379247, 0.53925619835, 0.54178145087, 0.54201101928, 0.54269972452, 0.54453627181, 0.54476584022, 0.54545454546, 0.54729109275, 0.54935720845, 0.55027548209, 0.55096418733, 0.55211202939, 0.55280073462, 0.55303030303, 0.55486685032, 0.55555555556, 0.5585399449, 0.5603764922, 0.56106519743, 0.56198347107, 0.56313131313, 0.56473829201, 0.56749311295, 0.56864095501, 0.56955922865, 0.57208448118, 0.57231404959, 0.57300275482, 0.57483930211, 0.57506887052, 0.57575757576, 0.57759412305, 0.57966023875, 0.5805785124, 0.58126721763, 0.58241505969, 0.58310376492, 0.58333333333, 0.58516988062, 0.58585858586, 0.58884297521, 0.5906795225, 0.59136822773, 0.59228650138, 0.59343434343, 0.59504132231, 0.59779614325, 0.59894398531, 0.59986225895, 0.60238751148, 0.60261707989, 0.60330578512, 0.60514233242, 0.60537190083, 0.60606060606, 0.60789715335, 0.60996326905, 0.6108815427, 0.61157024793, 0.61271808999, 0.61340679523, 0.61363636364, 0.61547291093, 0.61616161616, 0.61914600551, 0.6209825528, 0.62167125804, 0.62258953168, 0.62373737374, 0.62534435262, 0.62809917355, 0.62924701561, 0.63016528926, 0.63269054178, 0.63292011019, 0.63360881543, 0.63544536272, 0.63567493113, 0.63636363636, 0.63820018366, 0.64026629936, 0.641184573, 0.64187327824, 0.64302112029, 0.64370982553, 0.64393939394, 0.64577594123, 0.64646464647, 0.64944903581, 0.6512855831, 0.65197428834, 0.65289256198, 0.65404040404, 0.65564738292, 0.65840220386, 0.65955004591, 0.66046831956, 0.66299357208, 0.6632231405, 0.66391184573, 0.66574839302, 0.66597796143, 0.66666666667, 0.66850321396, 0.67056932966, 0.67148760331, 0.67217630854, 0.6733241506, 0.67401285583, 0.67424242424, 0.67607897153, 
0.67676767677, 0.67975206612, 0.68158861341, 0.68227731864, 0.68319559229, 0.68434343434, 0.68595041322, 0.68870523416, 0.68985307622, 0.69077134986, 0.69329660239, 0.6935261708, 0.69421487603, 0.69605142332, 0.69628099174, 0.69696969697, 0.69880624426, 0.70087235996, 0.70179063361, 0.70247933884, 0.7036271809, 0.70431588613, 0.70454545455, 0.70638200184, 0.70707070707, 0.71005509642, 0.71189164371, 0.71258034894, 0.71349862259, 0.71464646465, 0.71625344353, 0.71900826446, 0.72015610652, 0.72107438017, 0.72359963269, 0.7238292011, 0.72451790634, 0.72635445363, 0.72658402204, 0.72727272727, 0.72910927456, 0.73117539027, 0.73209366391, 0.73278236915, 0.7339302112, 0.73461891644, 0.73484848485, 0.73668503214, 0.73737373737, 0.74035812672, 0.74219467401, 0.74288337925, 0.74380165289, 0.74494949495, 0.74655647383, 0.74931129477, 0.75045913682, 0.75137741047, 0.75390266299, 0.75413223141, 0.75482093664, 0.75665748393, 0.75688705234, 0.75757575758, 0.75941230487, 0.76147842057, 0.76239669422, 0.76308539945, 0.76423324151, 0.76492194674, 0.76515151515, 0.76698806244, 0.76767676768, 0.77066115703, 0.77249770432, 0.77318640955, 0.7741046832, 0.77525252525, 0.77685950413, 0.77961432507, 0.78076216713, 0.78168044077, 0.7842056933, 0.78443526171, 0.78512396694, 0.78696051423, 0.78719008265, 0.78787878788, 0.78971533517, 0.79178145087, 0.79269972452, 0.79338842975, 0.79453627181, 0.79522497704, 0.79545454546, 0.79729109275, 0.79797979798, 0.80096418733, 0.80280073462, 0.80348943985, 0.8044077135, 0.80555555556, 0.80716253444, 0.80991735537, 0.81106519743, 0.81198347107, 0.8145087236, 0.81473829201, 0.81542699725, 0.81726354454, 0.81749311295, 0.81818181818, 0.82001836547, 0.82208448118, 0.82300275482, 0.82369146006, 0.82483930211, 0.82552800735, 0.82575757576, 0.82759412305, 0.82828282828, 0.83126721763, 0.83310376492, 0.83379247016, 0.8347107438, 0.83585858586, 0.83746556474, 0.84022038568, 0.84136822773, 0.84228650138, 0.8448117539, 0.84504132231, 0.84573002755, 0.84756657484, 
0.84779614325, 0.84848484849, 0.85032139578, 0.85238751148, 0.85330578512, 0.85399449036, 0.85514233242, 0.85583103765, 0.85606060606, 0.85789715335, 0.85858585859, 0.86157024793, 0.86340679523, 0.86409550046, 0.86501377411, 0.86616161616, 0.86776859504, 0.87052341598, 0.87167125804, 0.87258953168, 0.87511478421, 0.87534435262, 0.87603305785, 0.87786960514, 0.87809917355, 0.87878787879, 0.88062442608, 0.88269054178, 0.88360881543, 0.88429752066, 0.88544536272, 0.88613406795, 0.88636363636, 0.88820018366, 0.88888888889, 0.89187327824, 0.89370982553, 0.89439853076, 0.89531680441, 0.89646464647, 0.89807162534, 0.90082644628, 0.90197428834, 0.90289256198, 0.90541781451, 0.90564738292, 0.90633608815, 0.90817263545, 0.90840220386, 0.90909090909, 0.91092745638, 0.91299357208, 0.91391184573, 0.91460055096, 0.91574839302, 0.91643709826, 0.91666666667, 0.91850321396, 0.91919191919, 0.92217630854, 0.92401285583, 0.92470156107, 0.92561983471, 0.92676767677, 0.92837465565, 0.93112947658, 0.93227731864, 0.93319559229, 0.93572084481, 0.93595041322, 0.93663911846, 0.93847566575, 0.93870523416, 0.93939393939, 0.94123048669, 0.94329660239, 0.94421487603, 0.94490358127, 0.94605142332, 0.94674012856, 0.94696969697, 0.94880624426, 0.9494949495, 0.95247933884, 0.95431588613, 0.95500459137, 0.95592286501, 0.95707070707, 0.95867768595, 0.96143250689, 0.96258034894, 0.96349862259, 0.96602387512, 0.96625344353, 0.96694214876, 0.96877869605, 0.96900826446, 0.9696969697, 0.97153351699, 0.97359963269, 0.97451790634, 0.97520661157, 0.97635445363, 0.97704315886, 0.97727272727, 0.97910927456, 0.9797979798, 0.98278236915, 0.98461891644, 0.98530762167, 0.98622589532, 0.98737373737, 0.98898071625, 0.99173553719, 0.99288337925, 0.99380165289, 0.99632690542, 0.99655647383, 0.99724517906, 0.99908172635, 0.99931129477]
averages_even={0.0: [0.0], 0.25: [0.5], 0.89439853076: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.58585858586: [0.3333333333333, 0.6666666666667], 0.49426078972: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.641184573: [0.6818181818182, 0.3181818181818], 0.34825528007: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.29958677686: [0.8636363636364, 0.1363636363636], 0.59986225895: [0.2272727272727, 0.7727272727273], 0.391184573: [0.1818181818182, 0.8181818181818], 0.88269054178: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.74035812672: [0.0454545454545, 0.9545454545455], 0.64302112029: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.39784205693: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.08746556474: [0.8636363636364, 0.1363636363636], 0.86340679523: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.02754820937: [0.1818181818182, 0.8181818181818], 0.74931129477: [0.0909090909091, 0.9090909090909], 0.34343434343: [0.3333333333333, 0.6666666666667], 0.07369146006: [0.0454545454545, 0.9545454545455], 0.03764921947: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.52272727273: [0.5], 0.93572084481: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.85583103765: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.10743801653: [0.7272727272727, 0.2727272727273], 0.66391184573: [0.1818181818182, 0.8181818181818], 0.09825528007: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.79338842975: [0.4545454545455, 0.5454545454545], 0.52456382002: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.0303030303: [0.0], 0.98278236915: [0.0454545454545, 0.9545454545455], 0.10583103765: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.76308539945: [0.4545454545455, 0.5454545454545], 
0.16643709826: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.42607897153: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.15886134068: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.72107438017: [0.2272727272727, 0.7727272727273], 0.04683195592: [0.7272727272727, 0.2727272727273], 0.62373737374: [0.8333333333333, 0.1666666666667], 0.32713498623: [0.2272727272727, 0.7727272727273], 0.99931129477: [0.5909090909091, 0.4090909090909], 0.64370982553: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.19123048669: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.98530762167: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.26905417815: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.90197428834: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.37672176309: [0.0454545454545, 0.9545454545455], 0.27938475666: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.0927456382: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.63544536272: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.86409550046: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.32231404959: [0.3636363636364, 0.6363636363636], 0.0342056933: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.51423324151: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.57300275482: [0.1818181818182, 0.8181818181818], 0.66666666667: [0.0], 0.93847566575: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.42056932966: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.54545454546: [0.0], 0.0101010101: [0.3333333333333, 0.6666666666667], 0.27456382002: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.57966023875: [0.1060606060606, 0.5606060606061, 
0.8939393939394, 0.4393939393939], 0.60606060606: [0.0], 0.06542699725: [0.6818181818182, 0.3181818181818], 0.89531680441: [0.7272727272727, 0.2727272727273], 0.00183654729: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.02938475666: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.42148760331: [0.1818181818182, 0.8181818181818], 0.20500459137: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.15633608815: [0.6818181818182, 0.3181818181818], 0.41597796143: [0.0909090909091, 0.9090909090909], 0.58333333333: [0.5], 0.30693296602: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.84022038568: [0.0909090909091, 0.9090909090909], 0.2297979798: [0.8333333333333, 0.1666666666667], 0.18112947658: [0.5909090909091, 0.4090909090909], 0.18365472911: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.42424242424: [0.0], 0.5603764922: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.15151515152: [0.0], 0.89646464647: [0.8333333333333, 0.1666666666667], 0.35651974288: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.30785123967: [0.6818181818182, 0.3181818181818], 0.18572084481: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.77249770432: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.20592286501: [0.2272727272727, 0.7727272727273], 0.07552800735: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.12672176309: [0.4545454545455, 0.5454545454545], 0.78168044077: [0.2272727272727, 0.7727272727273], 0.65955004591: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.35743801653: [0.2272727272727, 0.7727272727273], 0.67607897153: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.89807162534: [0.3636363636364, 0.6363636363636], 0.90082644628: [0.0909090909091, 0.9090909090909], 0.03787878788: [0.5], 0.95247933884: 
[0.0454545454545, 0.9545454545455], 0.99724517906: [0.1818181818182, 0.8181818181818], 0.4012855831: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.00550964187: [0.4545454545455, 0.5454545454545], 0.08815426997: [0.1818181818182, 0.8181818181818], 0.7842056933: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.60789715335: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.0241046832: [0.2272727272727, 0.7727272727273], 0.00390266299: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.01561065197: [0.2121212121212, 0.1212121212121, 0.7878787878788, 0.8787878787879], 0.24977043159: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.45087235996: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.68227731864: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.11019283747: [0.3636363636364, 0.6363636363636], 0.64026629936: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.29476584022: [0.0909090909091, 0.9090909090909], 0.2196969697: [0.5], 0.79453627181: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.00665748393: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.90840220386: [0.5909090909091, 0.4090909090909], 0.51997245179: [0.6818181818182, 0.3181818181818], 0.68319559229: [0.7272727272727, 0.2727272727273], 0.74219467401: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.28764921947: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.08999081726: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.78696051423: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.83379247016: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.68595041322: [0.3636363636364, 0.6363636363636], 0.61914600551: [0.0454545454545, 0.9545454545455], 0.52180899908: [0.469696969697, 
0.8030303030303, 0.1969696969697, 0.530303030303], 0.87258953168: [0.2272727272727, 0.7727272727273], 0.33723599633: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.9797979798: [0.6666666666667, 0.3333333333333], 0.38016528926: [0.7272727272727, 0.2727272727273], 0.76698806244: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.66299357208: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.07070707071: [0.3333333333333, 0.6666666666667], 0.48415977961: [0.5909090909091, 0.4090909090909], 0.6209825528: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.52525252525: [0.3333333333333, 0.6666666666667], 0.38682277319: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.31955922865: [0.7272727272727, 0.2727272727273], 0.42975206612: [0.4545454545455, 0.5454545454545], 0.49035812672: [0.4545454545455, 0.5454545454545], 0.33241505969: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.46395775941: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.56749311295: [0.0909090909091, 0.9090909090909], 0.87511478421: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.05991735537: [0.5909090909091, 0.4090909090909], 0.09090909091: [0.0], 0.64187327824: [0.4545454545455, 0.5454545454545], 0.13613406795: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.01767676768: [0.8333333333333, 0.1666666666667], 0.50252525253: [0.8333333333333, 0.1666666666667], 0.67217630854: [0.4545454545455, 0.5454545454545], 0.39302112029: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.84779614325: [0.5909090909091, 0.4090909090909], 0.16092745638: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.82483930211: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.92217630854: [0.0454545454545, 0.9545454545455], 0.31611570248: [0.0454545454545, 
0.9545454545455], 0.99288337925: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.76767676768: [0.3333333333333, 0.6666666666667], 0.58310376492: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.79729109275: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.26170798898: [0.3636363636364, 0.6363636363636], 0.4738292011: [0.3636363636364, 0.6363636363636], 0.90817263545: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.71349862259: [0.7272727272727, 0.2727272727273], 0.93939393939: [0.0], 0.35996326905: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.62258953168: [0.7272727272727, 0.2727272727273], 0.13131313131: [0.3333333333333, 0.6666666666667], 0.09022038568: [0.5909090909091, 0.4090909090909], 0.5805785124: [0.6818181818182, 0.3181818181818], 0.91643709826: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.86616161616: [0.8333333333333, 0.1666666666667], 0.40955004591: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.3608815427: [0.1818181818182, 0.8181818181818], 0.80096418733: [0.0454545454545, 0.9545454545455], 0.67975206612: [0.0454545454545, 0.9545454545455], 0.96602387512: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.81106519743: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.6632231405: [0.8636363636364, 0.1363636363636], 0.41046831956: [0.7272727272727, 0.2727272727273], 0.54729109275: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.04522497704: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.12603305785: [0.6818181818182, 0.3181818181818], 0.19490358127: [0.0454545454545, 0.9545454545455], 0.43434343434: [0.3333333333333, 0.6666666666667], 0.84228650138: [0.2272727272727, 0.7727272727273], 0.75137741047: [0.2272727272727, 0.7727272727273], 0.1994949495: [0.8333333333333, 0.1666666666667], 
0.69605142332: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.30303030303: [0.0], 0.15082644628: [0.5909090909091, 0.4090909090909], 0.38567493113: [0.0909090909091, 0.9090909090909], 0.72727272727: [0.0], 0.29591368228: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.28007346189: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.17561983471: [0.2272727272727, 0.7727272727273], 0.91092745638: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.60514233242: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.63567493113: [0.5909090909091, 0.4090909090909], 0.5383379247: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.29683195592: [0.2272727272727, 0.7727272727273], 0.24908172635: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.49242424242: [0.5], 0.8044077135: [0.7272727272727, 0.2727272727273], 0.76423324151: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.34641873278: [0.0454545454545, 0.9545454545455], 0.87603305785: [0.1818181818182, 0.8181818181818], 0.3406795225: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.29201101928: [0.3636363636364, 0.6363636363636], 0.73668503214: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.30486685032: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.75688705234: [0.5909090909091, 0.4090909090909], 0.18089990817: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.21946740129: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.09756657484: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.17079889807: [0.3636363636364, 0.6363636363636], 0.50413223141: [0.3636363636364, 0.6363636363636], 0.95431588613: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.51905417815: [0.1060606060606, 0.5606060606061, 
0.8939393939394, 0.4393939393939], 0.67676767677: [0.3333333333333, 0.6666666666667], 0.18939393939: [0.5], 0.43985307622: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.96900826446: [0.5909090909091, 0.4090909090909], 0.75665748393: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.75941230487: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.56473829201: [0.3636363636364, 0.6363636363636], 0.76492194674: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.27662993572: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.23898071625: [0.8636363636364, 0.1363636363636], 0.85858585859: [0.6666666666667, 0.3333333333333], 0.54178145087: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.47222222222: [0.8333333333333, 0.1666666666667], 0.42355371901: [0.5909090909091, 0.4090909090909], 0.03512396694: [0.6818181818182, 0.3181818181818], 0.32621671258: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.27754820937: [0.6818181818182, 0.3181818181818], 0.45179063361: [0.1818181818182, 0.8181818181818], 0.97727272727: [0.5], 0.36914600551: [0.4545454545455, 0.5454545454545], 0.71625344353: [0.3636363636364, 0.6363636363636], 0.06795224977: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.79797979798: [0.6666666666667, 0.3333333333333], 0.69628099174: [0.5909090909091, 0.4090909090909], 0.59894398531: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.01308539945: [0.0454545454545, 0.9545454545455], 0.94880624426: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.6733241506: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.06818181818: [0.5], 0.59504132231: [0.3636363636364, 0.6363636363636], 0.63636363636: [0.0], 0.82369146006: [0.4545454545455, 0.5454545454545], 0.23415977961: [0.0909090909091, 0.9090909090909], 0.60330578512: 
[0.1818181818182, 0.8181818181818], 0.13062442608: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.94490358127: [0.4545454545455, 0.5454545454545], 0.86157024793: [0.0454545454545, 0.9545454545455], 0.95867768595: [0.3636363636364, 0.6363636363636], 0.71900826446: [0.0909090909091, 0.9090909090909], 0.15541781451: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.03213957759: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.02318640955: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.05348943985: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.55303030303: [0.5], 0.81818181818: [0.0], 0.50137741047: [0.7272727272727, 0.2727272727273], 0.65404040404: [0.8333333333333, 0.1666666666667], 0.82208448118: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.40335169881: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.0948117539: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.90289256198: [0.2272727272727, 0.7727272727273], 0.25734618916: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.34894398531: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.30027548209: [0.1818181818182, 0.8181818181818], 0.13682277319: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.0826446281: [0.0909090909091, 0.9090909090909], 0.5585399449: [0.0454545454545, 0.9545454545455], 0.88888888889: [0.6666666666667, 0.3333333333333], 0.2479338843: [0.4545454545455, 0.5454545454545], 0.55555555556: [0.3333333333333, 0.6666666666667], 0.97704315886: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.34986225895: [0.7272727272727, 0.2727272727273], 0.15059687787: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.00941230487: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.25252525253: 
[0.3333333333333, 0.6666666666667], 0.60261707989: [0.8636363636364, 0.1363636363636], 0.39944903581: [0.4545454545455, 0.5454545454545], 0.94421487603: [0.6818181818182, 0.3181818181818], 0.73209366391: [0.6818181818182, 0.3181818181818], 0.17355371901: [0.0909090909091, 0.9090909090909], 0.10101010101: [0.3333333333333, 0.6666666666667], 0.88544536272: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.70247933884: [0.4545454545455, 0.5454545454545], 0.58126721763: [0.4545454545455, 0.5454545454545], 0.14531680441: [0.2272727272727, 0.7727272727273], 0.7339302112: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.75045913682: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.85514233242: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.53925619835: [0.2272727272727, 0.7727272727273], 0.21877869605: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.49977043159: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.87534435262: [0.8636363636364, 0.1363636363636], 0.43181818182: [0.5], 0.01492194674: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.97910927456: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.28581267218: [0.0454545454545, 0.9545454545455], 0.74380165289: [0.7272727272727, 0.2727272727273], 0.07001836547: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.48140495868: [0.8636363636364, 0.1363636363636], 0.61547291093: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.88820018366: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.44077134986: [0.7272727272727, 0.2727272727273], 0.85399449036: [0.4545454545455, 0.5454545454545], 0.18916437098: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.16161616162: [0.3333333333333, 0.6666666666667], 0.32966023875: [0.6060606060606, 0.9393939393939, 
0.3939393939394, 0.0606060606061], 0.14049586777: [0.3636363636364, 0.6363636363636], 0.71464646465: [0.8333333333333, 0.1666666666667], 0.43365472911: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.86776859504: [0.3636363636364, 0.6363636363636], 0.96143250689: [0.0909090909091, 0.9090909090909], 0.47658402204: [0.0909090909091, 0.9090909090909], 0.21395775941: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.99632690542: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.83310376492: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.37924701561: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.96349862259: [0.2272727272727, 0.7727272727273], 0.3305785124: [0.1818181818182, 0.8181818181818], 0.70179063361: [0.6818181818182, 0.3181818181818], 0.05968778696: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.63820018366: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.19834710744: [0.7272727272727, 0.2727272727273], 0.88636363636: [0.5], 0.6935261708: [0.8636363636364, 0.1363636363636], 0.66850321396: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.03581267218: [0.4545454545455, 0.5454545454545], 0.20867768595: [0.8636363636364, 0.1363636363636], 0.76239669422: [0.6818181818182, 0.3181818181818], 0.73737373737: [0.3333333333333, 0.6666666666667], 0.72359963269: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.41161616162: [0.8333333333333, 0.1666666666667], 0.73484848485: [0.5], 0.3629476584: [0.5909090909091, 0.4090909090909], 0.26561065197: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.00734618916: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.84848484849: [0.0], 0.07713498623: [0.7272727272727, 0.2727272727273], 0.01928374656: [0.3636363636364, 0.6363636363636], 0.11753902663: [0.6060606060606, 0.9393939393939, 
0.3939393939394, 0.0606060606061], 0.46120293848: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.57506887052: [0.5909090909091, 0.4090909090909], 0.8347107438: [0.7272727272727, 0.2727272727273], 0.2277318641: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.26652892562: [0.2272727272727, 0.7727272727273], 0.17906336088: [0.1818181818182, 0.8181818181818], 0.92676767677: [0.8333333333333, 0.1666666666667], 0.98737373737: [0.8333333333333, 0.1666666666667], 0.46212121212: [0.5], 0.11409550046: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.20385674931: [0.0909090909091, 0.9090909090909], 0.48668503214: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.3103764922: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.78443526171: [0.8636363636364, 0.1363636363636], 0.22865013774: [0.7272727272727, 0.2727272727273], 0.54269972452: [0.1818181818182, 0.8181818181818], 0.59779614325: [0.0909090909091, 0.9090909090909], 0.12511478421: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.88429752066: [0.4545454545455, 0.5454545454545], 0.24724517906: [0.6818181818182, 0.3181818181818], 0.81542699725: [0.1818181818182, 0.8181818181818], 0.69696969697: [0.0], 0.39026629936: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.3427456382: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.82575757576: [0.5], 0.91574839302: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.17470156107: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.48966942149: [0.6818181818182, 0.3181818181818], 0.59228650138: [0.7272727272727, 0.2727272727273], 0.05785123967: [0.1818181818182, 0.8181818181818], 0.82001836547: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.28925619835: [0.7272727272727, 0.2727272727273], 0.85606060606: [0.5], 0.73117539027: 
[0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.44191919192: [0.8333333333333, 0.1666666666667], 0.39325068871: [0.5909090909091, 0.4090909090909], 0.12121212121: [0.0], 0.06060606061: [0.0], 0.00757575758: [0.5], 0.06611570248: [0.4545454545455, 0.5454545454545], 0.0847107438: [0.2272727272727, 0.7727272727273], 0.33884297521: [0.4545454545455, 0.5454545454545], 0.6108815427: [0.6818181818182, 0.3181818181818], 0.49150596878: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.00482093664: [0.6818181818182, 0.3181818181818], 0.62534435262: [0.3636363636364, 0.6363636363636], 0.97153351699: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.77685950413: [0.3636363636364, 0.6363636363636], 0.71005509642: [0.0454545454545, 0.9545454545455], 0.88360881543: [0.6818181818182, 0.3181818181818], 0.61271808999: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.77525252525: [0.8333333333333, 0.1666666666667], 0.97520661157: [0.4545454545455, 0.5454545454545], 0.18847566575: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.71189164371: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.95500459137: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.87786960514: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.88613406795: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.16460055096: [0.0454545454545, 0.9545454545455], 0.46946740129: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.56106519743: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.42079889807: [0.8636363636364, 0.1363636363636], 0.73278236915: [0.4545454545455, 0.5454545454545], 0.97359963269: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.03971533517: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.06726354454: [0.469696969697, 
0.8030303030303, 0.1969696969697, 0.530303030303], 0.69077134986: [0.2272727272727, 0.7727272727273], 0.59343434343: [0.8333333333333, 0.1666666666667], 0.57759412305: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.3730486685: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.46464646465: [0.3333333333333, 0.6666666666667], 0.8145087236: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.10399449036: [0.0454545454545, 0.9545454545455], 0.04591368228: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.31864095501: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.78719008265: [0.5909090909091, 0.4090909090909], 0.26997245179: [0.1818181818182, 0.8181818181818], 0.73461891644: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.93112947658: [0.0909090909091, 0.9090909090909], 0.20844811754: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.93595041322: [0.8636363636364, 0.1363636363636], 0.51698806244: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.07988980716: [0.3636363636364, 0.6363636363636], 0.57231404959: [0.8636363636364, 0.1363636363636], 0.72451790634: [0.1818181818182, 0.8181818181818], 0.17837465565: [0.8636363636364, 0.1363636363636], 0.96694214876: [0.1818181818182, 0.8181818181818], 0.61616161616: [0.3333333333333, 0.6666666666667], 0.67148760331: [0.6818181818182, 0.3181818181818], 0.96625344353: [0.8636363636364, 0.1363636363636], 0.3023415978: [0.5909090909091, 0.4090909090909], 0.45385674931: [0.5909090909091, 0.4090909090909], 0.52066115703: [0.4545454545455, 0.5454545454545], 0.40059687787: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.24426078972: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.14876033058: [0.1818181818182, 0.8181818181818], 0.91299357208: [0.1060606060606, 0.5606060606061, 0.8939393939394, 
0.4393939393939], 0.40151515152: [0.5], 0.22222222222: [0.3333333333333, 0.6666666666667], 0.25550964187: [0.0454545454545, 0.9545454545455], 0.39577594123: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.04338842975: [0.0454545454545, 0.9545454545455], 0.47865013774: [0.2272727272727, 0.7727272727273], 0.97451790634: [0.6818181818182, 0.3181818181818], 0.45110192838: [0.8636363636364, 0.1363636363636], 0.78787878788: [0.0], 0.55486685032: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.94329660239: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.04958677686: [0.3636363636364, 0.6363636363636], 0.45638200184: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.29935720845: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.47015610652: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.49494949495: [0.3333333333333, 0.6666666666667], 0.44628099174: [0.0909090909091, 0.9090909090909], 0.88062442608: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.57575757576: [0.0], 0.30211202939: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.99908172635: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.24173553719: [0.5909090909091, 0.4090909090909], 0.94605142332: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.82300275482: [0.6818181818182, 0.3181818181818], 0.14439853076: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.50688705234: [0.0909090909091, 0.9090909090909], 0.63292011019: [0.8636363636364, 0.1363636363636], 0.83126721763: [0.0454545454545, 0.9545454545455], 0.96877869605: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.48117539027: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.51515151515: [0.0], 0.7741046832: [0.7272727272727, 0.2727272727273], 0.16919191919: [0.8333333333333, 
0.1666666666667], 0.93319559229: [0.2272727272727, 0.7727272727273], 0.60996326905: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.38131313131: [0.8333333333333, 0.1666666666667], 0.51147842057: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.3326446281: [0.5909090909091, 0.4090909090909], 0.89370982553: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.2782369146: [0.4545454545455, 0.5454545454545], 0.43089990817: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.04797979798: [0.8333333333333, 0.1666666666667], 0.02961432507: [0.5909090909091, 0.4090909090909], 0.85032139578: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.9696969697: [0.0], 0.65564738292: [0.3636363636364, 0.6363636363636], 0.58884297521: [0.0454545454545, 0.9545454545455], 0.81473829201: [0.8636363636364, 0.1363636363636], 0.9494949495: [0.6666666666667, 0.3333333333333], 0.70454545455: [0.5], 0.15817263545: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.75482093664: [0.1818181818182, 0.8181818181818], 0.5906795225: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.05716253444: [0.8636363636364, 0.1363636363636], 0.78076216713: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.13429752066: [0.0454545454545, 0.9545454545455], 0.3085399449: [0.4545454545455, 0.5454545454545], 0.87167125804: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.51446280992: [0.5909090909091, 0.4090909090909], 0.81726354454: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.92837465565: [0.3636363636364, 0.6363636363636], 0.66574839302: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.3709825528: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.15909090909: [0.5], 0.50045913682: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 
0.56955922865: [0.2272727272727, 0.7727272727273], 0.85330578512: [0.6818181818182, 0.3181818181818], 0.11845730028: [0.1818181818182, 0.8181818181818], 0.3124426079: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.35537190083: [0.0909090909091, 0.9090909090909], 0.15335169881: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.95592286501: [0.7272727272727, 0.2727272727273], 0.2580348944: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.45936639119: [0.6818181818182, 0.3181818181818], 0.92561983471: [0.7272727272727, 0.2727272727273], 0.48209366391: [0.1818181818182, 0.8181818181818], 0.4536271809: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.80991735537: [0.0909090909091, 0.9090909090909], 0.17814508724: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.25895316804: [0.7272727272727, 0.2727272727273], 0.67056932966: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.45454545455: [0.0], 0.90564738292: [0.8636363636364, 0.1363636363636], 0.98898071625: [0.3636363636364, 0.6363636363636], 0.74494949495: [0.8333333333333, 0.1666666666667], 0.02662993572: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.55027548209: [0.6818181818182, 0.3181818181818], 0.05693296602: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.64944903581: [0.0454545454545, 0.9545454545455], 0.55211202939: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.24632690542: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.64646464647: [0.3333333333333, 0.6666666666667], 0.68870523416: [0.0909090909091, 0.9090909090909], 0.10032139578: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.69329660239: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.94696969697: [0.5], 0.34090909091: [0.5], 0.14784205693: [0.6060606060606, 0.9393939393939, 
0.3939393939394, 0.0606060606061], 0.02203856749: [0.0909090909091, 0.9090909090909], 0.5080348944: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.08379247016: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.35261707989: [0.3636363636364, 0.6363636363636], 0.14325068871: [0.0909090909091, 0.9090909090909], 0.43916437098: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.39049586777: [0.8636363636364, 0.1363636363636], 0.85789715335: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.94123048669: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.61363636364: [0.5], 0.16804407714: [0.7272727272727, 0.2727272727273], 0.57483930211: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.48875114784: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.63016528926: [0.2272727272727, 0.7727272727273], 0.53282828283: [0.8333333333333, 0.1666666666667], 0.24150596878: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.10858585859: [0.8333333333333, 0.1666666666667], 0.09641873278: [0.4545454545455, 0.5454545454545], 0.10651974288: [0.2121212121212, 0.1212121212121, 0.7878787878788, 0.8787878787879], 0.67401285583: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.74655647383: [0.3636363636364, 0.6363636363636], 0.2883379247: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.4839302112: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.21763085399: [0.4545454545455, 0.5454545454545], 0.0544077135: [0.2272727272727, 0.7727272727273], 0.09573002755: [0.6818181818182, 0.3181818181818], 0.70638200184: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.27180899908: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.51170798898: [0.8636363636364, 0.1363636363636], 0.23622589532: [0.2272727272727, 0.7727272727273], 
0.99380165289: [0.2272727272727, 0.7727272727273], 0.84756657484: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.65289256198: [0.7272727272727, 0.2727272727273], 0.0624426079: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.13888888889: [0.8333333333333, 0.1666666666667], 0.32070707071: [0.8333333333333, 0.1666666666667], 0.27203856749: [0.5909090909091, 0.4090909090909], 0.39393939394: [0.0], 0.94674012856: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.54201101928: [0.8636363636364, 0.1363636363636], 0.93227731864: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.46763085399: [0.0454545454545, 0.9545454545455], 0.37029384757: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.4132231405: [0.3636363636364, 0.6363636363636], 0.72910927456: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.69880624426: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.53443526171: [0.3636363636364, 0.6363636363636], 0.37121212121: [0.5], 0.23140495868: [0.3636363636364, 0.6363636363636], 0.82828282828: [0.6666666666667, 0.3333333333333], 0.36547291093: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.63360881543: [0.1818181818182, 0.8181818181818], 0.87809917355: [0.5909090909091, 0.4090909090909], 0.20936639119: [0.1818181818182, 0.8181818181818], 0.80555555556: [0.8333333333333, 0.1666666666667], 0.84573002755: [0.1818181818182, 0.8181818181818], 0.91919191919: [0.6666666666667, 0.3333333333333], 0.66597796143: [0.5909090909091, 0.4090909090909], 0.07828282828: [0.8333333333333, 0.1666666666667], 0.56864095501: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.80716253444: [0.3636363636364, 0.6363636363636], 0.26446280992: [0.0909090909091, 0.9090909090909], 0.12878787879: [0.5], 0.53719008265: [0.0909090909091, 0.9090909090909], 0.25183654729: [0.7424242424242, 0.0757575757576, 
0.9242424242424, 0.2575757575758], 0.90633608815: [0.1818181818182, 0.8181818181818], 0.49219467401: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.87878787879: [0.0], 0.27272727273: [0.0], 0.44742883379: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.68985307622: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.39876033058: [0.6818181818182, 0.3181818181818], 0.77318640955: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.22704315886: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.03696051423: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.78971533517: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.44834710744: [0.2272727272727, 0.7727272727273], 0.54935720845: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.35101010101: [0.8333333333333, 0.1666666666667], 0.09848484849: [0.5], 0.82759412305: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.79269972452: [0.6818181818182, 0.3181818181818], 0.4979338843: [0.0454545454545, 0.9545454545455], 0.46005509642: [0.4545454545455, 0.5454545454545], 0.76515151515: [0.5], 0.91850321396: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.1230486685: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.4435261708: [0.3636363636364, 0.6363636363636], 0.59136822773: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.19742883379: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.71258034894: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.93663911846: [0.1818181818182, 0.8181818181818], 0.69421487603: [0.1818181818182, 0.8181818181818], 0.5282369146: [0.0454545454545, 0.9545454545455], 0.83746556474: [0.3636363636364, 0.6363636363636], 0.21602387512: [0.1060606060606, 0.5606060606061, 0.8939393939394, 
0.4393939393939], 0.98622589532: [0.7272727272727, 0.2727272727273], 0.23875114784: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.54476584022: [0.5909090909091, 0.4090909090909], 0.84136822773: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.53007346189: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.2803030303: [0.5], 0.77961432507: [0.0909090909091, 0.9090909090909], 0.62809917355: [0.0909090909091, 0.9090909090909], 0.24242424242: [0.0], 0.52249770432: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.37855831038: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.45844811754: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.32988980716: [0.8636363636364, 0.1363636363636], 0.21694214876: [0.6818181818182, 0.3181818181818], 0.13774104683: [0.7272727272727, 0.2727272727273], 0.98461891644: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.7036271809: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.42814508724: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.83585858586: [0.8333333333333, 0.1666666666667], 0.50895316804: [0.2272727272727, 0.7727272727273], 0.11776859504: [0.8636363636364, 0.1363636363636], 0.28213957759: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.21120293848: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.37373737374: [0.3333333333333, 0.6666666666667], 0.32506887052: [0.0909090909091, 0.9090909090909], 0.55280073462: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.4777318641: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.42906336088: [0.6818181818182, 0.3181818181818], 0.4233241506: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.18732782369: [0.4545454545455, 0.5454545454545], 0.57208448118: [0.6060606060606, 
0.9393939393939, 0.3939393939394, 0.0606060606061], 0.65197428834: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.58516988062: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.76147842057: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.72635445363: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.21212121212: [0.0], 0.74288337925: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.53168044077: [0.7272727272727, 0.2727272727273], 0.68434343434: [0.8333333333333, 0.1666666666667], 0.62924701561: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.2601010101: [0.8333333333333, 0.1666666666667], 0.82552800735: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.67424242424: [0.5], 0.75413223141: [0.8636363636364, 0.1363636363636], 0.91666666667: [0.5], 0.40702479339: [0.0454545454545, 0.9545454545455], 0.30968778696: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.61340679523: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.36363636364: [0.0], 0.0404040404: [0.3333333333333, 0.6666666666667], 0.92470156107: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.99655647383: [0.8636363636364, 0.1363636363636], 0.62167125804: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.0645087236: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.31060606061: [0.5], 0.20110192838: [0.3636363636364, 0.6363636363636], 0.70707070707: [0.6666666666667, 0.3333333333333], 0.07621671258: [0.2121212121212, 0.1212121212121, 0.7878787878788, 0.8787878787879], 0.51239669422: [0.1818181818182, 0.8181818181818], 0.40886134068: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.79178145087: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.36019283747: [0.8636363636364, 0.1363636363636], 
0.1129476584: [0.0909090909091, 0.9090909090909], 0.61157024793: [0.4545454545455, 0.5454545454545], 0.12855831038: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.6512855831: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.90541781451: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.40404040404: [0.3333333333333, 0.6666666666667], 0.84504132231: [0.8636363636364, 0.1363636363636], 0.64393939394: [0.5], 0.28282828283: [0.3333333333333, 0.6666666666667], 0.78512396694: [0.1818181818182, 0.8181818181818], 0.60238751148: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.95707070707: [0.8333333333333, 0.1666666666667], 0.33815426997: [0.6818181818182, 0.3181818181818], 0.72658402204: [0.5909090909091, 0.4090909090909], 0.70431588613: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.48484848485: [0.0], 0.19674012856: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.92401285583: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.72015610652: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.14807162534: [0.8636363636364, 0.1363636363636], 0.38774104683: [0.2272727272727, 0.7727272727273], 0.2904040404: [0.8333333333333, 0.1666666666667], 0.22153351699: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.33333333333: [0.0], 0.43732782369: [0.0454545454545, 0.9545454545455], 0.33999081726: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.43158861341: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.38292011019: [0.3636363636364, 0.6363636363636], 0.16712580349: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.93870523416: [0.5909090909091, 0.4090909090909], 0.91460055096: [0.4545454545455, 0.5454545454545], 0.0523415978: [0.0909090909091, 0.9090909090909], 0.7238292011: [0.8636363636364, 0.1363636363636], 
0.02685950413: [0.8636363636364, 0.1363636363636], 0.75390266299: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.12029384757: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.86501377411: [0.7272727272727, 0.2727272727273], 0.77066115703: [0.0454545454545, 0.9545454545455], 0.19191919192: [0.3333333333333, 0.6666666666667], 0.33516988062: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.70087235996: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.08723599633: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.79545454546: [0.5], 0.55096418733: [0.4545454545455, 0.5454545454545], 0.12052341598: [0.5909090909091, 0.4090909090909], 0.56198347107: [0.7272727272727, 0.2727272727273], 0.64577594123: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.60537190083: [0.5909090909091, 0.4090909090909], 0.31795224977: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.26928374656: [0.8636363636364, 0.1363636363636], 0.81198347107: [0.2272727272727, 0.7727272727273], 0.23530762167: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.81749311295: [0.5909090909091, 0.4090909090909], 0.18663911846: [0.6818181818182, 0.3181818181818], 0.79522497704: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.90909090909: [0.0], 0.58241505969: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.36753902663: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.21143250689: [0.5909090909091, 0.4090909090909], 0.31313131313: [0.3333333333333, 0.6666666666667], 0.01652892562: [0.7272727272727, 0.2727272727273], 0.68158861341: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.41712580349: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.36845730028: [0.6818181818182, 0.3181818181818], 0.11501377411: [0.2272727272727, 
0.7727272727273], 0.36271808999: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.15702479339: [0.4545454545455, 0.5454545454545], 0.47107438017: [0.7272727272727, 0.2727272727273], 0.53076216713: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.85238751148: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.41804407714: [0.2272727272727, 0.7727272727273], 0.63269054178: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.97635445363: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.18181818182: [0.0], 0.8448117539: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.66046831956: [0.2272727272727, 0.7727272727273], 0.23966942149: [0.1818181818182, 0.8181818181818], 0.56313131313: [0.8333333333333, 0.1666666666667], 0.91391184573: [0.6818181818182, 0.3181818181818], 0.75757575758: [0.0], 0.46189164371: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.96258034894: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.89187327824: [0.0454545454545, 0.9545454545455], 0.22520661157: [0.0454545454545, 0.9545454545455], 0.80348943985: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.87052341598: [0.0909090909091, 0.9090909090909], 0.54453627181: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.65840220386: [0.0909090909091, 0.9090909090909], 0.99173553719: [0.0909090909091, 0.9090909090909], 0.12786960514: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.80280073462: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152]}
averages_odd={0.0: [0.0], 0.25: [0.5], 0.89439853076: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.58585858586: [0.3333333333333, 0.6666666666667], 0.49426078972: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.641184573: [0.6818181818182, 0.3181818181818], 0.34825528007: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.29958677686: [0.8636363636364, 0.1363636363636], 0.59986225895: [0.2272727272727, 0.7727272727273], 0.391184573: [0.1818181818182, 0.8181818181818], 0.88269054178: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.74035812672: [0.0454545454545, 0.9545454545455], 0.64302112029: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.39784205693: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.08746556474: [0.8636363636364, 0.1363636363636], 0.86340679523: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.02754820937: [0.1818181818182, 0.8181818181818], 0.74931129477: [0.0909090909091, 0.9090909090909], 0.34343434343: [0.3333333333333, 0.6666666666667], 0.07369146006: [0.0454545454545, 0.9545454545455], 0.03764921947: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.52272727273: [0.5], 0.93572084481: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.85583103765: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.10743801653: [0.7272727272727, 0.2727272727273], 0.66391184573: [0.1818181818182, 0.8181818181818], 0.09825528007: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.79338842975: [0.4545454545455, 0.5454545454545], 0.52456382002: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.0303030303: [0.0], 0.98278236915: [0.0454545454545, 0.9545454545455], 0.10583103765: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.76308539945: [0.4545454545455, 0.5454545454545], 
0.16643709826: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.42607897153: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.15886134068: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.72107438017: [0.2272727272727, 0.7727272727273], 0.04683195592: [0.7272727272727, 0.2727272727273], 0.62373737374: [0.8333333333333, 0.1666666666667], 0.32713498623: [0.2272727272727, 0.7727272727273], 0.99931129477: [0.5909090909091, 0.4090909090909], 0.64370982553: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.19123048669: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.98530762167: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.26905417815: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.90197428834: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.37672176309: [0.0454545454545, 0.9545454545455], 0.27938475666: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.0927456382: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.63544536272: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.86409550046: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.32231404959: [0.3636363636364, 0.6363636363636], 0.0342056933: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.51423324151: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.57300275482: [0.1818181818182, 0.8181818181818], 0.66666666667: [0.0], 0.93847566575: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.42056932966: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.54545454546: [0.0], 0.0101010101: [0.3333333333333, 0.6666666666667], 0.27456382002: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.57966023875: [0.1060606060606, 0.5606060606061, 
0.8939393939394, 0.4393939393939], 0.60606060606: [0.0], 0.06542699725: [0.6818181818182, 0.3181818181818], 0.89531680441: [0.7272727272727, 0.2727272727273], 0.00183654729: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.02938475666: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.42148760331: [0.1818181818182, 0.8181818181818], 0.20500459137: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.15633608815: [0.6818181818182, 0.3181818181818], 0.41597796143: [0.0909090909091, 0.9090909090909], 0.58333333333: [0.5], 0.30693296602: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.84022038568: [0.0909090909091, 0.9090909090909], 0.2297979798: [0.8333333333333, 0.1666666666667], 0.18112947658: [0.5909090909091, 0.4090909090909], 0.18365472911: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.42424242424: [0.0], 0.5603764922: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.15151515152: [0.0], 0.89646464647: [0.8333333333333, 0.1666666666667], 0.35651974288: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.30785123967: [0.6818181818182, 0.3181818181818], 0.18572084481: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.77249770432: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.20592286501: [0.2272727272727, 0.7727272727273], 0.07552800735: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.12672176309: [0.4545454545455, 0.5454545454545], 0.78168044077: [0.2272727272727, 0.7727272727273], 0.65955004591: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.35743801653: [0.2272727272727, 0.7727272727273], 0.67607897153: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.89807162534: [0.3636363636364, 0.6363636363636], 0.90082644628: [0.0909090909091, 0.9090909090909], 0.03787878788: [0.5], 0.95247933884: 
[0.0454545454545, 0.9545454545455], 0.99724517906: [0.1818181818182, 0.8181818181818], 0.4012855831: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.00550964187: [0.4545454545455, 0.5454545454545], 0.08815426997: [0.1818181818182, 0.8181818181818], 0.7842056933: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.60789715335: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.0241046832: [0.2272727272727, 0.7727272727273], 0.00390266299: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.01561065197: [0.2121212121212, 0.1212121212121, 0.7878787878788, 0.8787878787879], 0.24977043159: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.45087235996: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.68227731864: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.11019283747: [0.3636363636364, 0.6363636363636], 0.64026629936: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.29476584022: [0.0909090909091, 0.9090909090909], 0.2196969697: [0.5], 0.79453627181: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.00665748393: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.90840220386: [0.5909090909091, 0.4090909090909], 0.51997245179: [0.6818181818182, 0.3181818181818], 0.68319559229: [0.7272727272727, 0.2727272727273], 0.74219467401: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.28764921947: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.08999081726: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.78696051423: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.83379247016: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.68595041322: [0.3636363636364, 0.6363636363636], 0.61914600551: [0.0454545454545, 0.9545454545455], 0.52180899908: [0.469696969697, 
0.8030303030303, 0.1969696969697, 0.530303030303], 0.87258953168: [0.2272727272727, 0.7727272727273], 0.33723599633: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.9797979798: [0.6666666666667, 0.3333333333333], 0.38016528926: [0.7272727272727, 0.2727272727273], 0.76698806244: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.66299357208: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.07070707071: [0.3333333333333, 0.6666666666667], 0.48415977961: [0.5909090909091, 0.4090909090909], 0.6209825528: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.52525252525: [0.3333333333333, 0.6666666666667], 0.38682277319: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.31955922865: [0.7272727272727, 0.2727272727273], 0.42975206612: [0.4545454545455, 0.5454545454545], 0.49035812672: [0.4545454545455, 0.5454545454545], 0.33241505969: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.46395775941: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.56749311295: [0.0909090909091, 0.9090909090909], 0.87511478421: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.05991735537: [0.5909090909091, 0.4090909090909], 0.09090909091: [0.0], 0.64187327824: [0.4545454545455, 0.5454545454545], 0.13613406795: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.01767676768: [0.8333333333333, 0.1666666666667], 0.50252525253: [0.8333333333333, 0.1666666666667], 0.67217630854: [0.4545454545455, 0.5454545454545], 0.39302112029: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.84779614325: [0.5909090909091, 0.4090909090909], 0.16092745638: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.82483930211: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.92217630854: [0.0454545454545, 0.9545454545455], 0.31611570248: [0.0454545454545, 
0.9545454545455], 0.99288337925: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.76767676768: [0.3333333333333, 0.6666666666667], 0.58310376492: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.79729109275: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.26170798898: [0.3636363636364, 0.6363636363636], 0.4738292011: [0.3636363636364, 0.6363636363636], 0.90817263545: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.71349862259: [0.7272727272727, 0.2727272727273], 0.93939393939: [0.0], 0.35996326905: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.62258953168: [0.7272727272727, 0.2727272727273], 0.13131313131: [0.3333333333333, 0.6666666666667], 0.09022038568: [0.5909090909091, 0.4090909090909], 0.5805785124: [0.6818181818182, 0.3181818181818], 0.91643709826: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.86616161616: [0.8333333333333, 0.1666666666667], 0.40955004591: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.3608815427: [0.1818181818182, 0.8181818181818], 0.80096418733: [0.0454545454545, 0.9545454545455], 0.67975206612: [0.0454545454545, 0.9545454545455], 0.96602387512: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.81106519743: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.6632231405: [0.8636363636364, 0.1363636363636], 0.41046831956: [0.7272727272727, 0.2727272727273], 0.54729109275: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.04522497704: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.12603305785: [0.6818181818182, 0.3181818181818], 0.19490358127: [0.0454545454545, 0.9545454545455], 0.43434343434: [0.3333333333333, 0.6666666666667], 0.84228650138: [0.2272727272727, 0.7727272727273], 0.75137741047: [0.2272727272727, 0.7727272727273], 0.1994949495: [0.8333333333333, 0.1666666666667], 
0.69605142332: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.30303030303: [0.0], 0.15082644628: [0.5909090909091, 0.4090909090909], 0.38567493113: [0.0909090909091, 0.9090909090909], 0.72727272727: [0.0], 0.29591368228: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.28007346189: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.17561983471: [0.2272727272727, 0.7727272727273], 0.91092745638: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.60514233242: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.63567493113: [0.5909090909091, 0.4090909090909], 0.5383379247: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.29683195592: [0.2272727272727, 0.7727272727273], 0.24908172635: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.49242424242: [0.5], 0.8044077135: [0.7272727272727, 0.2727272727273], 0.76423324151: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.34641873278: [0.0454545454545, 0.9545454545455], 0.87603305785: [0.1818181818182, 0.8181818181818], 0.3406795225: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.29201101928: [0.3636363636364, 0.6363636363636], 0.73668503214: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.30486685032: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.75688705234: [0.5909090909091, 0.4090909090909], 0.18089990817: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.21946740129: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.09756657484: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.17079889807: [0.3636363636364, 0.6363636363636], 0.50413223141: [0.3636363636364, 0.6363636363636], 0.95431588613: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.51905417815: [0.1060606060606, 0.5606060606061, 
0.8939393939394, 0.4393939393939], 0.67676767677: [0.3333333333333, 0.6666666666667], 0.18939393939: [0.5], 0.43985307622: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.96900826446: [0.5909090909091, 0.4090909090909], 0.75665748393: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.75941230487: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.56473829201: [0.3636363636364, 0.6363636363636], 0.76492194674: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.27662993572: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.23898071625: [0.8636363636364, 0.1363636363636], 0.85858585859: [0.6666666666667, 0.3333333333333], 0.54178145087: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.47222222222: [0.8333333333333, 0.1666666666667], 0.42355371901: [0.5909090909091, 0.4090909090909], 0.03512396694: [0.6818181818182, 0.3181818181818], 0.32621671258: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.27754820937: [0.6818181818182, 0.3181818181818], 0.45179063361: [0.1818181818182, 0.8181818181818], 0.97727272727: [0.5], 0.36914600551: [0.4545454545455, 0.5454545454545], 0.71625344353: [0.3636363636364, 0.6363636363636], 0.06795224977: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.79797979798: [0.6666666666667, 0.3333333333333], 0.69628099174: [0.5909090909091, 0.4090909090909], 0.59894398531: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.01308539945: [0.0454545454545, 0.9545454545455], 0.94880624426: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.6733241506: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.06818181818: [0.5], 0.59504132231: [0.3636363636364, 0.6363636363636], 0.63636363636: [0.0], 0.82369146006: [0.4545454545455, 0.5454545454545], 0.23415977961: [0.0909090909091, 0.9090909090909], 0.60330578512: 
[0.1818181818182, 0.8181818181818], 0.13062442608: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.94490358127: [0.4545454545455, 0.5454545454545], 0.86157024793: [0.0454545454545, 0.9545454545455], 0.95867768595: [0.3636363636364, 0.6363636363636], 0.71900826446: [0.0909090909091, 0.9090909090909], 0.15541781451: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.03213957759: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.02318640955: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.05348943985: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.55303030303: [0.5], 0.81818181818: [0.0], 0.50137741047: [0.7272727272727, 0.2727272727273], 0.65404040404: [0.8333333333333, 0.1666666666667], 0.82208448118: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.40335169881: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.0948117539: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.90289256198: [0.2272727272727, 0.7727272727273], 0.25734618916: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.34894398531: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.30027548209: [0.1818181818182, 0.8181818181818], 0.13682277319: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.0826446281: [0.0909090909091, 0.9090909090909], 0.5585399449: [0.0454545454545, 0.9545454545455], 0.88888888889: [0.6666666666667, 0.3333333333333], 0.2479338843: [0.4545454545455, 0.5454545454545], 0.55555555556: [0.3333333333333, 0.6666666666667], 0.97704315886: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.34986225895: [0.7272727272727, 0.2727272727273], 0.15059687787: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.00941230487: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.25252525253: 
[0.3333333333333, 0.6666666666667], 0.60261707989: [0.8636363636364, 0.1363636363636], 0.39944903581: [0.4545454545455, 0.5454545454545], 0.94421487603: [0.6818181818182, 0.3181818181818], 0.73209366391: [0.6818181818182, 0.3181818181818], 0.17355371901: [0.0909090909091, 0.9090909090909], 0.10101010101: [0.3333333333333, 0.6666666666667], 0.88544536272: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.70247933884: [0.4545454545455, 0.5454545454545], 0.58126721763: [0.4545454545455, 0.5454545454545], 0.14531680441: [0.2272727272727, 0.7727272727273], 0.7339302112: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.75045913682: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.85514233242: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.53925619835: [0.2272727272727, 0.7727272727273], 0.21877869605: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.49977043159: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.87534435262: [0.8636363636364, 0.1363636363636], 0.43181818182: [0.5], 0.01492194674: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.97910927456: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.28581267218: [0.0454545454545, 0.9545454545455], 0.74380165289: [0.7272727272727, 0.2727272727273], 0.07001836547: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.48140495868: [0.8636363636364, 0.1363636363636], 0.61547291093: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.88820018366: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.44077134986: [0.7272727272727, 0.2727272727273], 0.85399449036: [0.4545454545455, 0.5454545454545], 0.18916437098: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.16161616162: [0.3333333333333, 0.6666666666667], 0.32966023875: [0.6060606060606, 0.9393939393939, 
0.3939393939394, 0.0606060606061], 0.14049586777: [0.3636363636364, 0.6363636363636], 0.71464646465: [0.8333333333333, 0.1666666666667], 0.43365472911: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.86776859504: [0.3636363636364, 0.6363636363636], 0.96143250689: [0.0909090909091, 0.9090909090909], 0.47658402204: [0.0909090909091, 0.9090909090909], 0.21395775941: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.99632690542: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.83310376492: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.37924701561: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.96349862259: [0.2272727272727, 0.7727272727273], 0.3305785124: [0.1818181818182, 0.8181818181818], 0.70179063361: [0.6818181818182, 0.3181818181818], 0.05968778696: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.63820018366: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.19834710744: [0.7272727272727, 0.2727272727273], 0.88636363636: [0.5], 0.6935261708: [0.8636363636364, 0.1363636363636], 0.66850321396: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.03581267218: [0.4545454545455, 0.5454545454545], 0.20867768595: [0.8636363636364, 0.1363636363636], 0.76239669422: [0.6818181818182, 0.3181818181818], 0.73737373737: [0.3333333333333, 0.6666666666667], 0.72359963269: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.41161616162: [0.8333333333333, 0.1666666666667], 0.73484848485: [0.5], 0.3629476584: [0.5909090909091, 0.4090909090909], 0.26561065197: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.00734618916: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.84848484849: [0.0], 0.07713498623: [0.7272727272727, 0.2727272727273], 0.01928374656: [0.3636363636364, 0.6363636363636], 0.11753902663: [0.6060606060606, 0.9393939393939, 
0.3939393939394, 0.0606060606061], 0.46120293848: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.57506887052: [0.5909090909091, 0.4090909090909], 0.8347107438: [0.7272727272727, 0.2727272727273], 0.2277318641: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.26652892562: [0.2272727272727, 0.7727272727273], 0.17906336088: [0.1818181818182, 0.8181818181818], 0.92676767677: [0.8333333333333, 0.1666666666667], 0.98737373737: [0.8333333333333, 0.1666666666667], 0.46212121212: [0.5], 0.11409550046: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.20385674931: [0.0909090909091, 0.9090909090909], 0.48668503214: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.3103764922: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.78443526171: [0.8636363636364, 0.1363636363636], 0.22865013774: [0.7272727272727, 0.2727272727273], 0.54269972452: [0.1818181818182, 0.8181818181818], 0.59779614325: [0.0909090909091, 0.9090909090909], 0.12511478421: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.88429752066: [0.4545454545455, 0.5454545454545], 0.24724517906: [0.6818181818182, 0.3181818181818], 0.81542699725: [0.1818181818182, 0.8181818181818], 0.69696969697: [0.0], 0.39026629936: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.3427456382: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.82575757576: [0.5], 0.91574839302: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.17470156107: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.48966942149: [0.6818181818182, 0.3181818181818], 0.59228650138: [0.7272727272727, 0.2727272727273], 0.05785123967: [0.1818181818182, 0.8181818181818], 0.82001836547: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.28925619835: [0.7272727272727, 0.2727272727273], 0.85606060606: [0.5], 0.73117539027: 
[0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.44191919192: [0.8333333333333, 0.1666666666667], 0.39325068871: [0.5909090909091, 0.4090909090909], 0.12121212121: [0.0], 0.06060606061: [0.0], 0.00757575758: [0.5], 0.06611570248: [0.4545454545455, 0.5454545454545], 0.0847107438: [0.2272727272727, 0.7727272727273], 0.33884297521: [0.4545454545455, 0.5454545454545], 0.6108815427: [0.6818181818182, 0.3181818181818], 0.49150596878: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.00482093664: [0.6818181818182, 0.3181818181818], 0.62534435262: [0.3636363636364, 0.6363636363636], 0.97153351699: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.77685950413: [0.3636363636364, 0.6363636363636], 0.71005509642: [0.0454545454545, 0.9545454545455], 0.88360881543: [0.6818181818182, 0.3181818181818], 0.61271808999: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.77525252525: [0.8333333333333, 0.1666666666667], 0.97520661157: [0.4545454545455, 0.5454545454545], 0.18847566575: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.71189164371: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.95500459137: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.87786960514: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.88613406795: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.16460055096: [0.0454545454545, 0.9545454545455], 0.46946740129: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.56106519743: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.42079889807: [0.8636363636364, 0.1363636363636], 0.73278236915: [0.4545454545455, 0.5454545454545], 0.97359963269: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.03971533517: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.06726354454: [0.469696969697, 
0.8030303030303, 0.1969696969697, 0.530303030303], 0.69077134986: [0.2272727272727, 0.7727272727273], 0.59343434343: [0.8333333333333, 0.1666666666667], 0.57759412305: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.3730486685: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.46464646465: [0.3333333333333, 0.6666666666667], 0.8145087236: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.10399449036: [0.0454545454545, 0.9545454545455], 0.04591368228: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.31864095501: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.78719008265: [0.5909090909091, 0.4090909090909], 0.26997245179: [0.1818181818182, 0.8181818181818], 0.73461891644: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.93112947658: [0.0909090909091, 0.9090909090909], 0.20844811754: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.93595041322: [0.8636363636364, 0.1363636363636], 0.51698806244: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.07988980716: [0.3636363636364, 0.6363636363636], 0.57231404959: [0.8636363636364, 0.1363636363636], 0.72451790634: [0.1818181818182, 0.8181818181818], 0.17837465565: [0.8636363636364, 0.1363636363636], 0.96694214876: [0.1818181818182, 0.8181818181818], 0.61616161616: [0.3333333333333, 0.6666666666667], 0.67148760331: [0.6818181818182, 0.3181818181818], 0.96625344353: [0.8636363636364, 0.1363636363636], 0.3023415978: [0.5909090909091, 0.4090909090909], 0.45385674931: [0.5909090909091, 0.4090909090909], 0.52066115703: [0.4545454545455, 0.5454545454545], 0.40059687787: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.24426078972: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.14876033058: [0.1818181818182, 0.8181818181818], 0.91299357208: [0.1060606060606, 0.5606060606061, 0.8939393939394, 
0.4393939393939], 0.40151515152: [0.5], 0.22222222222: [0.3333333333333, 0.6666666666667], 0.25550964187: [0.0454545454545, 0.9545454545455], 0.39577594123: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.04338842975: [0.0454545454545, 0.9545454545455], 0.47865013774: [0.2272727272727, 0.7727272727273], 0.97451790634: [0.6818181818182, 0.3181818181818], 0.45110192838: [0.8636363636364, 0.1363636363636], 0.78787878788: [0.0], 0.55486685032: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.94329660239: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.04958677686: [0.3636363636364, 0.6363636363636], 0.45638200184: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.29935720845: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.47015610652: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.49494949495: [0.3333333333333, 0.6666666666667], 0.44628099174: [0.0909090909091, 0.9090909090909], 0.88062442608: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.57575757576: [0.0], 0.30211202939: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.99908172635: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.24173553719: [0.5909090909091, 0.4090909090909], 0.94605142332: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.82300275482: [0.6818181818182, 0.3181818181818], 0.14439853076: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.50688705234: [0.0909090909091, 0.9090909090909], 0.63292011019: [0.8636363636364, 0.1363636363636], 0.83126721763: [0.0454545454545, 0.9545454545455], 0.96877869605: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.48117539027: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.51515151515: [0.0], 0.7741046832: [0.7272727272727, 0.2727272727273], 0.16919191919: [0.8333333333333, 
0.1666666666667], 0.93319559229: [0.2272727272727, 0.7727272727273], 0.60996326905: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.38131313131: [0.8333333333333, 0.1666666666667], 0.51147842057: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.3326446281: [0.5909090909091, 0.4090909090909], 0.89370982553: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.2782369146: [0.4545454545455, 0.5454545454545], 0.43089990817: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.04797979798: [0.8333333333333, 0.1666666666667], 0.02961432507: [0.5909090909091, 0.4090909090909], 0.85032139578: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.9696969697: [0.0], 0.65564738292: [0.3636363636364, 0.6363636363636], 0.58884297521: [0.0454545454545, 0.9545454545455], 0.81473829201: [0.8636363636364, 0.1363636363636], 0.9494949495: [0.6666666666667, 0.3333333333333], 0.70454545455: [0.5], 0.15817263545: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.75482093664: [0.1818181818182, 0.8181818181818], 0.5906795225: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.05716253444: [0.8636363636364, 0.1363636363636], 0.78076216713: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.13429752066: [0.0454545454545, 0.9545454545455], 0.3085399449: [0.4545454545455, 0.5454545454545], 0.87167125804: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.51446280992: [0.5909090909091, 0.4090909090909], 0.81726354454: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.92837465565: [0.3636363636364, 0.6363636363636], 0.66574839302: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.3709825528: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.15909090909: [0.5], 0.50045913682: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 
0.56955922865: [0.2272727272727, 0.7727272727273], 0.85330578512: [0.6818181818182, 0.3181818181818], 0.11845730028: [0.1818181818182, 0.8181818181818], 0.3124426079: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.35537190083: [0.0909090909091, 0.9090909090909], 0.15335169881: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.95592286501: [0.7272727272727, 0.2727272727273], 0.2580348944: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.45936639119: [0.6818181818182, 0.3181818181818], 0.92561983471: [0.7272727272727, 0.2727272727273], 0.48209366391: [0.1818181818182, 0.8181818181818], 0.4536271809: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.80991735537: [0.0909090909091, 0.9090909090909], 0.17814508724: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.25895316804: [0.7272727272727, 0.2727272727273], 0.67056932966: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.45454545455: [0.0], 0.90564738292: [0.8636363636364, 0.1363636363636], 0.98898071625: [0.3636363636364, 0.6363636363636], 0.74494949495: [0.8333333333333, 0.1666666666667], 0.02662993572: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.55027548209: [0.6818181818182, 0.3181818181818], 0.05693296602: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.64944903581: [0.0454545454545, 0.9545454545455], 0.55211202939: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.24632690542: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.64646464647: [0.3333333333333, 0.6666666666667], 0.68870523416: [0.0909090909091, 0.9090909090909], 0.10032139578: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.69329660239: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.94696969697: [0.5], 0.34090909091: [0.5], 0.14784205693: [0.6060606060606, 0.9393939393939, 
0.3939393939394, 0.0606060606061], 0.02203856749: [0.0909090909091, 0.9090909090909], 0.5080348944: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.08379247016: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.35261707989: [0.3636363636364, 0.6363636363636], 0.14325068871: [0.0909090909091, 0.9090909090909], 0.43916437098: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.39049586777: [0.8636363636364, 0.1363636363636], 0.85789715335: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.94123048669: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.61363636364: [0.5], 0.16804407714: [0.7272727272727, 0.2727272727273], 0.57483930211: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.48875114784: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.63016528926: [0.2272727272727, 0.7727272727273], 0.53282828283: [0.8333333333333, 0.1666666666667], 0.24150596878: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.10858585859: [0.8333333333333, 0.1666666666667], 0.09641873278: [0.4545454545455, 0.5454545454545], 0.10651974288: [0.2121212121212, 0.1212121212121, 0.7878787878788, 0.8787878787879], 0.67401285583: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.74655647383: [0.3636363636364, 0.6363636363636], 0.2883379247: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.4839302112: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.21763085399: [0.4545454545455, 0.5454545454545], 0.0544077135: [0.2272727272727, 0.7727272727273], 0.09573002755: [0.6818181818182, 0.3181818181818], 0.70638200184: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.27180899908: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.51170798898: [0.8636363636364, 0.1363636363636], 0.23622589532: [0.2272727272727, 0.7727272727273], 
0.99380165289: [0.2272727272727, 0.7727272727273], 0.84756657484: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.65289256198: [0.7272727272727, 0.2727272727273], 0.0624426079: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.13888888889: [0.8333333333333, 0.1666666666667], 0.32070707071: [0.8333333333333, 0.1666666666667], 0.27203856749: [0.5909090909091, 0.4090909090909], 0.39393939394: [0.0], 0.94674012856: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.54201101928: [0.8636363636364, 0.1363636363636], 0.93227731864: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.46763085399: [0.0454545454545, 0.9545454545455], 0.37029384757: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.4132231405: [0.3636363636364, 0.6363636363636], 0.72910927456: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.69880624426: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.53443526171: [0.3636363636364, 0.6363636363636], 0.37121212121: [0.5], 0.23140495868: [0.3636363636364, 0.6363636363636], 0.82828282828: [0.6666666666667, 0.3333333333333], 0.36547291093: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.63360881543: [0.1818181818182, 0.8181818181818], 0.87809917355: [0.5909090909091, 0.4090909090909], 0.20936639119: [0.1818181818182, 0.8181818181818], 0.80555555556: [0.8333333333333, 0.1666666666667], 0.84573002755: [0.1818181818182, 0.8181818181818], 0.91919191919: [0.6666666666667, 0.3333333333333], 0.66597796143: [0.5909090909091, 0.4090909090909], 0.07828282828: [0.8333333333333, 0.1666666666667], 0.56864095501: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.80716253444: [0.3636363636364, 0.6363636363636], 0.26446280992: [0.0909090909091, 0.9090909090909], 0.12878787879: [0.5], 0.53719008265: [0.0909090909091, 0.9090909090909], 0.25183654729: [0.7424242424242, 0.0757575757576, 
0.9242424242424, 0.2575757575758], 0.90633608815: [0.1818181818182, 0.8181818181818], 0.49219467401: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.87878787879: [0.0], 0.27272727273: [0.0], 0.44742883379: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.68985307622: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.39876033058: [0.6818181818182, 0.3181818181818], 0.77318640955: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.22704315886: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.03696051423: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.78971533517: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.44834710744: [0.2272727272727, 0.7727272727273], 0.54935720845: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.35101010101: [0.8333333333333, 0.1666666666667], 0.09848484849: [0.5], 0.82759412305: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.79269972452: [0.6818181818182, 0.3181818181818], 0.4979338843: [0.0454545454545, 0.9545454545455], 0.46005509642: [0.4545454545455, 0.5454545454545], 0.76515151515: [0.5], 0.91850321396: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.1230486685: [0.7575757575758, 0.2424242424242, 0.5757575757576, 0.4242424242424], 0.4435261708: [0.3636363636364, 0.6363636363636], 0.59136822773: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.19742883379: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.71258034894: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.93663911846: [0.1818181818182, 0.8181818181818], 0.69421487603: [0.1818181818182, 0.8181818181818], 0.5282369146: [0.0454545454545, 0.9545454545455], 0.83746556474: [0.3636363636364, 0.6363636363636], 0.21602387512: [0.1060606060606, 0.5606060606061, 0.8939393939394, 
0.4393939393939], 0.98622589532: [0.7272727272727, 0.2727272727273], 0.23875114784: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.54476584022: [0.5909090909091, 0.4090909090909], 0.84136822773: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.53007346189: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.2803030303: [0.5], 0.77961432507: [0.0909090909091, 0.9090909090909], 0.62809917355: [0.0909090909091, 0.9090909090909], 0.24242424242: [0.0], 0.52249770432: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.37855831038: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.45844811754: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.32988980716: [0.8636363636364, 0.1363636363636], 0.21694214876: [0.6818181818182, 0.3181818181818], 0.13774104683: [0.7272727272727, 0.2727272727273], 0.98461891644: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.7036271809: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.42814508724: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.83585858586: [0.8333333333333, 0.1666666666667], 0.50895316804: [0.2272727272727, 0.7727272727273], 0.11776859504: [0.8636363636364, 0.1363636363636], 0.28213957759: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.21120293848: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.37373737374: [0.3333333333333, 0.6666666666667], 0.32506887052: [0.0909090909091, 0.9090909090909], 0.55280073462: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.4777318641: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.42906336088: [0.6818181818182, 0.3181818181818], 0.4233241506: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.18732782369: [0.4545454545455, 0.5454545454545], 0.57208448118: [0.6060606060606, 
0.9393939393939, 0.3939393939394, 0.0606060606061], 0.65197428834: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.58516988062: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.76147842057: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.72635445363: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.21212121212: [0.0], 0.74288337925: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.53168044077: [0.7272727272727, 0.2727272727273], 0.68434343434: [0.8333333333333, 0.1666666666667], 0.62924701561: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.2601010101: [0.8333333333333, 0.1666666666667], 0.82552800735: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.67424242424: [0.5], 0.75413223141: [0.8636363636364, 0.1363636363636], 0.91666666667: [0.5], 0.40702479339: [0.0454545454545, 0.9545454545455], 0.30968778696: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.61340679523: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.36363636364: [0.0], 0.0404040404: [0.3333333333333, 0.6666666666667], 0.92470156107: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.99655647383: [0.8636363636364, 0.1363636363636], 0.62167125804: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.0645087236: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.31060606061: [0.5], 0.20110192838: [0.3636363636364, 0.6363636363636], 0.70707070707: [0.6666666666667, 0.3333333333333], 0.07621671258: [0.2121212121212, 0.1212121212121, 0.7878787878788, 0.8787878787879], 0.51239669422: [0.1818181818182, 0.8181818181818], 0.40886134068: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.79178145087: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.36019283747: [0.8636363636364, 0.1363636363636], 
0.1129476584: [0.0909090909091, 0.9090909090909], 0.61157024793: [0.4545454545455, 0.5454545454545], 0.12855831038: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.6512855831: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.90541781451: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.40404040404: [0.3333333333333, 0.6666666666667], 0.84504132231: [0.8636363636364, 0.1363636363636], 0.64393939394: [0.5], 0.28282828283: [0.3333333333333, 0.6666666666667], 0.78512396694: [0.1818181818182, 0.8181818181818], 0.60238751148: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.95707070707: [0.8333333333333, 0.1666666666667], 0.33815426997: [0.6818181818182, 0.3181818181818], 0.72658402204: [0.5909090909091, 0.4090909090909], 0.70431588613: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.48484848485: [0.0], 0.19674012856: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.92401285583: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.72015610652: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.14807162534: [0.8636363636364, 0.1363636363636], 0.38774104683: [0.2272727272727, 0.7727272727273], 0.2904040404: [0.8333333333333, 0.1666666666667], 0.22153351699: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.33333333333: [0.0], 0.43732782369: [0.0454545454545, 0.9545454545455], 0.33999081726: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.43158861341: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.38292011019: [0.3636363636364, 0.6363636363636], 0.16712580349: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.93870523416: [0.5909090909091, 0.4090909090909], 0.91460055096: [0.4545454545455, 0.5454545454545], 0.0523415978: [0.0909090909091, 0.9090909090909], 0.7238292011: [0.8636363636364, 0.1363636363636], 
0.02685950413: [0.8636363636364, 0.1363636363636], 0.75390266299: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.12029384757: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.86501377411: [0.7272727272727, 0.2727272727273], 0.77066115703: [0.0454545454545, 0.9545454545455], 0.19191919192: [0.3333333333333, 0.6666666666667], 0.33516988062: [0.4242424242424, 0.2424242424242, 0.5757575757576, 0.7575757575758], 0.70087235996: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.08723599633: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.79545454546: [0.5], 0.55096418733: [0.4545454545455, 0.5454545454545], 0.12052341598: [0.5909090909091, 0.4090909090909], 0.56198347107: [0.7272727272727, 0.2727272727273], 0.64577594123: [0.7424242424242, 0.0757575757576, 0.9242424242424, 0.2575757575758], 0.60537190083: [0.5909090909091, 0.4090909090909], 0.31795224977: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.26928374656: [0.8636363636364, 0.1363636363636], 0.81198347107: [0.2272727272727, 0.7727272727273], 0.23530762167: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.81749311295: [0.5909090909091, 0.4090909090909], 0.18663911846: [0.6818181818182, 0.3181818181818], 0.79522497704: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.90909090909: [0.0], 0.58241505969: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.36753902663: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.21143250689: [0.5909090909091, 0.4090909090909], 0.31313131313: [0.3333333333333, 0.6666666666667], 0.01652892562: [0.7272727272727, 0.2727272727273], 0.68158861341: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152], 0.41712580349: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.36845730028: [0.6818181818182, 0.3181818181818], 0.11501377411: [0.2272727272727, 
0.7727272727273], 0.36271808999: [0.3030303030303, 0.030303030303, 0.6969696969697, 0.969696969697], 0.15702479339: [0.4545454545455, 0.5454545454545], 0.47107438017: [0.7272727272727, 0.2727272727273], 0.53076216713: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.85238751148: [0.1060606060606, 0.5606060606061, 0.8939393939394, 0.4393939393939], 0.41804407714: [0.2272727272727, 0.7727272727273], 0.63269054178: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.97635445363: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.18181818182: [0.0], 0.8448117539: [0.6060606060606, 0.9393939393939, 0.3939393939394, 0.0606060606061], 0.66046831956: [0.2272727272727, 0.7727272727273], 0.23966942149: [0.1818181818182, 0.8181818181818], 0.56313131313: [0.8333333333333, 0.1666666666667], 0.91391184573: [0.6818181818182, 0.3181818181818], 0.75757575758: [0.0], 0.46189164371: [0.1515151515152, 0.4848484848485, 0.8484848484848, 0.5151515151515], 0.96258034894: [0.6212121212121, 0.7121212121212, 0.2878787878788, 0.3787878787879], 0.89187327824: [0.0454545454545, 0.9545454545455], 0.22520661157: [0.0454545454545, 0.9545454545455], 0.80348943985: [0.1212121212121, 0.2121212121212, 0.7878787878788, 0.8787878787879], 0.87052341598: [0.0909090909091, 0.9090909090909], 0.54453627181: [0.969696969697, 0.3030303030303, 0.6969696969697, 0.030303030303], 0.65840220386: [0.0909090909091, 0.9090909090909], 0.99173553719: [0.0909090909091, 0.9090909090909], 0.12786960514: [0.469696969697, 0.8030303030303, 0.1969696969697, 0.530303030303], 0.80280073462: [0.9848484848485, 0.0151515151515, 0.3484848484848, 0.6515151515152]}
| 26,578.6
| 49,485
| 0.790598
| 16,444
| 132,893
| 6.388956
| 0.05242
| 0.002703
| 0.002085
| 0.01759
| 0.996631
| 0.995231
| 0.995231
| 0.993832
| 0.993832
| 0.992909
| 0
| 0.842216
| 0.061824
| 132,893
| 5
| 49,486
| 26,578.6
| 0.000441
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
db157b40e778caa58a61bbbc763cf3203e5e521a
| 135
|
py
|
Python
|
loldib/getratings/models/NA/na_zed/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_zed/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_zed/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from .na_zed_top import *
from .na_zed_jng import *
from .na_zed_mid import *
from .na_zed_bot import *
from .na_zed_sup import *
| 22.5
| 26
| 0.740741
| 25
| 135
| 3.6
| 0.36
| 0.333333
| 0.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185185
| 135
| 5
| 27
| 27
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e1eaae84cd8026e02e2ecbe43fe3fa29cb8114b6
| 14,785
|
py
|
Python
|
networks/raw_rnn.py
|
Fred1991/VAE-GMVAE
|
b7f123c86172710ac1f329e47e3470cb81ff3493
|
[
"Apache-2.0"
] | 193
|
2018-06-08T07:13:24.000Z
|
2022-03-05T03:27:42.000Z
|
networks/raw_rnn.py
|
Fred1991/VAE-GMVAE
|
b7f123c86172710ac1f329e47e3470cb81ff3493
|
[
"Apache-2.0"
] | 11
|
2019-01-10T10:41:30.000Z
|
2021-10-06T07:53:29.000Z
|
networks/raw_rnn.py
|
Fred1991/VAE-GMVAE
|
b7f123c86172710ac1f329e47e3470cb81ff3493
|
[
"Apache-2.0"
] | 34
|
2018-10-18T09:13:03.000Z
|
2021-11-15T11:31:56.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 13 11:17:53 2018
@author: psanch
"""
import tensorflow as tf
import utils.constants as const
from utils.utils import get1toT
from networks.base_raw_rnn import BaseRawRNN
from networks.dense_net import DenseNet
import utils.utils as utils
class RawRNNConcat(BaseRawRNN):
    """raw_rnn-based recurrent net whose input at step t is the data frame
    concatenated with the z sampled at step t-1.

    Each step maps the cell output to a diagonal Gaussian (mean, var),
    draws z via the reparameterization trick, and emits (mean, var, z).
    """
    def __init__(self, cell_type, state_dim, input_, max_time, output_dim, reuse, drop_rate_x=0.,
                 kinit=tf.contrib.layers.xavier_initializer(),
                 bias_init=tf.constant_initializer(0.01), var_shared=False):
        """
        Args:
            cell_type: RNN cell type understood by BaseRawRNN.
            state_dim: dimensionality of the RNN cell state.
            input_: input tensor (layout defined by BaseRawRNN).
            max_time: number of time steps to unroll.
            output_dim: dimensionality of the emitted mean/var/z.
            reuse: variable-scope reuse flag.
            drop_rate_x: dropout rate applied to the data part of the RNN input.
            kinit: kernel initializer for the dense output heads.
            bias_init: bias initializer for the dense output heads.
            var_shared: if True, use one shared variance variable instead of a
                per-step dense head.
        """
        super().__init__(input_, max_time, output_dim, cell_type, state_dim, reuse, kinit, bias_init)
        # RNN input is [x_t, z_{t-1}], hence data dim plus output dim.
        self.rnn_input_dim = self.input_dim + self.output_dim
        self.drop_rate_x = drop_rate_x
        self.act_out_mean = None  # linear mean head
        self.act_out_var = tf.nn.softplus  # softplus keeps variance positive
        self.var_shared = var_shared
        self.output_mean, self.output_var, self.output_z = self.my_build()
    def my_build(self):
        """Run the raw_rnn loop; return (means, variances, sampled z) tensors."""
        output_list, state_list = self.build(self.get_loop_fn())
        outputs_mean = output_list[0]
        outputs_var = output_list[1]
        outputs_z = output_list[2]
        states_all_c = state_list[0]
        states_all_h = state_list[1]
        print('Means: ', outputs_mean.get_shape().as_list())
        print('Vars: ', outputs_var.get_shape().as_list())
        print('Sampled z: ', outputs_z.get_shape().as_list())
        print('States c: ', states_all_c.get_shape().as_list())
        print('States h: ', states_all_h.get_shape().as_list())
        return outputs_mean, outputs_var, outputs_z
    def get_output_step(self, cell_output):
        """Map one cell output to (mean, var, z) with z = mean + sqrt(var)*eps."""
        with tf.variable_scope('mean', reuse=tf.AUTO_REUSE):
            mean_net = DenseNet(input_=cell_output,
                                hidden_dim=-1,
                                output_dim=self.output_dim,
                                num_layers=1,
                                transfer_fct=None,
                                act_out=self.act_out_mean,
                                reuse=tf.AUTO_REUSE,
                                kinit=self.kinit,
                                bias_init=self.bias_init)
            mean = mean_net.output
        with tf.variable_scope('var', reuse=tf.AUTO_REUSE):
            if(self.var_shared):
                # Single learned variance vector, tiled across the batch.
                var = utils.get_variable(self.output_dim, 'var')
                var = tf.tile(var, [self.batch_size, 1])# [batch_size, var.dim]
            else:
                var_net = DenseNet(input_=cell_output,
                                   hidden_dim=-1,
                                   output_dim=self.output_dim,
                                   num_layers=1,
                                   transfer_fct=None,
                                   act_out=self.act_out_var,
                                   reuse=tf.AUTO_REUSE,
                                   kinit=self.kinit,
                                   bias_init=self.bias_init)
                var = var_net.output
        # Reparameterization trick: z = mean + sqrt(var) * eps, eps ~ N(0, I).
        eps = tf.random_normal((self.batch_size, self.output_dim), 0, 1, dtype=tf.float32)
        current_z = tf.add(mean, tf.multiply(tf.sqrt(var), eps))
        return mean, var, current_z
    def get_next_input(self, x_time, current_z):
        """Build the next RNN input: [dropout(x_t), projected z].
        NOTE(review): z is passed through an extra sigmoid dense layer before
        concatenation — presumably to bound its range; confirm intent.
        """
        with tf.variable_scope('aux', reuse=tf.AUTO_REUSE):
            aux_net = DenseNet(input_=current_z,
                               hidden_dim=-1,
                               output_dim=self.output_dim,
                               num_layers=1,
                               transfer_fct=None,
                               act_out=tf.nn.sigmoid,
                               reuse=tf.AUTO_REUSE)
            current_z = aux_net.output
        return tf.concat([tf.layers.dropout(x_time, rate=self.drop_rate_x), current_z],1)
    def get_loop_fn(self):
        """Return the loop_fn implementing tf.nn.raw_rnn's contract:
        (time, cell_output, cell_state, loop_state) ->
        (finished, next_input, next_cell_state, emit_output, next_loop_state).
        """
        inputs_ta, output_ta = self.get_tensor_arrays(self.input_)
        def loop_fn(time, cell_output, cell_state, loop_state):
            elements_finished = (time >= self.max_time)
            finished = tf.reduce_all(elements_finished)
            if cell_output is None:
                '''
                time == 0, used for initialization before first call to cell
                This is just to defined the desired shape of the tensors
                '''
                next_cell_state = self.cell.zero_state(self.batch_size, tf.float32)
                '''
                the emit_output in this case tells TF how future emits look
                For the first call to loop_fn the emit_output corresponds to
                the emit_structure which is then used to determine the size of
                the zero_tensor for the emit_ta (defaults to cell.output_size).
                '''
                emit_output = tf.tuple([tf.zeros([self.output_dim]), tf.zeros([self.output_dim]),
                                        tf.zeros([self.output_dim])])
                # tf.zeros([config.batch_size, output_dim], dtype=tf.float32) # tf.zeros([output_dim])
                next_loop_state = output_ta
                '''
                this is the initial step, i.e. there is no output from a previous time step, what we feed here
                can highly depend on the data. In this case we just assign the actual input in the first time step.
                '''
                # First step has no previous z: feed zeros alongside dropout(x_0).
                init_z = tf.zeros((self.batch_size, self.output_dim), dtype=tf.float32)
                #init_z = tf.random_normal((config.batch_size, output_dim), 0, 1, dtype=tf.float32)
                x_time = tf.layers.dropout(inputs_ta.read(time), rate= self.drop_rate_x)
                next_in = tf.concat([x_time, init_z],1)
            else:
                '''
                t > 0, called right after call to cell, i.e. cell_output is the output from time t-1.
                here you can do whatever ou want with cell_output before assigning it to emit_output.
                In this case, we don't do anything pass the last state to the next
                '''
                next_cell_state = cell_state
                next_loop_state = self.get_next_loop_state(loop_state, cell_state, time)
                '''Next Output'''
                # cell_output = tf.Print(cell_output,[cell_output], message="cell_output")
                mean, var, current_z = self.get_output_step(cell_output)
                # current_z = tf.Print(current_z,[current_z], message="current z")
                emit_output = tf.tuple([mean, var, current_z])
                # tf.tuple([mean, var]) tf.concat([mean, var],1) cell_output mean
                next_in = tf.cond(finished,
                                  lambda: tf.zeros([self.batch_size, self.rnn_input_dim], dtype=tf.float32),
                                  lambda: self.get_next_input(inputs_ta.read(time), current_z) )
            # NOTE(review): when cell_output is not None, next_in is already
            # guarded by the tf.cond above, so this second cond is redundant
            # (but harmless) in that branch.
            next_input = tf.cond(finished,
                                 lambda: tf.zeros([self.batch_size, self.rnn_input_dim], dtype=tf.float32),
                                 lambda: next_in)
            next_input.set_shape([None, self.rnn_input_dim])
            return (finished, next_input, next_cell_state, emit_output, next_loop_state)
        return loop_fn
'''
Inference Network for TVAE1
'''
'''
Generator Network for TVAE
'''
class RawRNNGener(BaseRawRNN):
    """Generator-side raw_rnn: feeds the sampled z of step t-1 back as the
    RNN input for step t.

    Each step maps the cell output to a diagonal Gaussian (mean, var) and
    samples z via the reparameterization trick. In "sample" mode (2-D
    ``input_``) the generated z drives the recurrence; otherwise the provided
    time-major inputs are fed at every step.
    """
    def __init__(self, cell_type, state_dim, input_, max_time, output_dim, reuse,
                 kinit=tf.contrib.layers.xavier_initializer(),
                 bias_init=tf.constant_initializer(0.01), var_shared=False):
        """
        Args:
            cell_type: RNN cell type understood by BaseRawRNN.
            state_dim: dimensionality of the RNN cell state.
            input_: either a single frame (rank 2 -> sample mode) or a
                time-major sequence tensor.
            max_time: number of time steps to unroll.
            output_dim: dimensionality of the emitted mean/var/z.
            reuse: variable-scope reuse flag.
            kinit: kernel initializer for the dense output heads.
            bias_init: bias initializer for the dense output heads.
            var_shared: if True, use one shared variance variable instead of a
                per-step dense head.
        """
        super().__init__(input_, max_time, output_dim, cell_type, state_dim, reuse, kinit, bias_init)
        # The generator's RNN input is just the previous z.
        self.rnn_input_dim = self.output_dim
        self.act_out_mean = None  # linear mean head
        self.act_out_var = tf.nn.softplus  # softplus keeps variance positive
        self.var_shared = var_shared
        # Rank-2 input means a single starting frame: sampling mode.
        self.is_sample = len(input_.get_shape().as_list())==2
        self.is_time = not self.is_sample
        self.output_mean, self.output_var, self.output_z = self.my_build()
    def my_build(self):
        """Run the raw_rnn loop; return (means, variances, sampled z) tensors.

        get1toT prepends the step-0 value (zero mean, unit variance, and the
        initial input as z) to the emitted sequence.
        """
        loop_fn, inputs_ta = self.get_loop_fn()
        output_list, state_list = self.build(loop_fn)
        # Fix: the original also assigned the raw output_list entries and an
        # unconditional outputs_z here; all were immediately overwritten, so
        # the dead assignments were removed.
        outputs_mean = get1toT(output_list[0], tf.zeros([self.batch_size, self.output_dim]), self.max_time)
        outputs_var = get1toT(output_list[1], tf.ones([self.batch_size, self.output_dim]), self.max_time)
        if(self.is_sample):
            outputs_z = get1toT(output_list[2], self.input_, self.max_time)
        else:
            outputs_z = get1toT(output_list[2], inputs_ta.read(0), self.max_time)
        states_all_c = state_list[0]
        states_all_h = state_list[1]
        print('Means: ', outputs_mean.get_shape().as_list())
        print('Vars: ', outputs_var.get_shape().as_list())
        print('Sampled z: ', outputs_z.get_shape().as_list())
        print('States c: ', states_all_c.get_shape().as_list())
        print('States h: ', states_all_h.get_shape().as_list())
        return outputs_mean, outputs_var, outputs_z
    def get_output_step(self, cell_output):
        """Map one cell output to (mean, var, z) with z = mean + sqrt(var)*eps."""
        with tf.variable_scope('mean', reuse=tf.AUTO_REUSE):
            mean_net = DenseNet(input_=cell_output,
                                hidden_dim=-1,
                                output_dim=self.output_dim,
                                num_layers=1,
                                transfer_fct=None,
                                act_out=self.act_out_mean,
                                reuse=tf.AUTO_REUSE,
                                kinit=self.kinit,
                                bias_init=self.bias_init)
            mean = mean_net.output
        with tf.variable_scope('var', reuse=tf.AUTO_REUSE):
            if(self.var_shared):
                # Single learned variance vector, tiled across the batch.
                var = utils.get_variable(self.output_dim, 'var')
                var = tf.tile(var, [self.batch_size, 1])# [batch_size, var.dim]
            else:
                var_net = DenseNet(input_=cell_output,
                                   hidden_dim=-1,
                                   output_dim=self.output_dim,
                                   num_layers=1,
                                   transfer_fct=None,
                                   act_out=self.act_out_var,
                                   reuse=tf.AUTO_REUSE,
                                   kinit=self.kinit,
                                   bias_init=self.bias_init)
                var = var_net.output
        # Reparameterization trick: z = mean + sqrt(var) * eps, eps ~ N(0, I).
        eps = tf.random_normal((self.batch_size, self.output_dim), 0, 1, dtype=tf.float32)
        current_z = tf.add(mean, tf.multiply(tf.sqrt(var), eps))
        return mean, var, current_z
    def get_next_input(self, x_time, current_z):
        """Unused hook: the generator feeds current_z back directly in loop_fn."""
        return
    def get_loop_fn(self):
        """Return (loop_fn, inputs_ta) for tf.nn.raw_rnn; loop_fn follows the
        raw_rnn contract: (time, cell_output, cell_state, loop_state) ->
        (finished, next_input, next_cell_state, emit_output, next_loop_state).
        """
        inputs_ta, output_ta = self.get_tensor_arrays(self.input_)
        def loop_fn(time, cell_output, cell_state, loop_state):
            elements_finished = (time >= self.max_time)
            finished = tf.reduce_all(elements_finished)
            if cell_output is None:
                '''
                time == 0, used for initialization before first call to cell
                This is just to defined the desired shape of the tensors
                '''
                next_cell_state = self.cell.zero_state(self.batch_size, tf.float32)
                '''
                the emit_output in this case tells TF how future emits look
                For the first call to loop_fn the emit_output corresponds to
                the emit_structure which is then used to determine the size of
                the zero_tensor for the emit_ta (defaults to cell.output_size).
                '''
                emit_output = tf.tuple([tf.zeros([self.output_dim]), tf.zeros([self.output_dim]),
                                        tf.zeros([self.output_dim])])
                # tf.zeros([config.batch_size, output_dim], dtype=tf.float32) # tf.zeros([output_dim])
                next_loop_state = output_ta
                '''
                this is the initial step, i.e. there is no output from a previous time step, what we feed here
                can highly depend on the data. In this case we just assign the actual input in the first time step.
                '''
                if(self.is_sample):
                    next_in = self.input_
                else:
                    next_in = inputs_ta.read(time)
            else:
                '''
                t > 0, called right after call to cell, i.e. cell_output is the output from time t-1.
                here you can do whatever ou want with cell_output before assigning it to emit_output.
                In this case, we don't do anything pass the last state to the next
                '''
                next_cell_state = cell_state
                next_loop_state = self.get_next_loop_state(loop_state, cell_state, time)
                '''Next Output'''
                # cell_output = tf.Print(cell_output,[cell_output], message="cell_output")
                mean, var, current_z = self.get_output_step(cell_output)
                # current_z = tf.Print(current_z,[current_z], message="current z")
                emit_output = tf.tuple([mean, var, current_z])
                # tf.tuple([mean, var]) tf.concat([mean, var],1) cell_output mean
                next_in = current_z
            # Sample mode recurses on the generated z; teacher-forced mode
            # always reads the provided inputs.
            if(self.is_sample):
                next_input = tf.cond(finished,
                                     lambda: tf.zeros([self.batch_size, self.rnn_input_dim], dtype=tf.float32),
                                     lambda: next_in)
            else:
                next_input = tf.cond(finished,
                                     lambda: tf.zeros([self.batch_size, self.rnn_input_dim], dtype=tf.float32),
                                     lambda: inputs_ta.read(time))
            next_input.set_shape([None, self.rnn_input_dim])
            return (finished, next_input, next_cell_state, emit_output, next_loop_state)
        return loop_fn, inputs_ta
| 44.399399
| 115
| 0.532296
| 1,796
| 14,785
| 4.101336
| 0.114699
| 0.041542
| 0.035297
| 0.020907
| 0.880396
| 0.872115
| 0.850665
| 0.847271
| 0.841841
| 0.841841
| 0
| 0.010573
| 0.379506
| 14,785
| 333
| 116
| 44.399399
| 0.792348
| 0.054785
| 0
| 0.798969
| 0
| 0
| 0.00948
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061856
| false
| 0
| 0.030928
| 0.005155
| 0.154639
| 0.051546
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c029fa47119b09a492f81e9ed9688ebd9e63ddd6
| 72,195
|
py
|
Python
|
tests/test_encode.py
|
ondiekisteven/pyiso8583
|
b90b8b3c0e141baf81ffb658ddc782bad66dde73
|
[
"MIT"
] | 11
|
2020-01-08T14:49:24.000Z
|
2020-07-30T14:49:29.000Z
|
tests/test_encode.py
|
ondiekisteven/pyiso8583
|
b90b8b3c0e141baf81ffb658ddc782bad66dde73
|
[
"MIT"
] | 2
|
2020-01-08T13:38:27.000Z
|
2020-06-15T13:06:00.000Z
|
tests/test_encode.py
|
ondiekisteven/pyiso8583
|
b90b8b3c0e141baf81ffb658ddc782bad66dde73
|
[
"MIT"
] | 3
|
2020-01-16T13:34:28.000Z
|
2020-02-01T14:10:31.000Z
|
import copy
import pickle
import iso8583
import iso8583.specs
import pytest
# Deep-copy the default spec so the per-test mutations below (data_enc,
# len_type, max_len, ...) never leak into iso8583.specs.default, which is
# shared module state.
spec = copy.deepcopy(iso8583.specs.default)
def test_EncodeError_exception():
    """Validate EncodeError class."""
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "ascii"
    spec["1"]["len_type"] = 0
    spec["1"]["max_len"] = 0
    decoded = {"t": ""}
    try:
        iso8583.encode(decoded, spec=spec)
    except iso8583.EncodeError as err:
        expected = "Field data is required according to specifications"
        assert err.doc_dec == decoded
        assert err.doc_enc == ({})
        assert err.msg == expected
        assert err.field == "h"
        assert err.args[0] == expected + ": field h"
def test_EncodeError_exception_pickle():
    """Validate EncodeError class with pickle."""
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "ascii"
    spec["1"]["len_type"] = 0
    spec["1"]["max_len"] = 0
    decoded = {"t": ""}
    try:
        iso8583.encode(decoded, spec=spec)
    except iso8583.EncodeError as err:
        # Round-trip the exception through pickle and compare attributes.
        clone = pickle.loads(pickle.dumps(err))
        assert err.doc_dec == clone.doc_dec
        assert err.doc_enc == clone.doc_enc
        assert err.msg == clone.msg
        assert err.field == clone.field
        assert err.args[0] == clone.args[0]
def test_non_string_field_keys():
    """
    Input dictionary contains non-string field keys.

    Every non-string key (int, float, tuple) must be reported in the
    EncodeError. The dots in the match patterns are regex wildcards
    standing in for the brackets/braces in the error text.
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"]["len_type"] = 2
    spec["2"]["max_len"] = 10
    spec["2"]["data_enc"] = "ascii"
    spec["2"]["len_enc"] = "ascii"
    spec["3"]["len_type"] = 2
    spec["3"]["max_len"] = 10
    spec["3"]["data_enc"] = "ascii"
    spec["3"]["len_enc"] = "ascii"
    # Single int key
    doc_dec = {"h": "header", "t": "0210", 2: "1122"}
    with pytest.raises(
        iso8583.EncodeError,
        match="Dictionary contains invalid fields .2.: field p",
    ):
        iso8583.encode(doc_dec, spec=spec)
    # Multiple int keys
    doc_dec = {"h": "header", "t": "0210", 2: "1122", 3: "3344"}
    with pytest.raises(
        iso8583.EncodeError,
        match="Dictionary contains invalid fields .2, 3.: field p",
    ):
        iso8583.encode(doc_dec, spec=spec)
    # Float keys. Fix: the original repeated this exact case twice
    # (copy-paste duplicate); the redundant second copy was removed.
    doc_dec = {"h": "header", "t": "0210", 2.5: "1122", 3.5: "3344"}
    with pytest.raises(
        iso8583.EncodeError,
        match="Dictionary contains invalid fields .2.5, 3.5.: field p",
    ):
        iso8583.encode(doc_dec, spec=spec)
    # Tuple keys
    doc_dec = {"h": "header", "t": "0210", (1, 2): "1122", (3, 4): "3344"}
    with pytest.raises(
        iso8583.EncodeError,
        match="Dictionary contains invalid fields ..1, 2., .3, 4..: field p",
    ):
        iso8583.encode(doc_dec, spec=spec)
def test_input_type():
    """Encode accepts only dict."""
    with pytest.raises(TypeError, match="Decoded ISO8583 data must be dict, not bytes"):
        iso8583.encode(b"", spec=spec)
def test_header_no_key():
    """Message header is required and key is not provided."""
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "ascii"
    spec["1"]["len_type"] = 0
    spec["1"]["max_len"] = 0
    expected = "Field data is required according to specifications: field h"
    with pytest.raises(iso8583.EncodeError, match=expected):
        iso8583.encode({"t": ""}, spec=spec)
def test_header_ascii_absent():
    """
    ASCII header is not required by spec and not provided.
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["max_len"] = 0
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    doc_dec = {"h": "", "t": "0200"}
    s, doc_enc = iso8583.encode(doc_dec, spec=spec)
    assert s == b"0200\x00\x00\x00\x00\x00\x00\x00\x00"
    assert doc_enc["t"]["len"] == b""
    assert doc_enc["t"]["data"] == b"0200"
    assert doc_dec["t"] == "0200"
    # Consistency fix: sibling tests also assert that the bitmap has no
    # length prefix; this assertion was missing here.
    assert doc_enc["p"]["len"] == b""
    assert doc_enc["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert doc_dec["p"] == "0000000000000000"
    assert doc_enc.keys() == set(["t", "p"])
    assert doc_dec.keys() == set(["h", "t", "p"])
def test_header_ascii_present():
    """ASCII header is required by spec and provided."""
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    decoded = {"h": "header", "t": "0200"}
    raw, encoded = iso8583.encode(decoded, spec=spec)
    assert raw == b"header0200\x00\x00\x00\x00\x00\x00\x00\x00"
    assert encoded["h"]["len"] == b""
    assert encoded["h"]["data"] == b"header"
    assert decoded["h"] == "header"
    assert encoded["t"]["len"] == b""
    assert encoded["t"]["data"] == b"0200"
    assert decoded["t"] == "0200"
    assert encoded["p"]["len"] == b""
    assert encoded["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert decoded["p"] == "0000000000000000"
    assert encoded.keys() == {"h", "t", "p"}
    assert decoded.keys() == {"h", "t", "p"}
def test_header_ebcdic_absent():
    """EBCDIC header is not required by spec and not provided."""
    spec["h"]["data_enc"] = "cp500"
    spec["h"]["max_len"] = 0
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    decoded = {"h": "", "t": "0200"}
    raw, encoded = iso8583.encode(decoded, spec=spec)
    assert raw == b"0200\x00\x00\x00\x00\x00\x00\x00\x00"
    assert encoded["t"]["len"] == b""
    assert encoded["t"]["data"] == b"0200"
    assert decoded["t"] == "0200"
    assert encoded["p"]["len"] == b""
    assert encoded["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert decoded["p"] == "0000000000000000"
    assert encoded.keys() == {"t", "p"}
    assert decoded.keys() == {"h", "t", "p"}
def test_header_ebcdic_present():
    """EBCDIC header is required by spec and provided."""
    spec["h"]["data_enc"] = "cp500"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    decoded = {"h": "header", "t": "0200"}
    raw, encoded = iso8583.encode(decoded, spec=spec)
    assert raw == b"\x88\x85\x81\x84\x85\x990200\x00\x00\x00\x00\x00\x00\x00\x00"
    assert encoded["h"]["len"] == b""
    assert encoded["h"]["data"] == b"\x88\x85\x81\x84\x85\x99"
    assert decoded["h"] == "header"
    assert encoded["t"]["len"] == b""
    assert encoded["t"]["data"] == b"0200"
    assert decoded["t"] == "0200"
    assert encoded["p"]["len"] == b""
    assert encoded["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert decoded["p"] == "0000000000000000"
    assert encoded.keys() == {"h", "t", "p"}
    assert decoded.keys() == {"h", "t", "p"}
def test_header_bdc_absent():
    """BCD header is not required by spec and not provided."""
    spec["h"]["data_enc"] = "b"
    spec["h"]["max_len"] = 0
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    decoded = {"h": "", "t": "0200"}
    raw, encoded = iso8583.encode(decoded, spec=spec)
    assert raw == b"0200\x00\x00\x00\x00\x00\x00\x00\x00"
    assert encoded["t"]["len"] == b""
    assert encoded["t"]["data"] == b"0200"
    assert decoded["t"] == "0200"
    assert encoded["p"]["len"] == b""
    assert encoded["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert decoded["p"] == "0000000000000000"
    assert encoded.keys() == {"t", "p"}
    assert decoded.keys() == {"h", "t", "p"}
def test_header_bcd_present():
    """BCD header is required by spec and provided."""
    spec["h"]["data_enc"] = "b"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    decoded = {"h": "A1A2A3A4A5A6", "t": "0200"}
    raw, encoded = iso8583.encode(decoded, spec=spec)
    assert raw == b"\xA1\xA2\xA3\xA4\xA5\xA60200\x00\x00\x00\x00\x00\x00\x00\x00"
    assert encoded["h"]["len"] == b""
    assert encoded["h"]["data"] == b"\xA1\xA2\xA3\xA4\xA5\xA6"
    assert decoded["h"] == "A1A2A3A4A5A6"
    assert encoded["t"]["len"] == b""
    assert encoded["t"]["data"] == b"0200"
    assert decoded["t"] == "0200"
    assert encoded["p"]["len"] == b""
    assert encoded["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert decoded["p"] == "0000000000000000"
    assert encoded.keys() == {"h", "t", "p"}
    assert decoded.keys() == {"h", "t", "p"}
def test_header_not_required_provided():
    """String header is not required by spec but provided.
    No error. Header is not included in the message.
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["max_len"] = 0
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    decoded = {"h": "header", "t": "0200"}
    raw, encoded = iso8583.encode(decoded, spec=spec)
    assert raw == b"0200\x00\x00\x00\x00\x00\x00\x00\x00"
    assert encoded["t"]["len"] == b""
    assert encoded["t"]["data"] == b"0200"
    assert decoded["t"] == "0200"
    assert encoded["p"]["len"] == b""
    assert encoded["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert decoded["p"] == "0000000000000000"
    assert encoded.keys() == {"t", "p"}
    assert decoded.keys() == {"h", "t", "p"}
def test_header_negative_missing():
    """String header is required by spec but not provided."""
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    expected = "Field data is 0 bytes, expecting 6: field h"
    with pytest.raises(iso8583.EncodeError, match=expected):
        iso8583.encode({"h": "", "t": "0200"}, spec=spec)
def test_header_negative_partial():
    """String header is required by spec but partially provided."""
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    expected = "Field data is 4 bytes, expecting 6: field h"
    with pytest.raises(iso8583.EncodeError, match=expected):
        iso8583.encode({"h": "head", "t": "0200"}, spec=spec)
def test_header_negative_incorrect_encoding():
    """String header is required by spec and provided.
    However, the spec encoding is not correct.
    """
    spec["h"]["data_enc"] = "invalid"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    expected = "Failed to encode field, unknown encoding specified: field h"
    with pytest.raises(iso8583.EncodeError, match=expected):
        iso8583.encode({"h": "header", "t": "0200"}, spec=spec)
def test_header_negative_incorrect_ascii_data():
    """ASCII header is required by spec and provided.
    However, the data is not ASCII.
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    # latin-1 round-trips all byte values, producing non-ASCII characters.
    decoded = {
        "h": b"\xff\xff\xff\xff\xff\xff".decode("latin-1"),
        "t": "0200",
    }
    expected = "Failed to encode field, invalid data: field h"
    with pytest.raises(iso8583.EncodeError, match=expected):
        iso8583.encode(decoded, spec=spec)
def test_header_negative_incorrect_bcd_data():
    """BCD header is required by spec and provided.
    However, the data is not hex.
    """
    spec["h"]["data_enc"] = "b"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    expected = "Failed to encode field, non-hex data: field h"
    with pytest.raises(iso8583.EncodeError, match=expected):
        iso8583.encode({"h": "header", "t": "0200"}, spec=spec)
def test_variable_header_ascii_over_max():
    """ASCII variable header is required and over max provided."""
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_enc"] = "ascii"
    spec["h"]["len_type"] = 2
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "ascii"
    expected = "Field data is 8 bytes, larger than maximum 6: field h"
    with pytest.raises(iso8583.EncodeError, match=expected):
        iso8583.encode({"h": "header12", "t": "0210"}, spec=spec)
def test_variable_header_ascii_present():
    """ASCII variable header is required and provided."""
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_enc"] = "ascii"
    spec["h"]["len_type"] = 2
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    decoded = {"h": "header", "t": "0210"}
    raw, encoded = iso8583.encode(decoded, spec=spec)
    assert raw == b"06header0210\x00\x00\x00\x00\x00\x00\x00\x00"
    assert encoded["h"]["len"] == b"06"
    assert encoded["h"]["data"] == b"header"
    assert decoded["h"] == "header"
    assert encoded["t"]["len"] == b""
    assert encoded["t"]["data"] == b"0210"
    assert decoded["t"] == "0210"
    assert encoded["p"]["len"] == b""
    assert encoded["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert decoded["p"] == "0000000000000000"
    assert encoded.keys() == {"h", "t", "p"}
    assert decoded.keys() == {"h", "t", "p"}
def test_variable_header_ascii_present_zero_legnth():
    """ASCII zero-length variable header.
    NOTE: 'legnth' typo kept in the name to preserve the test id.
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_enc"] = "ascii"
    spec["h"]["len_type"] = 2
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    decoded = {"h": "", "t": "0210"}
    raw, encoded = iso8583.encode(decoded, spec=spec)
    assert raw == b"000210\x00\x00\x00\x00\x00\x00\x00\x00"
    assert encoded["h"]["len"] == b"00"
    assert encoded["h"]["data"] == b""
    assert decoded["h"] == ""
    assert encoded["t"]["len"] == b""
    assert encoded["t"]["data"] == b"0210"
    assert decoded["t"] == "0210"
    assert encoded["p"]["len"] == b""
    assert encoded["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert decoded["p"] == "0000000000000000"
    assert encoded.keys() == {"h", "t", "p"}
    assert decoded.keys() == {"h", "t", "p"}
def test_variable_header_ebcdic_over_max():
    """EBCDIC variable header is required and over max provided."""
    spec["h"]["data_enc"] = "cp500"
    spec["h"]["len_enc"] = "cp500"
    spec["h"]["len_type"] = 2
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "ascii"
    expected = "Field data is 7 bytes, larger than maximum 6: field h"
    with pytest.raises(iso8583.EncodeError, match=expected):
        iso8583.encode({"h": "header1", "t": "0210"}, spec=spec)
def test_variable_header_ebcdic_present():
    """EBCDIC variable header is required and provided."""
    spec["h"]["data_enc"] = "cp500"
    spec["h"]["len_enc"] = "cp500"
    spec["h"]["len_type"] = 2
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    decoded = {"h": "header", "t": "0210"}
    raw, encoded = iso8583.encode(decoded, spec=spec)
    assert raw == b"\xf0\xf6\x88\x85\x81\x84\x85\x990210\x00\x00\x00\x00\x00\x00\x00\x00"
    assert encoded["h"]["len"] == b"\xf0\xf6"
    assert encoded["h"]["data"] == b"\x88\x85\x81\x84\x85\x99"
    assert decoded["h"] == "header"
    assert encoded["t"]["len"] == b""
    assert encoded["t"]["data"] == b"0210"
    assert decoded["t"] == "0210"
    assert encoded["p"]["len"] == b""
    assert encoded["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert decoded["p"] == "0000000000000000"
    assert encoded.keys() == {"h", "t", "p"}
    assert decoded.keys() == {"h", "t", "p"}
def test_variable_header_ebcdic_present_zero_legnth():
    """EBCDIC zero-length variable header.
    NOTE: 'legnth' typo kept in the name to preserve the test id.
    """
    spec["h"]["data_enc"] = "cp500"
    spec["h"]["len_enc"] = "cp500"
    spec["h"]["len_type"] = 2
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    decoded = {"h": "", "t": "0210"}
    raw, encoded = iso8583.encode(decoded, spec=spec)
    assert raw == b"\xf0\xf00210\x00\x00\x00\x00\x00\x00\x00\x00"
    assert encoded["h"]["len"] == b"\xf0\xf0"
    assert encoded["h"]["data"] == b""
    assert decoded["h"] == ""
    assert encoded["t"]["len"] == b""
    assert encoded["t"]["data"] == b"0210"
    assert decoded["t"] == "0210"
    assert encoded["p"]["len"] == b""
    assert encoded["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert decoded["p"] == "0000000000000000"
    assert encoded.keys() == {"h", "t", "p"}
    assert decoded.keys() == {"h", "t", "p"}
def test_variable_header_bdc_over_max():
    """BCD variable header is required and over max is provided."""
    spec["h"]["data_enc"] = "b"
    spec["h"]["len_enc"] = "bcd"
    spec["h"]["len_type"] = 2
    spec["h"]["max_len"] = 2
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    expected = "Field data is 3 bytes, larger than maximum 2: field h"
    with pytest.raises(iso8583.EncodeError, match=expected):
        iso8583.encode({"h": "abcdef", "t": "0210"}, spec=spec)
def test_variable_header_bdc_odd():
    """BCD variable header is required and odd length is provided."""
    spec["h"]["data_enc"] = "b"
    spec["h"]["len_enc"] = "bcd"
    spec["h"]["len_type"] = 2
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    expected = "Failed to encode field, odd-length hex data: field h"
    with pytest.raises(iso8583.EncodeError, match=expected):
        iso8583.encode({"h": "abcde", "t": "0210"}, spec=spec)
def test_variable_header_bdc_ascii_length():
    """BCD variable header. The length is in ASCII."""
    spec["h"]["data_enc"] = "b"
    spec["h"]["len_enc"] = "ascii"
    spec["h"]["len_type"] = 3
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    decoded = {"h": "abcd", "t": "0210"}
    raw, encoded = iso8583.encode(decoded, spec=spec)
    assert raw == b"002\xab\xcd0210\x00\x00\x00\x00\x00\x00\x00\x00"
    assert encoded["h"]["len"] == b"002"
    assert encoded["h"]["data"] == b"\xab\xcd"
    assert decoded["h"] == "abcd"
    assert encoded["t"]["len"] == b""
    assert encoded["t"]["data"] == b"0210"
    assert decoded["t"] == "0210"
    assert encoded["p"]["len"] == b""
    assert encoded["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert decoded["p"] == "0000000000000000"
    assert encoded.keys() == {"h", "t", "p"}
    assert decoded.keys() == {"h", "t", "p"}
def test_variable_header_bdc_ebcdic_length():
    """BCD variable header is required and provided.
    The length is in EBCDIC.
    """
    spec["h"]["data_enc"] = "b"
    spec["h"]["len_enc"] = "cp500"
    spec["h"]["len_type"] = 3
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    decoded = {"h": "abcd", "t": "0210"}
    raw, encoded = iso8583.encode(decoded, spec=spec)
    assert raw == b"\xf0\xf0\xf2\xab\xcd0210\x00\x00\x00\x00\x00\x00\x00\x00"
    assert encoded["h"]["len"] == b"\xf0\xf0\xf2"
    assert encoded["h"]["data"] == b"\xab\xcd"
    assert decoded["h"] == "abcd"
    assert encoded["t"]["len"] == b""
    assert encoded["t"]["data"] == b"0210"
    assert decoded["t"] == "0210"
    assert encoded["p"]["len"] == b""
    assert encoded["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert decoded["p"] == "0000000000000000"
    assert encoded.keys() == {"h", "t", "p"}
    assert decoded.keys() == {"h", "t", "p"}
@pytest.mark.parametrize("len_enc", ["b", "bcd"])
def test_variable_header_bcd_present(len_enc: str):
    """BCD variable header is required and provided."""
    spec["h"]["data_enc"] = "b"
    spec["h"]["len_enc"] = len_enc
    spec["h"]["len_type"] = 2
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    decoded = {"h": "abcd", "t": "0210"}
    raw, encoded = iso8583.encode(decoded, spec=spec)
    assert raw == b"\x00\x02\xab\xcd0210\x00\x00\x00\x00\x00\x00\x00\x00"
    assert encoded["h"]["len"] == b"\x00\x02"
    assert encoded["h"]["data"] == b"\xab\xcd"
    assert decoded["h"] == "abcd"
    assert encoded["t"]["len"] == b""
    assert encoded["t"]["data"] == b"0210"
    assert decoded["t"] == "0210"
    assert encoded["p"]["len"] == b""
    assert encoded["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert decoded["p"] == "0000000000000000"
    assert encoded.keys() == {"h", "t", "p"}
    assert decoded.keys() == {"h", "t", "p"}
@pytest.mark.parametrize("len_enc", ["b", "bcd"])
def test_variable_header_bcd_present_zero_length(len_enc: str):
    """BCD zero-length variable header is required and provided."""
    spec["h"]["data_enc"] = "b"
    spec["h"]["len_enc"] = len_enc
    spec["h"]["len_type"] = 2
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    decoded = {"h": "", "t": "0210"}
    raw, encoded = iso8583.encode(decoded, spec=spec)
    assert raw == b"\x00\x000210\x00\x00\x00\x00\x00\x00\x00\x00"
    assert encoded["h"]["len"] == b"\x00\x00"
    assert encoded["h"]["data"] == b""
    assert decoded["h"] == ""
    assert encoded["t"]["len"] == b""
    assert encoded["t"]["data"] == b"0210"
    assert decoded["t"] == "0210"
    assert encoded["p"]["len"] == b""
    assert encoded["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert decoded["p"] == "0000000000000000"
    assert encoded.keys() == {"h", "t", "p"}
    assert decoded.keys() == {"h", "t", "p"}
def test_variable_header_incorrect_encoding():
    """
    variable header is required and provided.
    However, the spec encoding is not correct for length
    """
    # Variable-length header whose length-prefix encoding is unsupported.
    for field, options in (
        ("h", {"data_enc": "ascii", "len_enc": "invalid", "len_type": 2, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    expected_error = "Failed to encode field length, unknown encoding specified: field h"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "abcd", "t": "0210"}, spec=spec)
def test_type_no_key():
    """
    Message type is required and key is not provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "ascii"}),
        ("1", {"len_type": 0, "max_len": 0}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # The "t" key is missing entirely from the decoded dictionary.
    with pytest.raises(iso8583.EncodeError, match="Field data is required: field t"):
        iso8583.encode({"h": "header", "2": ""}, spec=spec)
def test_type_ascii_absent():
    """
    ASCII message type is required and not provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # An empty message type must be rejected: the MTI is always 4 bytes.
    expected_error = "Field data is 0 bytes, expecting 4: field t"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": ""}, spec=spec)
def test_type_ascii_partial():
    """
    ASCII message type is required and partial is provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # Two characters is too short for the fixed 4-byte message type.
    expected_error = "Field data is 2 bytes, expecting 4: field t"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "02"}, spec=spec)
def test_type_ascii_over_max():
    """
    ASCII message type is required and over max is provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # Five characters exceeds the fixed 4-byte message type.
    expected_error = "Field data is 5 bytes, expecting 4: field t"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "02101"}, spec=spec)
def test_type_ascii_incorrect_data():
    """
    ASCII message type is required and provided.
    However, the data is not ASCII.
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # Latin-1 characters above 0x7F cannot be encoded as ASCII.
    non_ascii_type = b"\xff\xff\xff\xff".decode("latin-1")
    expected_error = "Failed to encode field, invalid data: field t"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": non_ascii_type}, spec=spec)
def test_type_ascii_present():
    """
    ASCII message type is required and provided
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    doc_dec = {"h": "header", "t": "0200"}
    s, doc_enc = iso8583.encode(doc_dec, spec=spec)
    # Wire layout: fixed ASCII header, ASCII message type, 8-byte binary
    # primary bitmap with no fields set.
    assert s == b"header0200\x00\x00\x00\x00\x00\x00\x00\x00"
    assert doc_enc["h"]["len"] == b""
    assert doc_enc["h"]["data"] == b"header"
    assert doc_dec["h"] == "header"
    assert doc_enc["t"]["len"] == b""
    assert doc_enc["t"]["data"] == b"0200"
    assert doc_dec["t"] == "0200"
    assert doc_enc["p"]["len"] == b""
    assert doc_enc["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert doc_dec["p"] == "0000000000000000"
    assert doc_enc.keys() == set(["h", "t", "p"])
    assert doc_dec.keys() == set(["h", "t", "p"])
def test_type_ebcdic_absent():
    """
    EBCDIC message type is required and not provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "cp500"}),
        ("p", {"data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    expected_error = "Field data is 0 bytes, expecting 4: field t"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": ""}, spec=spec)
def test_type_ebcdic_partial():
    """
    EBCDIC message type is required and partial provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "cp500"}),
        ("p", {"data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    expected_error = "Field data is 2 bytes, expecting 4: field t"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "02"}, spec=spec)
def test_type_ebcdic_over_max():
    """
    EBCDIC message type is required and over max provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "cp500"}),
        ("p", {"data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    expected_error = "Field data is 5 bytes, expecting 4: field t"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "02101"}, spec=spec)
def test_type_ebcdic_present():
    """
    EBCDIC message type is required and provided
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "cp500"
    spec["p"]["data_enc"] = "b"
    doc_dec = {"h": "header", "t": "0200"}
    s, doc_enc = iso8583.encode(doc_dec, spec=spec)
    # "0200" in EBCDIC (cp500) is \xf0\xf2\xf0\xf0; digits map to 0xF0-0xF9.
    assert s == b"header\xf0\xf2\xf0\xf0\x00\x00\x00\x00\x00\x00\x00\x00"
    assert doc_enc["h"]["len"] == b""
    assert doc_enc["h"]["data"] == b"header"
    assert doc_dec["h"] == "header"
    assert doc_enc["t"]["len"] == b""
    assert doc_enc["t"]["data"] == b"\xf0\xf2\xf0\xf0"
    assert doc_dec["t"] == "0200"
    assert doc_enc["p"]["len"] == b""
    assert doc_enc["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert doc_dec["p"] == "0000000000000000"
    assert doc_enc.keys() == set(["h", "t", "p"])
    assert doc_dec.keys() == set(["h", "t", "p"])
def test_type_bdc_absent():
    """
    BDC message type is required and not provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "b"}),
        ("p", {"data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # BCD packs 2 digits per byte, so the 4-digit type must be 2 bytes.
    expected_error = "Field data is 0 bytes, expecting 2: field t"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": ""}, spec=spec)
def test_type_bdc_partial():
    """
    BDC message type is required and partial is provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "b"}),
        ("p", {"data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # "02" packs into a single BCD byte; two bytes are required.
    expected_error = "Field data is 1 bytes, expecting 2: field t"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "02"}, spec=spec)
def test_type_bdc_over_max():
    """
    BDC message type is required and over max is provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "b"}),
        ("p", {"data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # "021000" packs into 3 BCD bytes; only 2 are allowed.
    expected_error = "Field data is 3 bytes, expecting 2: field t"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "021000"}, spec=spec)
def test_type_bdc_odd():
    """
    BDC message type is required and odd length is provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "b"}),
        ("p", {"data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # An odd number of hex digits cannot be packed into whole bytes.
    expected_error = "Failed to encode field, odd-length hex data: field t"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "021"}, spec=spec)
def test_type_bdc_non_hex():
    """
    BDC message type is required and provided
    However, the data is not hex
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "b"}),
        ("p", {"data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # "x" is not a hex digit, so BCD packing must fail.
    expected_error = "Failed to encode field, non-hex data: field t"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "021x"}, spec=spec)
def test_type_bcd_present():
    """
    BCD message type is required and provided
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "b"
    spec["p"]["data_enc"] = "b"
    doc_dec = {"h": "header", "t": "0200"}
    s, doc_enc = iso8583.encode(doc_dec, spec=spec)
    # "0200" packs into two BCD bytes (\x02\x00) followed by the bitmap.
    assert s == b"header\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00"
    assert doc_enc["h"]["len"] == b""
    assert doc_enc["h"]["data"] == b"header"
    assert doc_dec["h"] == "header"
    assert doc_enc["t"]["len"] == b""
    assert doc_enc["t"]["data"] == b"\x02\x00"
    assert doc_dec["t"] == "0200"
    assert doc_enc["p"]["len"] == b""
    assert doc_enc["p"]["data"] == b"\x00\x00\x00\x00\x00\x00\x00\x00"
    assert doc_dec["p"] == "0000000000000000"
    assert doc_enc.keys() == set(["h", "t", "p"])
    assert doc_dec.keys() == set(["h", "t", "p"])
def test_type_incorrect_encoding():
    """
    String message type is required and provided.
    However, the spec encoding is not correct
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "invalid"}),
        ("p", {"data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    expected_error = "Failed to encode field, unknown encoding specified: field t"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "0200"}, spec=spec)
def test_bitmap_range():
    """
    ISO8583 bitmaps must be between 1 and 128.
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    doc_dec = {"h": "header", "t": "0200"}
    # Field "0" is below the valid 1-128 range.
    # NOTE: the "." in the match patterns is a regex wildcard standing in
    # for the literal parentheses/brackets in the actual error message.
    doc_dec["0"] = ""
    with pytest.raises(
        iso8583.EncodeError,
        match="Dictionary contains fields outside of 1-128 range .0.: field p",
    ):
        iso8583.encode(doc_dec, spec=spec)
    del doc_dec["0"]
    # Field "129" is above the valid 1-128 range.
    doc_dec["129"] = ""
    with pytest.raises(
        iso8583.EncodeError,
        match="Dictionary contains fields outside of 1-128 range .129.: field p",
    ):
        iso8583.encode(doc_dec, spec=spec)
    # Both out-of-range fields at once: error lists them together.
    for f in range(0, 130):
        doc_dec[str(f)] = ""
    with pytest.raises(
        iso8583.EncodeError,
        match="Dictionary contains fields outside of 1-128 range .0, 129.: field p",
    ):
        iso8583.encode(doc_dec, spec=spec)
    # Three out-of-range fields: error lists all of them.
    for f in range(0, 131):
        doc_dec[str(f)] = ""
    with pytest.raises(
        iso8583.EncodeError,
        match="Dictionary contains fields outside of 1-128 range .0, 129, 130.: field p",
    ):
        iso8583.encode(doc_dec, spec=spec)
def test_bitmap_remove_secondary():
    """
    If 65-128 fields are not in bitmap then remove field 1.
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"]["data_enc"] = "ascii"
    spec["2"]["len_enc"] = "ascii"
    spec["2"]["len_type"] = 2
    spec["2"]["max_len"] = 19
    # Field 1 (secondary bitmap) is supplied but no 65-128 fields are
    # present, so encode() must silently drop it from the output.
    doc_dec = {
        "h": "header",
        "t": "0200",
        "1": "not needed",
        "2": "1234567890",
    }
    s, doc_enc = iso8583.encode(doc_dec, spec=spec)
    # Bitmap \x40... has only bit 2 set; no secondary bitmap is emitted.
    assert s == b"header0200\x40\x00\x00\x00\x00\x00\x00\x00101234567890"
    assert doc_enc["h"]["len"] == b""
    assert doc_enc["h"]["data"] == b"header"
    assert doc_dec["h"] == "header"
    assert doc_enc["t"]["len"] == b""
    assert doc_enc["t"]["data"] == b"0200"
    assert doc_dec["t"] == "0200"
    assert doc_enc["p"]["len"] == b""
    assert doc_enc["p"]["data"] == b"\x40\x00\x00\x00\x00\x00\x00\x00"
    assert doc_dec["p"] == "4000000000000000"
    assert doc_enc["2"]["len"] == b"10"
    assert doc_enc["2"]["data"] == b"1234567890"
    assert doc_dec["2"] == "1234567890"
    # "1" was removed from both dictionaries.
    assert doc_enc.keys() == set(["h", "t", "p", "2"])
    assert doc_dec.keys() == set(["h", "t", "p", "2"])
def test_bitmap_add_secondary():
    """
    If one of 65-128 fields are in bitmap then add field 1.
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["66"]["data_enc"] = "ascii"
    spec["66"]["len_enc"] = "ascii"
    spec["66"]["len_type"] = 2
    spec["66"]["max_len"] = 19
    # Field 66 is in the 65-128 range, so encode() must create field 1
    # (the secondary bitmap) even though it was not supplied.
    doc_dec = {
        "h": "header",
        "t": "0200",
        "66": "1234567890",
    }
    s, doc_enc = iso8583.encode(doc_dec, spec=spec)
    # Primary bitmap \x80... has bit 1 set (secondary bitmap present);
    # secondary bitmap \x40... has bit 66 set.
    assert (
        s
        == b"header0200\x80\x00\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00101234567890"
    )
    assert doc_enc["h"]["len"] == b""
    assert doc_enc["h"]["data"] == b"header"
    assert doc_dec["h"] == "header"
    assert doc_enc["t"]["len"] == b""
    assert doc_enc["t"]["data"] == b"0200"
    assert doc_dec["t"] == "0200"
    assert doc_enc["p"]["len"] == b""
    assert doc_enc["p"]["data"] == b"\x80\x00\x00\x00\x00\x00\x00\x00"
    assert doc_dec["p"] == "8000000000000000"
    assert doc_enc["1"]["len"] == b""
    assert doc_enc["1"]["data"] == b"\x40\x00\x00\x00\x00\x00\x00\x00"
    assert doc_dec["1"] == "4000000000000000"
    assert doc_enc["66"]["len"] == b"10"
    assert doc_enc["66"]["data"] == b"1234567890"
    assert doc_dec["66"] == "1234567890"
    # "1" was added to both dictionaries.
    assert doc_enc.keys() == set(["h", "t", "p", "1", "66"])
    assert doc_dec.keys() == set(["h", "t", "p", "1", "66"])
def test_primary_bitmap_incorrect_encoding():
    """
    Incorrect encoding specified for primary bitmap
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "invalid"}),
        ("1", {"len_type": 0, "max_len": 0}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    expected_error = "Failed to encode field, unknown encoding specified: field p"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "0210", "2": ""}, spec=spec)
def test_secondary_bitmap_incorrect_encoding():
    """
    Incorrect encoding specified for secondary bitmap
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "ascii"
    spec["1"]["len_type"] = 0
    spec["1"]["max_len"] = 16
    spec["1"]["data_enc"] = "invalid"
    # Field 65 forces the secondary bitmap (field 1) to be encoded.
    doc_dec = {"h": "header", "t": "0210", "65": ""}
    with pytest.raises(
        iso8583.EncodeError,
        # Fixed truncated pattern: it previously read "ailed to encode ..."
        # (missing the leading "F"). It still matched because pytest.raises
        # applies the pattern with re.search, but the full message documents
        # the expected error precisely and matches sibling tests.
        match="Failed to encode field, unknown encoding specified: field 1",
    ):
        iso8583.encode(doc_dec, spec=spec)
def test_bitmaps_ascii():
    """
    Primary and secondary bitmaps are encoded as 16-character ASCII hex
    when the bitmap data_enc is "ascii". Field 105 forces the secondary
    bitmap to be present.
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "ascii"
    spec["1"]["data_enc"] = "ascii"
    spec["105"]["len_enc"] = "ascii"
    doc_dec = {"h": "header", "t": "0210", "105": ""}
    s, doc_enc = iso8583.encode(doc_dec, spec=spec)
    # Both bitmaps appear as ASCII hex strings, then field 105's "000" length.
    assert s == b"header021080000000000000000000000000800000000"
    assert doc_enc["h"]["len"] == b""
    assert doc_enc["h"]["data"] == b"header"
    assert doc_dec["h"] == "header"
    assert doc_enc["t"]["len"] == b""
    assert doc_enc["t"]["data"] == b"0210"
    assert doc_dec["t"] == "0210"
    assert doc_enc["p"]["len"] == b""
    assert doc_enc["p"]["data"] == b"8000000000000000"
    assert doc_dec["p"] == "8000000000000000"
    assert doc_enc["1"]["len"] == b""
    assert doc_enc["1"]["data"] == b"0000000000800000"
    assert doc_dec["1"] == "0000000000800000"
    assert doc_enc["105"]["len"] == b"000"
    assert doc_enc["105"]["data"] == b""
    assert doc_dec["105"] == ""
    assert doc_enc.keys() == set(["h", "t", "p", "1", "105"])
    assert doc_dec.keys() == set(["h", "t", "p", "1", "105"])
def test_bitmaps_ebcidic():
    """
    Primary and secondary bitmaps are encoded as EBCDIC (cp500) hex
    characters when the bitmap data_enc is "cp500". Field 105 forces the
    secondary bitmap to be present.
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "cp500"
    spec["1"]["data_enc"] = "cp500"
    spec["105"]["len_enc"] = "ascii"
    doc_dec = {"h": "header", "t": "0210", "105": ""}
    s, doc_enc = iso8583.encode(doc_dec, spec=spec)
    # EBCDIC digits map to 0xF0-0xF9, e.g. "8" -> \xf8 and "0" -> \xf0.
    assert (
        s
        == b"header0210\xf8\xf0\xf0\xf0\xf0\xf0\xf0\xf0"
        + b"\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0"
        + b"\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf8\xf0\xf0\xf0\xf0\xf0000"
    )
    assert doc_enc["h"]["len"] == b""
    assert doc_enc["h"]["data"] == b"header"
    assert doc_dec["h"] == "header"
    assert doc_enc["t"]["len"] == b""
    assert doc_enc["t"]["data"] == b"0210"
    assert doc_dec["t"] == "0210"
    assert doc_enc["p"]["len"] == b""
    assert (
        doc_enc["p"]["data"]
        == b"\xf8\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0"
    )
    assert doc_dec["p"] == "8000000000000000"
    assert doc_enc["1"]["len"] == b""
    assert (
        doc_enc["1"]["data"]
        == b"\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf0\xf8\xf0\xf0\xf0\xf0\xf0"
    )
    assert doc_dec["1"] == "0000000000800000"
    assert doc_enc["105"]["len"] == b"000"
    assert doc_enc["105"]["data"] == b""
    assert doc_dec["105"] == ""
    assert doc_enc.keys() == set(["h", "t", "p", "1", "105"])
    assert doc_dec.keys() == set(["h", "t", "p", "1", "105"])
def test_bitmaps_bcd():
    """
    Primary and secondary bitmaps are encoded as raw 8-byte binary when
    the bitmap data_enc is "b". Field 105 forces the secondary bitmap to
    be present.
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["1"]["data_enc"] = "b"
    spec["105"]["len_enc"] = "ascii"
    doc_dec = {"h": "header", "t": "0210", "105": ""}
    s, doc_enc = iso8583.encode(doc_dec, spec=spec)
    # Two 8-byte binary bitmaps, then field 105's ASCII "000" length prefix.
    assert (
        s
        == b"header0210\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x80\x00\x00000"
    )
    assert doc_enc["h"]["len"] == b""
    assert doc_enc["h"]["data"] == b"header"
    assert doc_dec["h"] == "header"
    assert doc_enc["t"]["len"] == b""
    assert doc_enc["t"]["data"] == b"0210"
    assert doc_dec["t"] == "0210"
    assert doc_enc["p"]["len"] == b""
    assert doc_enc["p"]["data"] == b"\x80\x00\x00\x00\x00\x00\x00\x00"
    assert doc_dec["p"] == "8000000000000000"
    assert doc_enc["1"]["len"] == b""
    assert doc_enc["1"]["data"] == b"\x00\x00\x00\x00\x00\x80\x00\x00"
    assert doc_dec["1"] == "0000000000800000"
    assert doc_enc["105"]["len"] == b"000"
    assert doc_enc["105"]["data"] == b""
    assert doc_dec["105"] == ""
    assert doc_enc.keys() == set(["h", "t", "p", "1", "105"])
    assert doc_dec.keys() == set(["h", "t", "p", "1", "105"])
def test_primary_bitmap_ascii_upper_case():
    """
    This test makes sure that encoded primary bitmap is in upper case.
    """
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 0
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "ascii"
    spec["5"]["len_type"] = 0
    spec["5"]["max_len"] = 1
    spec["5"]["data_enc"] = "ascii"
    spec["7"]["len_type"] = 0
    spec["7"]["max_len"] = 1
    spec["7"]["data_enc"] = "ascii"
    doc_dec = {"t": "0200", "5": "A", "7": "B"}
    s, doc_enc = iso8583.encode(doc_dec, spec)
    # Fields 5 and 7 produce bitmap 0x0A00...; hex "A" must be upper case.
    assert s == b"02000A00000000000000AB"
    assert doc_dec["p"] == "0A00000000000000"
    assert doc_enc["t"]["data"] == b"0200"
    assert doc_enc["p"]["data"] == b"0A00000000000000"
    assert doc_enc["5"]["data"] == b"A"
    assert doc_enc["7"]["data"] == b"B"
def test_secondary_bitmap_ascii_upper_case():
    """
    This test makes sure that encoded secondary bitmap is in upper case.
    """
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 0
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "ascii"
    spec["1"]["data_enc"] = "ascii"
    spec["69"]["len_type"] = 0
    spec["69"]["max_len"] = 1
    spec["69"]["data_enc"] = "ascii"
    spec["71"]["len_type"] = 0
    spec["71"]["max_len"] = 1
    spec["71"]["data_enc"] = "ascii"
    doc_dec = {"t": "0200", "69": "A", "71": "B"}
    s, doc_enc = iso8583.encode(doc_dec, spec)
    # Fields 69 and 71 produce secondary bitmap 0x0A00...; the "A" must be
    # upper case in the ASCII hex representation.
    assert s == b"020080000000000000000A00000000000000AB"
    assert doc_dec["p"] == "8000000000000000"
    assert doc_dec["1"] == "0A00000000000000"
    assert doc_enc["t"]["data"] == b"0200"
    assert doc_enc["p"]["data"] == b"8000000000000000"
    assert doc_enc["1"]["data"] == b"0A00000000000000"
    assert doc_enc["69"]["data"] == b"A"
    assert doc_enc["71"]["data"] == b"B"
def test_fixed_field_ascii_absent():
    """
    ASCII fixed field is required and not provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "ascii"}),
        ("2", {"len_type": 0, "max_len": 2, "data_enc": "ascii"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # A fixed field must carry exactly max_len bytes; empty is rejected.
    expected_error = "Field data is 0 bytes, expecting 2: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "0210", "2": ""}, spec=spec)
def test_fixed_field_ascii_partial():
    """
    ASCII fixed field is required and partially provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "ascii"}),
        ("2", {"len_type": 0, "max_len": 2, "data_enc": "ascii"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    expected_error = "Field data is 1 bytes, expecting 2: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "0210", "2": "1"}, spec=spec)
def test_fixed_field_ascii_over_max():
    """
    ASCII fixed field is required and over max provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "ascii"}),
        ("2", {"len_type": 0, "max_len": 2, "data_enc": "ascii"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    expected_error = "Field data is 3 bytes, expecting 2: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "0210", "2": "123"}, spec=spec)
def test_fixed_field_ascii_incorrect_data():
    """
    ASCII fixed field is required and provided.
    However, the data is not ASCII
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "ascii"}),
        ("2", {"len_type": 0, "max_len": 2, "data_enc": "ascii"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # Latin-1 characters above 0x7F cannot be encoded as ASCII.
    non_ascii_value = b"\xff\xff".decode("latin-1")
    expected_error = "Failed to encode field, invalid data: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode(
            {"h": "header", "t": "0210", "2": non_ascii_value}, spec=spec
        )
def test_fixed_field_ascii_present():
    """
    ASCII fixed field is required and provided
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"]["len_type"] = 0
    spec["2"]["max_len"] = 2
    spec["2"]["data_enc"] = "ascii"
    doc_dec = {"h": "header", "t": "0210", "2": "22"}
    s, doc_enc = iso8583.encode(doc_dec, spec=spec)
    # Bitmap \x40... has bit 2 set; fixed fields carry no length prefix.
    assert s == b"header0210\x40\x00\x00\x00\x00\x00\x00\x0022"
    assert doc_enc["h"]["len"] == b""
    assert doc_enc["h"]["data"] == b"header"
    assert doc_dec["h"] == "header"
    assert doc_enc["t"]["len"] == b""
    assert doc_enc["t"]["data"] == b"0210"
    assert doc_dec["t"] == "0210"
    assert doc_enc["p"]["len"] == b""
    assert doc_enc["p"]["data"] == b"\x40\x00\x00\x00\x00\x00\x00\x00"
    assert doc_dec["p"] == "4000000000000000"
    assert doc_enc["2"]["len"] == b""
    assert doc_enc["2"]["data"] == b"22"
    assert doc_dec["2"] == "22"
    assert doc_enc.keys() == set(["h", "t", "p", "2"])
    assert doc_dec.keys() == set(["h", "t", "p", "2"])
def test_fixed_field_ascii_present_zero_legnth():
    """
    ASCII zero-length fixed field is required and provided
    This is pointless but should work.
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"]["len_type"] = 0
    spec["2"]["max_len"] = 0
    spec["2"]["data_enc"] = "ascii"
    doc_dec = {"h": "header", "t": "0210", "2": ""}
    s, doc_enc = iso8583.encode(doc_dec, spec=spec)
    # Field 2 is still flagged in the bitmap even though it encodes to
    # zero bytes of data.
    assert s == b"header0210\x40\x00\x00\x00\x00\x00\x00\x00"
    assert doc_enc["h"]["len"] == b""
    assert doc_enc["h"]["data"] == b"header"
    assert doc_dec["h"] == "header"
    assert doc_enc["t"]["len"] == b""
    assert doc_enc["t"]["data"] == b"0210"
    assert doc_dec["t"] == "0210"
    assert doc_enc["p"]["len"] == b""
    assert doc_enc["p"]["data"] == b"\x40\x00\x00\x00\x00\x00\x00\x00"
    assert doc_dec["p"] == "4000000000000000"
    assert doc_enc["2"]["len"] == b""
    assert doc_enc["2"]["data"] == b""
    assert doc_dec["2"] == ""
    assert doc_enc.keys() == set(["h", "t", "p", "2"])
    assert doc_dec.keys() == set(["h", "t", "p", "2"])
def test_fixed_field_ebcdic_absent():
    """
    EBCDIC fixed field is required and not provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "ascii"}),
        ("2", {"len_type": 0, "max_len": 2, "data_enc": "cp500"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    expected_error = "Field data is 0 bytes, expecting 2: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "0210", "2": ""}, spec=spec)
def test_fixed_field_ebcdic_partial():
    """
    EBCDIC fixed field is required and partially provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "ascii"}),
        ("2", {"len_type": 0, "max_len": 2, "data_enc": "cp500"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    expected_error = "Field data is 1 bytes, expecting 2: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "0210", "2": "1"}, spec=spec)
def test_fixed_field_ebcdic_over_max():
    """
    EBCDIC fixed field is required and over max provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "ascii"}),
        ("2", {"len_type": 0, "max_len": 2, "data_enc": "cp500"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    expected_error = "Field data is 3 bytes, expecting 2: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "0210", "2": "123"}, spec=spec)
def test_fixed_field_ebcdic_present():
    """
    EBCDIC fixed field is required and provided
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"]["len_type"] = 0
    spec["2"]["max_len"] = 2
    spec["2"]["data_enc"] = "cp500"
    doc_dec = {"h": "header", "t": "0210", "2": "22"}
    s, doc_enc = iso8583.encode(doc_dec, spec=spec)
    # "22" in EBCDIC (cp500) is \xf2\xf2.
    assert s == b"header0210\x40\x00\x00\x00\x00\x00\x00\x00\xf2\xf2"
    assert doc_enc["h"]["len"] == b""
    assert doc_enc["h"]["data"] == b"header"
    assert doc_dec["h"] == "header"
    assert doc_enc["t"]["len"] == b""
    assert doc_enc["t"]["data"] == b"0210"
    assert doc_dec["t"] == "0210"
    assert doc_enc["p"]["len"] == b""
    assert doc_enc["p"]["data"] == b"\x40\x00\x00\x00\x00\x00\x00\x00"
    assert doc_dec["p"] == "4000000000000000"
    assert doc_enc["2"]["len"] == b""
    assert doc_enc["2"]["data"] == b"\xf2\xf2"
    assert doc_dec["2"] == "22"
    assert doc_enc.keys() == set(["h", "t", "p", "2"])
    assert doc_dec.keys() == set(["h", "t", "p", "2"])
def test_fixed_field_ebcdic_present_zero_legnth():
    """
    EBCDIC zero-length fixed field is required and provided
    This is pointless but should work.
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"]["len_type"] = 0
    spec["2"]["max_len"] = 0
    spec["2"]["data_enc"] = "cp500"
    doc_dec = {"h": "header", "t": "0210", "2": ""}
    s, doc_enc = iso8583.encode(doc_dec, spec=spec)
    # Field 2 is flagged in the bitmap but contributes no data bytes.
    assert s == b"header0210\x40\x00\x00\x00\x00\x00\x00\x00"
    assert doc_enc["h"]["len"] == b""
    assert doc_enc["h"]["data"] == b"header"
    assert doc_dec["h"] == "header"
    assert doc_enc["t"]["len"] == b""
    assert doc_enc["t"]["data"] == b"0210"
    assert doc_dec["t"] == "0210"
    assert doc_enc["p"]["len"] == b""
    assert doc_enc["p"]["data"] == b"\x40\x00\x00\x00\x00\x00\x00\x00"
    assert doc_dec["p"] == "4000000000000000"
    assert doc_enc["2"]["len"] == b""
    assert doc_enc["2"]["data"] == b""
    assert doc_dec["2"] == ""
    assert doc_enc.keys() == set(["h", "t", "p", "2"])
    assert doc_dec.keys() == set(["h", "t", "p", "2"])
def test_fixed_field_bdc_absent():
    """
    BDC fixed field is required and not provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "b"}),
        ("2", {"len_type": 0, "max_len": 2, "data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    expected_error = "Field data is 0 bytes, expecting 2: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "0210", "2": ""}, spec=spec)
def test_fixed_field_bdc_partial():
    """
    BDC fixed field is required and partial is provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "b"}),
        ("2", {"len_type": 0, "max_len": 2, "data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # "12" packs into a single BCD byte; the field expects 2 bytes.
    expected_error = "Field data is 1 bytes, expecting 2: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "0210", "2": "12"}, spec=spec)
def test_fixed_field_bdc_over_max():
    """
    BDC fixed field is required and over max is provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "b"}),
        ("2", {"len_type": 0, "max_len": 2, "data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # "123456" packs into 3 BCD bytes; only 2 are allowed.
    expected_error = "Field data is 3 bytes, expecting 2: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "0210", "2": "123456"}, spec=spec)
def test_fixed_field_bdc_odd():
    """
    BDC fixed field is required and odd length is provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "b"}),
        ("2", {"len_type": 0, "max_len": 2, "data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # An odd number of hex digits cannot be packed into whole bytes.
    expected_error = "Failed to encode field, odd-length hex data: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "0210", "2": "12345"}, spec=spec)
def test_fixed_field_bdc_non_hex():
    """
    BDC fixed field is required and provided
    However, the data is not hex
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "b"}),
        ("2", {"len_type": 0, "max_len": 2, "data_enc": "b"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # "x" is not a hex digit, so BCD packing must fail.
    expected_error = "Failed to encode field, non-hex data: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "0210", "2": "11xx"}, spec=spec)
def test_fixed_field_bcd_present():
    """
    BCD fixed field is required and provided
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"]["len_type"] = 0
    spec["2"]["max_len"] = 2
    spec["2"]["data_enc"] = "b"
    doc_dec = {"h": "header", "t": "0210", "2": "1122"}
    s, doc_enc = iso8583.encode(doc_dec, spec=spec)
    # "1122" packs into two BCD bytes (\x11\x22).
    assert s == b"header0210\x40\x00\x00\x00\x00\x00\x00\x00\x11\x22"
    assert doc_enc["h"]["len"] == b""
    assert doc_enc["h"]["data"] == b"header"
    assert doc_dec["h"] == "header"
    assert doc_enc["t"]["len"] == b""
    assert doc_enc["t"]["data"] == b"0210"
    assert doc_dec["t"] == "0210"
    assert doc_enc["p"]["len"] == b""
    assert doc_enc["p"]["data"] == b"\x40\x00\x00\x00\x00\x00\x00\x00"
    assert doc_dec["p"] == "4000000000000000"
    assert doc_enc["2"]["len"] == b""
    assert doc_enc["2"]["data"] == b"\x11\x22"
    assert doc_dec["2"] == "1122"
    assert doc_enc.keys() == set(["h", "t", "p", "2"])
    assert doc_dec.keys() == set(["h", "t", "p", "2"])
def test_fixed_field_bcd_present_zero_length():
    """
    BCD zero-length fixed field is required and provided
    This is pointless but should work.
    """
    spec["h"]["data_enc"] = "ascii"
    spec["h"]["len_type"] = 0
    spec["h"]["max_len"] = 6
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"]["len_type"] = 0
    spec["2"]["max_len"] = 0
    spec["2"]["data_enc"] = "b"
    doc_dec = {"h": "header", "t": "0210", "2": ""}
    s, doc_enc = iso8583.encode(doc_dec, spec=spec)
    # Field 2 is flagged in the bitmap but contributes no data bytes.
    assert s == b"header0210\x40\x00\x00\x00\x00\x00\x00\x00"
    assert doc_enc["h"]["len"] == b""
    assert doc_enc["h"]["data"] == b"header"
    assert doc_dec["h"] == "header"
    assert doc_enc["t"]["len"] == b""
    assert doc_enc["t"]["data"] == b"0210"
    assert doc_dec["t"] == "0210"
    assert doc_enc["p"]["len"] == b""
    assert doc_enc["p"]["data"] == b"\x40\x00\x00\x00\x00\x00\x00\x00"
    assert doc_dec["p"] == "4000000000000000"
    assert doc_enc["2"]["len"] == b""
    assert doc_enc["2"]["data"] == b""
    assert doc_dec["2"] == ""
    assert doc_enc.keys() == set(["h", "t", "p", "2"])
    assert doc_dec.keys() == set(["h", "t", "p", "2"])
def test_fixed_field_incorrect_encoding():
    """
    Fixed field is required and provided.
    However, the spec encoding is not correct
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "b"}),
        ("2", {"len_type": 0, "max_len": 2, "data_enc": "invalid"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    expected_error = "Failed to encode field, unknown encoding specified: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode({"h": "header", "t": "0210", "2": "1122"}, spec=spec)
def test_variable_field_ascii_over_max():
    """
    ASCII variable field is required and over max provided
    """
    for field, options in (
        ("h", {"data_enc": "ascii", "len_type": 0, "max_len": 6}),
        ("t", {"data_enc": "ascii"}),
        ("p", {"data_enc": "ascii"}),
        ("2", {"len_type": 2, "max_len": 10, "data_enc": "ascii", "len_enc": "ascii"}),
    ):
        for option, value in options.items():
            spec[field][option] = value
    # 11 characters exceeds the 10-byte maximum for this variable field.
    expected_error = "Field data is 11 bytes, larger than maximum 10: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode(
            {"h": "header", "t": "0210", "2": "12345678901"}, spec=spec
        )
def test_variable_field_ascii_present():
    """An ASCII variable field encodes with a 2-digit ASCII length prefix."""
    spec["h"].update(data_enc="ascii", len_type=0, max_len=6)
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"].update(len_type=2, max_len=10, data_enc="ascii", len_enc="ascii")

    decoded = {"h": "header", "t": "0210", "2": "1122"}
    raw, encoded = iso8583.encode(decoded, spec=spec)

    assert raw == b"header0210\x40\x00\x00\x00\x00\x00\x00\x00041122"
    expectations = [
        ("h", b"", b"header", "header"),
        ("t", b"", b"0210", "0210"),
        ("p", b"", b"\x40\x00\x00\x00\x00\x00\x00\x00", "4000000000000000"),
        ("2", b"04", b"1122", "1122"),
    ]
    for field, enc_len, enc_data, dec_value in expectations:
        assert encoded[field]["len"] == enc_len
        assert encoded[field]["data"] == enc_data
        assert decoded[field] == dec_value
    assert encoded.keys() == {"h", "t", "p", "2"}
    assert decoded.keys() == {"h", "t", "p", "2"}
def test_variable_field_ascii_present_zero_legnth():
    """A zero-length ASCII variable field encodes as just its "00" length prefix."""
    spec["h"].update(data_enc="ascii", len_type=0, max_len=6)
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"].update(len_type=2, max_len=10, data_enc="ascii", len_enc="ascii")

    decoded = {"h": "header", "t": "0210", "2": ""}
    raw, encoded = iso8583.encode(decoded, spec=spec)

    assert raw == b"header0210\x40\x00\x00\x00\x00\x00\x00\x0000"
    expectations = [
        ("h", b"", b"header", "header"),
        ("t", b"", b"0210", "0210"),
        ("p", b"", b"\x40\x00\x00\x00\x00\x00\x00\x00", "4000000000000000"),
        ("2", b"00", b"", ""),
    ]
    for field, enc_len, enc_data, dec_value in expectations:
        assert encoded[field]["len"] == enc_len
        assert encoded[field]["data"] == enc_data
        assert decoded[field] == dec_value
    assert encoded.keys() == {"h", "t", "p", "2"}
    assert decoded.keys() == {"h", "t", "p", "2"}
def test_variable_field_ebcdic_over_max():
    """An EBCDIC variable field longer than max_len raises EncodeError."""
    spec["h"].update(data_enc="ascii", len_type=0, max_len=6)
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "ascii"
    spec["2"].update(len_type=2, max_len=10, data_enc="cp500", len_enc="cp500")

    decoded = {"h": "header", "t": "0210", "2": "12345678901"}
    expected_error = "Field data is 11 bytes, larger than maximum 10: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode(decoded, spec=spec)
def test_variable_field_ebcdic_present():
    """An EBCDIC (cp500) variable field encodes data and length in EBCDIC."""
    spec["h"].update(data_enc="ascii", len_type=0, max_len=6)
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"].update(len_type=2, max_len=10, data_enc="cp500", len_enc="cp500")

    decoded = {"h": "header", "t": "0210", "2": "1122"}
    raw, encoded = iso8583.encode(decoded, spec=spec)

    assert raw == b"header0210\x40\x00\x00\x00\x00\x00\x00\x00\xf0\xf4\xf1\xf1\xf2\xf2"
    expectations = [
        ("h", b"", b"header", "header"),
        ("t", b"", b"0210", "0210"),
        ("p", b"", b"\x40\x00\x00\x00\x00\x00\x00\x00", "4000000000000000"),
        ("2", b"\xf0\xf4", b"\xf1\xf1\xf2\xf2", "1122"),
    ]
    for field, enc_len, enc_data, dec_value in expectations:
        assert encoded[field]["len"] == enc_len
        assert encoded[field]["data"] == enc_data
        assert decoded[field] == dec_value
    assert encoded.keys() == {"h", "t", "p", "2"}
    assert decoded.keys() == {"h", "t", "p", "2"}
def test_variable_field_ebcdic_present_zero_legnth():
    """A zero-length EBCDIC variable field encodes as just its EBCDIC "00" prefix."""
    spec["h"].update(data_enc="ascii", len_type=0, max_len=6)
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"].update(len_type=2, max_len=10, data_enc="cp500", len_enc="cp500")

    decoded = {"h": "header", "t": "0210", "2": ""}
    raw, encoded = iso8583.encode(decoded, spec=spec)

    assert raw == b"header0210\x40\x00\x00\x00\x00\x00\x00\x00\xf0\xf0"
    expectations = [
        ("h", b"", b"header", "header"),
        ("t", b"", b"0210", "0210"),
        ("p", b"", b"\x40\x00\x00\x00\x00\x00\x00\x00", "4000000000000000"),
        ("2", b"\xf0\xf0", b"", ""),
    ]
    for field, enc_len, enc_data, dec_value in expectations:
        assert encoded[field]["len"] == enc_len
        assert encoded[field]["data"] == enc_data
        assert decoded[field] == dec_value
    assert encoded.keys() == {"h", "t", "p", "2"}
    assert decoded.keys() == {"h", "t", "p", "2"}
def test_variable_field_bdc_over_max():
    """A BCD variable field longer than max_len (in bytes) raises EncodeError."""
    spec["h"].update(data_enc="ascii", len_type=0, max_len=6)
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"].update(len_type=2, max_len=5, data_enc="b", len_enc="bcd")

    decoded = {"h": "header", "t": "0210", "2": "123456789012"}
    expected_error = "Field data is 6 bytes, larger than maximum 5: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode(decoded, spec=spec)
def test_variable_field_bdc_odd():
    """A BCD variable field with odd-length hex data raises EncodeError."""
    spec["h"].update(data_enc="ascii", len_type=0, max_len=6)
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"].update(len_type=2, max_len=10, data_enc="b", len_enc="bcd")

    decoded = {"h": "header", "t": "0210", "2": "12345"}
    expected_error = "Failed to encode field, odd-length hex data: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode(decoded, spec=spec)
def test_variable_field_bdc_ascii_length():
    """A BCD variable field encodes with a 3-digit ASCII length prefix."""
    spec["h"].update(data_enc="ascii", len_type=0, max_len=6)
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"].update(len_type=3, max_len=10, data_enc="b", len_enc="ascii")

    decoded = {"h": "header", "t": "0210", "2": "1122"}
    raw, encoded = iso8583.encode(decoded, spec=spec)

    assert raw == b"header0210\x40\x00\x00\x00\x00\x00\x00\x00002\x11\x22"
    expectations = [
        ("h", b"", b"header", "header"),
        ("t", b"", b"0210", "0210"),
        ("p", b"", b"\x40\x00\x00\x00\x00\x00\x00\x00", "4000000000000000"),
        ("2", b"002", b"\x11\x22", "1122"),
    ]
    for field, enc_len, enc_data, dec_value in expectations:
        assert encoded[field]["len"] == enc_len
        assert encoded[field]["data"] == enc_data
        assert decoded[field] == dec_value
    assert encoded.keys() == {"h", "t", "p", "2"}
    assert decoded.keys() == {"h", "t", "p", "2"}
def test_variable_field_bdc_ebcdic_length():
    """A BCD variable field encodes with a 3-digit EBCDIC length prefix."""
    spec["h"].update(data_enc="ascii", len_type=0, max_len=6)
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"].update(len_type=3, max_len=10, data_enc="b", len_enc="cp500")

    decoded = {"h": "header", "t": "0210", "2": "1122"}
    raw, encoded = iso8583.encode(decoded, spec=spec)

    assert raw == b"header0210\x40\x00\x00\x00\x00\x00\x00\x00\xf0\xf0\xf2\x11\x22"
    expectations = [
        ("h", b"", b"header", "header"),
        ("t", b"", b"0210", "0210"),
        ("p", b"", b"\x40\x00\x00\x00\x00\x00\x00\x00", "4000000000000000"),
        ("2", b"\xf0\xf0\xf2", b"\x11\x22", "1122"),
    ]
    for field, enc_len, enc_data, dec_value in expectations:
        assert encoded[field]["len"] == enc_len
        assert encoded[field]["data"] == enc_data
        assert decoded[field] == dec_value
    assert encoded.keys() == {"h", "t", "p", "2"}
    assert decoded.keys() == {"h", "t", "p", "2"}
# fmt: off
@pytest.mark.parametrize(
    ["len_enc"],
    [
        ("b",),
        ("bcd",),
    ],
)
# fmt: on
def test_variable_field_bcd_present(len_enc: str) -> None:
    """A BCD variable field encodes with a binary/BCD length prefix."""
    spec["h"].update(data_enc="ascii", len_type=0, max_len=6)
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"].update(len_type=2, max_len=10, data_enc="b", len_enc=len_enc)

    decoded = {"h": "header", "t": "0210", "2": "1122"}
    raw, encoded = iso8583.encode(decoded, spec=spec)

    assert raw == b"header0210\x40\x00\x00\x00\x00\x00\x00\x00\x00\x02\x11\x22"
    expectations = [
        ("h", b"", b"header", "header"),
        ("t", b"", b"0210", "0210"),
        ("p", b"", b"\x40\x00\x00\x00\x00\x00\x00\x00", "4000000000000000"),
        ("2", b"\x00\x02", b"\x11\x22", "1122"),
    ]
    for field, enc_len, enc_data, dec_value in expectations:
        assert encoded[field]["len"] == enc_len
        assert encoded[field]["data"] == enc_data
        assert decoded[field] == dec_value
    assert encoded.keys() == {"h", "t", "p", "2"}
    assert decoded.keys() == {"h", "t", "p", "2"}
# fmt: off
@pytest.mark.parametrize(
    ["len_enc"],
    [
        ("b",),
        ("bcd",),
    ],
)
# fmt: on
def test_variable_field_bcd_present_zero_length(len_enc: str):
    """A zero-length BCD variable field encodes as just its binary/BCD prefix."""
    spec["h"].update(data_enc="ascii", len_type=0, max_len=6)
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"].update(len_type=2, max_len=10, data_enc="b", len_enc=len_enc)

    decoded = {"h": "header", "t": "0210", "2": ""}
    raw, encoded = iso8583.encode(decoded, spec=spec)

    assert raw == b"header0210\x40\x00\x00\x00\x00\x00\x00\x00\x00\x00"
    expectations = [
        ("h", b"", b"header", "header"),
        ("t", b"", b"0210", "0210"),
        ("p", b"", b"\x40\x00\x00\x00\x00\x00\x00\x00", "4000000000000000"),
        ("2", b"\x00\x00", b"", ""),
    ]
    for field, enc_len, enc_data, dec_value in expectations:
        assert encoded[field]["len"] == enc_len
        assert encoded[field]["data"] == enc_data
        assert decoded[field] == dec_value
    assert encoded.keys() == {"h", "t", "p", "2"}
    assert decoded.keys() == {"h", "t", "p", "2"}
def test_variable_field_incorrect_encoding():
    """A variable field whose spec names an unknown length encoding raises EncodeError."""
    spec["h"].update(data_enc="ascii", len_type=0, max_len=6)
    spec["t"]["data_enc"] = "ascii"
    spec["p"]["data_enc"] = "b"
    spec["2"].update(len_type=2, max_len=10, data_enc="ascii", len_enc="invalid")

    decoded = {"h": "header", "t": "0210", "2": "1122"}
    expected_error = "Failed to encode field length, unknown encoding specified: field 2"
    with pytest.raises(iso8583.EncodeError, match=expected_error):
        iso8583.encode(decoded, spec=spec)
| 28.345112
| 100
| 0.548722
| 10,699
| 72,195
| 3.544537
| 0.023367
| 0.075627
| 0.095404
| 0.104106
| 0.944677
| 0.936292
| 0.915592
| 0.903278
| 0.894075
| 0.883132
| 0
| 0.087652
| 0.220611
| 72,195
| 2,546
| 101
| 28.356245
| 0.586319
| 0.069839
| 0
| 0.81543
| 0
| 0.012463
| 0.266346
| 0.051921
| 0
| 0
| 0
| 0
| 0.310979
| 1
| 0.052819
| false
| 0
| 0.002967
| 0
| 0.055786
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c0364fdae59826dd9265411a636ea51f011bebfa
| 116,999
|
py
|
Python
|
model/layer.py
|
CIS-group/MolNet
|
a8e07bf3598b2d48f3f025722ba60e76a53cf54b
|
[
"MIT"
] | 1
|
2022-03-31T03:32:46.000Z
|
2022-03-31T03:32:46.000Z
|
model/layer.py
|
CIS-group/MolNet
|
a8e07bf3598b2d48f3f025722ba60e76a53cf54b
|
[
"MIT"
] | null | null | null |
model/layer.py
|
CIS-group/MolNet
|
a8e07bf3598b2d48f3f025722ba60e76a53cf54b
|
[
"MIT"
] | null | null | null |
from keras import initializers, regularizers, activations
from keras.layers import Dense, Add, BatchNormalization, PReLU
#from keras.layers.advanced_activations import LeakyReLU, PReLU
from keras.engine.topology import Layer
from keras import backend as K
import tensorflow as tf
import tensorflow_probability as tfp
import numpy as np
class GraphEmbed(Layer):
    """Initial embedding layer.

    Passes the scalar atom features through unchanged and pairs them with an
    all-zero vector-feature tensor of shape (samples, max_atoms, coor_dims,
    atom_feat), derived from the shapes of the two inputs.
    """

    def __init__(self, **kwargs):
        super(GraphEmbed, self).__init__(**kwargs)

    def build(self, input_shape):
        super(GraphEmbed, self).build(input_shape)

    def call(self, inputs, mask=None):
        # inputs: scalar features (samples, max_atoms, atom_feat) and
        # relative distances (samples, max_atoms, max_atoms, coor_dims).
        atom_feats, rel_dist = inputs
        n_atoms = int(atom_feats.shape[1])
        n_feat = int(atom_feats.shape[-1])
        n_coor = int(rel_dist.shape[-1])
        # Build the zero-filled vector features by broadcasting a zero copy of
        # the scalar features across a new coordinate axis.
        zero_vectors = tf.zeros_like(atom_feats)
        zero_vectors = tf.reshape(zero_vectors, [-1, n_atoms, 1, n_feat])
        zero_vectors = tf.tile(zero_vectors, [1, 1, n_coor, 1])
        return [atom_feats, zero_vectors]

    def compute_output_shape(self, input_shape):
        scalar_shape = input_shape[0]
        vector_shape = (scalar_shape[0], scalar_shape[1],
                        input_shape[-1][-1], scalar_shape[-1])
        return [scalar_shape, vector_shape]
class GraphSToS(Layer):
    """Scalar-to-scalar pairwise layer.

    For every ordered atom pair (i, j), concatenates the scalar features of
    atoms i and j and applies a shared dense transform, producing a
    (samples, max_atoms, max_atoms, filters) tensor.
    """

    def __init__(self,
                 filters,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        # Known activation names are resolved via Keras; anything else is
        # stored as-is and must be callable in call().
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        # self.bias_initializer = initializers.get(bias_initializer)
        # Passing bias_initializer=None disables the bias term (see build()).
        if bias_initializer is None:
            self.bias_initializer = None
        else:
            self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.filters = filters
        super(GraphSToS, self).__init__(**kwargs)

    def get_config(self):
        # Only `filters` is serialized beyond the base config; the
        # initializers/regularizer/activation are not round-tripped here.
        base_config = super(GraphSToS, self).get_config()
        base_config['filters'] = self.filters
        return base_config

    def build(self, input_shape):
        # Kernel maps concatenated pair features (atom_feat*2) -> filters.
        atom_feat = input_shape[-1]
        self.w_ss = self.add_weight(shape=(atom_feat * 2, self.filters),
                                    initializer=self.kernel_initializer,
                                    regularizer=self.kernel_regularizer,
                                    name='w_ss')
        if self.bias_initializer is not None:
            self.b_ss = self.add_weight(shape=(self.filters,),
                                        name='b_ss',
                                        initializer=self.bias_initializer)
        super(GraphSToS, self).build(input_shape)

    def call(self, inputs, bond=None, mask=None):
        # Import graph tensors
        # scalar_features = (samples, max_atoms, atom_feat)
        scalar_features = inputs
        # Get parameters
        max_atoms = int(scalar_features.shape[1])
        atom_feat = int(scalar_features.shape[-1])
        # Expand scalar features to 4D
        scalar_features = tf.reshape(scalar_features, [-1, max_atoms, 1, atom_feat])
        scalar_features = tf.tile(scalar_features, [1, 1, max_atoms, 1])  # (samples, max_atoms, max_atoms, atom_feat)
        # Combine between atoms: position [i, j] holds [feat_i ; feat_j].
        scalar_features_t = tf.transpose(scalar_features, perm=[0, 2, 1, 3])  # (samples, max_atoms, max_atoms, atom_feat)
        scalar_features = tf.concat([scalar_features, scalar_features_t], -1)  # (samples, max_atoms, max_atoms, atom_feat*2)
        # Linear combination (flatten pairs, apply shared dense weights)
        scalar_features = tf.reshape(scalar_features, [-1, atom_feat * 2])
        # scalar_features = tf.matmul(scalar_features, self.w_ss) + self.b_ss
        if self.bias_initializer is None:
            scalar_features = tf.matmul(scalar_features, self.w_ss)
        else:
            scalar_features = tf.matmul(scalar_features, self.w_ss) + self.b_ss
        scalar_features = tf.reshape(scalar_features, [-1, max_atoms, max_atoms, self.filters])
        # multiply bond feature (elementwise; bond must broadcast to the
        # pairwise feature shape)
        if bond is not None:
            scalar_features = tf.linalg.einsum('aijk,aijk->aijk', scalar_features, bond)
        # masking: zero out rows for padded atoms. NOTE(review): the mask is
        # tiled along axis j only, so column entries of masked atoms may
        # survive — confirm this is intended.
        if mask is not None:
            mask = tf.reshape(mask, [-1, max_atoms, 1])
            mask = tf.tile(mask, [1, 1, max_atoms])
            scalar_features = tf.linalg.einsum('aijk,aij->aijk', scalar_features, mask)
        # Activation
        scalar_features = self.activation(scalar_features)
        return scalar_features

    def compute_output_shape(self, input_shape):
        return input_shape[0], input_shape[1], input_shape[1], self.filters
class GraphSToV(Layer):
    """Scalar-to-vector pairwise layer.

    Transforms concatenated scalar features of each atom pair with a shared
    dense layer, then multiplies by the pairwise distance vectors to produce
    vector features of shape (samples, max_atoms, max_atoms, coor_dims,
    filters).
    """

    def __init__(self,
                 filters,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        # Known activation names are resolved via Keras; anything else is
        # stored as-is and must be callable in call().
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        # self.bias_initializer = initializers.get(bias_initializer)
        # Passing bias_initializer=None disables the bias term (see build()).
        if bias_initializer is None:
            self.bias_initializer = None
        else:
            self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.filters = filters
        super(GraphSToV, self).__init__(**kwargs)

    def get_config(self):
        # Only `filters` is serialized beyond the base config.
        base_config = super(GraphSToV, self).get_config()
        base_config['filters'] = self.filters
        return base_config

    def build(self, input_shape):
        # input_shape is a list: [scalar features, distances].
        atom_feat = input_shape[0][-1]
        self.w_sv = self.add_weight(shape=(atom_feat * 2, self.filters),
                                    initializer=self.kernel_initializer,
                                    regularizer=self.kernel_regularizer,
                                    name='w_sv')
        if self.bias_initializer is not None:
            self.b_sv = self.add_weight(shape=(self.filters,),
                                        name='b_sv',
                                        initializer=self.bias_initializer)
        super(GraphSToV, self).build(input_shape)

    def call(self, inputs, bond=None, mask=None):
        # Import graph tensors
        # scalar_features = (samples, max_atoms, atom_feat)
        # distances = (samples, max_atoms, max_atoms, coor_dims)
        scalar_features, distances = inputs
        # Get parameters
        max_atoms = int(scalar_features.shape[1])
        atom_feat = int(scalar_features.shape[-1])
        coor_dims = int(distances.shape[-1])
        # Expand scalar features to 4D
        scalar_features = tf.reshape(scalar_features, [-1, max_atoms, 1, atom_feat])
        scalar_features = tf.tile(scalar_features, [1, 1, max_atoms, 1])
        # Combine between atoms: position [i, j] holds [feat_i ; feat_j].
        scalar_features_t = tf.transpose(scalar_features, perm=[0, 2, 1, 3])
        scalar_features = tf.concat([scalar_features, scalar_features_t], -1)
        # Apply weights (shared dense transform over all pairs)
        scalar_features = tf.reshape(scalar_features, [-1, atom_feat * 2])
        # scalar_features = tf.matmul(scalar_features, self.w_sv) + self.b_sv
        if self.bias_initializer is None:
            scalar_features = tf.matmul(scalar_features, self.w_sv)
        else:
            scalar_features = tf.matmul(scalar_features, self.w_sv) + self.b_sv
        # Broadcast the transformed scalars across the coordinate axis.
        scalar_features = tf.reshape(scalar_features, [-1, max_atoms, max_atoms, 1, self.filters])
        scalar_features = tf.tile(scalar_features, [1, 1, 1, coor_dims, 1])
        # multiply bond feature
        if bond is not None:
            scalar_features = tf.linalg.einsum('aijkl,aijl->aijkl', scalar_features, bond)
        # masking: zero out rows for padded atoms (tiled along axis j only).
        if mask is not None:
            mask = tf.reshape(mask, [-1, max_atoms, 1])
            mask = tf.tile(mask, [1, 1, max_atoms])
            scalar_features = tf.linalg.einsum('aijkl,aij->aijkl', scalar_features, mask)
        # Expand distances to 5D so they align with the filter axis.
        distances = tf.reshape(distances, [-1, max_atoms, max_atoms, coor_dims, 1])
        distances = tf.tile(distances, [1, 1, 1, 1, self.filters])
        # Tensor product: scale each distance vector by the pair features.
        vector_features = tf.multiply(scalar_features, distances)
        # Activation
        vector_features = self.activation(vector_features)
        return vector_features

    def compute_output_shape(self, input_shape):
        return input_shape[0][0], input_shape[0][1], input_shape[0][1], input_shape[1][-1], self.filters
class GraphVToV(Layer):
    """Vector-to-vector pairwise layer.

    Concatenates the vector features of each atom pair along the feature axis
    and applies a shared dense transform, producing
    (samples, max_atoms, max_atoms, coor_dims, filters).
    """

    def __init__(self,
                 filters,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        # Known activation names are resolved via Keras; anything else is
        # stored as-is and must be callable in call().
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        # self.bias_initializer = initializers.get(bias_initializer)
        # Passing bias_initializer=None disables the bias term (see build()).
        if bias_initializer is None:
            self.bias_initializer = None
        else:
            self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.filters = filters
        super(GraphVToV, self).__init__(**kwargs)

    def get_config(self):
        # Only `filters` is serialized beyond the base config.
        base_config = super(GraphVToV, self).get_config()
        base_config['filters'] = self.filters
        return base_config

    def build(self, input_shape):
        # Single-tensor input: (samples, max_atoms, coor_dims, atom_feat).
        atom_feat = input_shape[-1]
        self.w_vv = self.add_weight(shape=(atom_feat * 2, self.filters),
                                    initializer=self.kernel_initializer,
                                    regularizer=self.kernel_regularizer,
                                    name='w_vv')
        if self.bias_initializer is not None:
            self.b_vv = self.add_weight(shape=(self.filters,),
                                        name='b_vv',
                                        initializer=self.bias_initializer)
        super(GraphVToV, self).build(input_shape)

    def call(self, inputs, bond=None, mask=None):
        # Import graph tensors
        # vector_features = (samples, max_atoms, coor_dims, atom_feat)
        vector_features = inputs
        # Get parameters
        max_atoms = int(vector_features.shape[1])
        atom_feat = int(vector_features.shape[-1])
        coor_dims = int(vector_features.shape[-2])
        # Expand vector features to 5D
        vector_features = tf.reshape(vector_features, [-1, max_atoms, 1, coor_dims, atom_feat])
        vector_features = tf.tile(vector_features, [1, 1, max_atoms, 1, 1])
        # Combine between atoms: position [i, j] holds [feat_i ; feat_j].
        vector_features_t = tf.transpose(vector_features, perm=[0, 2, 1, 3, 4])
        vector_features = tf.concat([vector_features, vector_features_t], -1)
        # Apply weights (shared dense transform over all pairs/coordinates)
        vector_features = tf.reshape(vector_features, [-1, atom_feat * 2])
        # vector_features = tf.matmul(vector_features, self.w_vv) + self.b_vv
        if self.bias_initializer is None:
            vector_features = tf.matmul(vector_features, self.w_vv)
        else:
            vector_features = tf.matmul(vector_features, self.w_vv) + self.b_vv
        vector_features = tf.reshape(vector_features, [-1, max_atoms, max_atoms, coor_dims, self.filters])
        # multiply bond feature
        if bond is not None:
            vector_features = tf.linalg.einsum('aijkl,aijl->aijkl', vector_features, bond)
        # masking: zero out rows for padded atoms (tiled along axis j only).
        if mask is not None:
            mask = tf.reshape(mask, [-1, max_atoms, 1])
            mask = tf.tile(mask, [1, 1, max_atoms])
            vector_features = tf.linalg.einsum('aijkl,aij->aijkl', vector_features, mask)
        # Activation
        vector_features = self.activation(vector_features)
        return vector_features

    def compute_output_shape(self, input_shape):
        return input_shape[0], input_shape[1], input_shape[1], input_shape[-2], self.filters
class GraphVToS(Layer):
    """Vector-to-scalar pairwise layer.

    Transforms concatenated pairwise vector features with a shared dense
    layer, then contracts the coordinate axis against the raw distance
    vectors (a projection-like dot product), yielding
    (samples, max_atoms, max_atoms, filters).
    """

    def __init__(self,
                 filters,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        # Known activation names are resolved via Keras; anything else is
        # stored as-is and must be callable in call().
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        # self.bias_initializer = initializers.get(bias_initializer)
        # Passing bias_initializer=None disables the bias term (see build()).
        if bias_initializer is None:
            self.bias_initializer = None
        else:
            self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.filters = filters
        super(GraphVToS, self).__init__(**kwargs)

    def get_config(self):
        # Only `filters` is serialized beyond the base config.
        base_config = super(GraphVToS, self).get_config()
        base_config['filters'] = self.filters
        return base_config

    def build(self, input_shape):
        # input_shape is a list: [vector features, distances].
        atom_feat = input_shape[0][-1]
        self.w_vs = self.add_weight(shape=(atom_feat * 2, self.filters),
                                    initializer=self.kernel_initializer,
                                    regularizer=self.kernel_regularizer,
                                    name='w_vs')
        if self.bias_initializer is not None:
            self.b_vs = self.add_weight(shape=(self.filters,),
                                        name='b_vs',
                                        initializer=self.bias_initializer)
        super(GraphVToS, self).build(input_shape)

    def call(self, inputs, bond=None, mask=None):
        # Import graph tensors
        # vector_features = (samples, max_atoms, coor_dims, atom_feat)
        # distances = (samples, max_atoms, max_atoms, coor_dims)
        vector_features, distances = inputs
        # Get parameters
        max_atoms = int(vector_features.shape[1])
        atom_feat = int(vector_features.shape[-1])
        coor_dims = int(vector_features.shape[-2])
        # Expand vector features to 5D
        vector_features = tf.reshape(vector_features, [-1, max_atoms, 1, coor_dims, atom_feat])
        vector_features = tf.tile(vector_features, [1, 1, max_atoms, 1, 1])
        # Combine between atoms: position [i, j] holds [feat_i ; feat_j].
        vector_features_t = tf.transpose(vector_features, perm=[0, 2, 1, 3, 4])
        vector_features = tf.concat([vector_features, vector_features_t], -1)
        # Apply weights (shared dense transform over all pairs/coordinates)
        vector_features = tf.reshape(vector_features, [-1, atom_feat * 2])
        # vector_features = tf.matmul(vector_features, self.w_vs) + self.b_vs
        if self.bias_initializer is None:
            vector_features = tf.matmul(vector_features, self.w_vs)
        else:
            vector_features = tf.matmul(vector_features, self.w_vs) + self.b_vs
        vector_features = tf.reshape(vector_features, [-1, max_atoms, max_atoms, coor_dims, self.filters])
        # multiply bond feature
        if bond is not None:
            vector_features = tf.linalg.einsum('aijkl,aijl->aijkl', vector_features, bond)
        # masking: zero out rows for padded atoms (tiled along axis j only).
        if mask is not None:
            mask = tf.reshape(mask, [-1, max_atoms, 1])
            mask = tf.tile(mask, [1, 1, max_atoms])
            vector_features = tf.linalg.einsum('aijkl,aij->aijkl', vector_features, mask)
        # # Calculate r^ = r / |r| and expand it to 5D
        # distances_hat = tf.sqrt(tf.reduce_sum(tf.square(distances), axis=-1, keepdims=True))
        # distances_hat = distances_hat + tf.cast(tf.equal(distances_hat, 0), tf.float32)
        # distances_hat = tf.divide(distances, distances_hat)
        # distances_hat = tf.reshape(distances_hat, [-1, max_atoms, max_atoms, coor_dims, 1])
        # distances_hat = tf.tile(distances_hat, [1, 1, 1, 1, self.filters])
        # NOTE(review): the unit-vector normalization above is disabled; the
        # projection below uses RAW distance vectors, so the result scales
        # with interatomic distance — confirm this is intentional.
        distances_hat = tf.reshape(distances, [-1, max_atoms, max_atoms, coor_dims, 1])
        distances_hat = tf.tile(distances_hat, [1, 1, 1, 1, self.filters])
        # Projection of v onto r = v (dot) r^
        scalar_features = tf.multiply(vector_features, distances_hat)
        scalar_features = tf.reduce_sum(scalar_features, axis=-2)
        # Activation
        scalar_features = self.activation(scalar_features)
        return scalar_features

    def compute_output_shape(self, input_shape):
        return input_shape[0][0], input_shape[0][1], input_shape[0][1], self.filters
class GraphConvS(Layer):
    """Graph convolution over pairwise scalar features.

    Concatenates two pairwise scalar-feature tensors, applies a shared dense
    transform, masks by the adjacency matrix, and pools over the second atom
    axis ('sum' / 'max' / 'mean' / 'all'), producing per-atom features of
    shape (samples, max_atoms, filters) (or filters*3 for 'all').
    """

    def __init__(self,
                 filters,
                 pooling='sum',
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        # Known activation names are resolved via Keras; anything else (e.g.
        # the string "prelu" — see call()) is stored as-is.
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        # self.bias_initializer = initializers.get(bias_initializer)
        # Passing bias_initializer=None disables the bias term (see build()).
        if bias_initializer is None:
            self.bias_initializer = None
        else:
            self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.filters = filters
        self.pooling = pooling
        super(GraphConvS, self).__init__(**kwargs)

    def get_config(self):
        # `filters` and `pooling` are serialized beyond the base config.
        base_config = super(GraphConvS, self).get_config()
        base_config['filters'] = self.filters
        base_config['pooling'] = self.pooling
        return base_config

    def build(self, input_shape):
        # input_shape is a list: [features_1, features_2, adjacency].
        atom_feat_1 = input_shape[0][-1]
        atom_feat_2 = input_shape[1][-1]
        self.w_conv_scalar = self.add_weight(shape=(atom_feat_1 + atom_feat_2, self.filters),
                                             initializer=self.kernel_initializer,
                                             regularizer=self.kernel_regularizer,
                                             name='w_conv_scalar')
        if self.bias_initializer is not None:
            self.b_conv_scalar = self.add_weight(shape=(self.filters,),
                                                 name='b_conv_scalar',
                                                 initializer=self.bias_initializer)
        super(GraphConvS, self).build(input_shape)

    def call(self, inputs, bond=None, mask=None):
        # Import graph tensors
        # scalar_features_1 = (samples, max_atoms, max_atoms, atom_feat)
        # scalar_features_2 = (samples, max_atoms, max_atoms, atom_feat)
        # adjacency = (samples, max_atoms, max_atoms)
        scalar_features_1, scalar_features_2, adjacency = inputs
        # Get parameters
        max_atoms = int(scalar_features_1.shape[1])
        atom_feat_1 = int(scalar_features_1.shape[-1])
        atom_feat_2 = int(scalar_features_2.shape[-1])
        # Concatenate two features
        scalar_features = tf.concat([scalar_features_1, scalar_features_2], axis=-1)
        # Linear combination (flatten pairs, apply shared dense weights)
        scalar_features = tf.reshape(scalar_features, [-1, atom_feat_1 + atom_feat_2])
        # scalar_features = tf.matmul(scalar_features, self.w_conv_scalar) + self.b_conv_scalar
        if self.bias_initializer is None:
            scalar_features = tf.matmul(scalar_features, self.w_conv_scalar)
        else:
            scalar_features = tf.matmul(scalar_features, self.w_conv_scalar) + self.b_conv_scalar
        scalar_features = tf.reshape(scalar_features, [-1, max_atoms, max_atoms, self.filters])
        # Adjacency masking: keep only contributions from connected atom pairs.
        adjacency = tf.reshape(adjacency, [-1, max_atoms, max_atoms, 1])
        adjacency = tf.tile(adjacency, [1, 1, 1, self.filters])
        scalar_features = tf.multiply(scalar_features, adjacency)
        # Integrate over second atom axis.
        # NOTE(review): the mean key here is "mean", while GraphConvV uses
        # "avg" for the same option — confirm which spelling callers use.
        if self.pooling == "sum":
            scalar_features = tf.reduce_sum(scalar_features, axis=2)
        elif self.pooling == "max":
            scalar_features = tf.reduce_max(scalar_features, axis=2)
        elif self.pooling == "mean":
            scalar_features = tf.reduce_mean(scalar_features, axis=2)
        elif self.pooling == "all":
            # Stack sum/max/mean along a new trailing axis, then flatten to
            # a filters*3 feature dimension.
            scalar_features = tf.stack([tf.reduce_sum(scalar_features, axis=2),
                                        tf.reduce_max(scalar_features, axis=2),
                                        tf.reduce_mean(scalar_features, axis=2)],
                                       axis=-1)
            scalar_features = tf.reshape(scalar_features, [-1, max_atoms, self.filters * 3])
        # Activation
        # NOTE(review): when activation was given as the string "prelu",
        # self.activation is rebound to a new PReLU sublayer on the FIRST
        # call; a layer created inside call() is not built in build() and its
        # weights may not be tracked — confirm this is intended.
        if self.activation == "prelu":
            self.activation = PReLU(input_shape=(max_atoms, self.filters), shared_axes=[1, 2])
        scalar_features = self.activation(scalar_features)
        return scalar_features

    def compute_output_shape(self, input_shape):
        if self.pooling == "all":
            return input_shape[0][0], input_shape[0][1], self.filters * 3
        else:
            return input_shape[0][0], input_shape[0][1], self.filters
class GraphConvV(Layer):
def __init__(self,
filters,
pooling='sum',
kernel_initializer='glorot_uniform',
kernel_regularizer=None,
bias_initializer='zeros',
activation=None,
**kwargs):
if activation in ["relu", "tanh", "sigmoid", None]:
self.activation = activations.get(activation)
else:
self.activation = activation
self.kernel_initializer = initializers.get(kernel_initializer)
# self.bias_initializer = initializers.get(bias_initializer)
if bias_initializer is None:
self.bias_initializer = None
else:
self.bias_initializer = initializers.get(bias_initializer)
self.kernel_regularizer = regularizers.get(kernel_regularizer)
self.filters = filters
self.pooling = pooling
super(GraphConvV, self).__init__(**kwargs)
def get_config(self):
base_config = super(GraphConvV, self).get_config()
base_config['filters'] = self.filters
base_config['pooling'] = self.pooling
return base_config
def build(self, input_shape):
atom_feat_1 = input_shape[0][-1]
atom_feat_2 = input_shape[1][-1]
self.w_conv_vector = self.add_weight(shape=(atom_feat_1 + atom_feat_2, self.filters),
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
name='w_conv_vector')
if self.bias_initializer is not None:
self.b_conv_vector = self.add_weight(shape=(self.filters,),
initializer=self.bias_initializer,
name='b_conv_vector')
super(GraphConvV, self).build(input_shape)
def call(self, inputs, bond=None, mask=None):
# Import graph tensors
# vector_features_1 = (samples, max_atoms, max_atoms, coor_dims, atom_feat)
# vector_features_2 = (samples, max_atoms, max_atoms, coor_dims, atom_feat)
# adjacency = (samples, max_atoms, max_atoms)
vector_features_1, vector_features_2, adjacency = inputs
# Get parameters
max_atoms = int(vector_features_1.shape[1])
atom_feat_1 = int(vector_features_1.shape[-1])
atom_feat_2 = int(vector_features_2.shape[-1])
coor_dims = int(vector_features_1.shape[-2])
# Concatenate two features
vector_features = tf.concat([vector_features_1, vector_features_2], axis=-1)
# Linear combination
vector_features = tf.reshape(vector_features, [-1, atom_feat_1 + atom_feat_2])
# vector_features = tf.matmul(vector_features, self.w_conv_vector) + self.b_conv_vector
if self.bias_initializer is None:
vector_features = tf.matmul(vector_features, self.w_conv_vector)
else:
vector_features = tf.matmul(vector_features, self.w_conv_vector) + self.b_conv_vector
vector_features = tf.reshape(vector_features, [-1, max_atoms, max_atoms, coor_dims, self.filters])
# Adjacency masking
adjacency = tf.reshape(adjacency, [-1, max_atoms, max_atoms, 1, 1])
adjacency = tf.tile(adjacency, [1, 1, 1, coor_dims, self.filters])
vector_features = tf.multiply(vector_features, adjacency)
# Integrate over second atom axis
if self.pooling == "sum":
vector_features = tf.reduce_sum(vector_features, axis=2)
elif self.pooling == "max":
vector_features = tf.reduce_max(vector_features, axis=2)
elif self.pooling == "avg":
vector_features = tf.reduce_mean(vector_features, axis=2)
elif self.pooling == "all":
vector_features = tf.stack([tf.reduce_sum(vector_features, axis=2),
tf.reduce_max(vector_features, axis=2),
tf.reduce_mean(vector_features, axis=2)],
axis=-1)
vector_features = tf.reshape(vector_features, [-1, max_atoms, coor_dims, self.filters * 3])
# Activation
vector_features = self.activation(vector_features)
return vector_features
def compute_output_shape(self, input_shape):
    """Output shape (samples, max_atoms, coor_dims, channels); the channel
    count triples when all three poolings are stacked ("all")."""
    channels = self.filters * 3 if self.pooling == "all" else self.filters
    vec_shape = input_shape[0]
    return vec_shape[0], vec_shape[1], vec_shape[-2], channels
class GraphGather(Layer):
    """Graph readout: reduces per-atom scalar and vector features over the
    atom axis to one graph-level pair of tensors.

    call inputs: [scalar_features, vector_features]
      scalar_features = (samples, max_atoms, atom_feat)
      vector_features = (samples, max_atoms, coor_dims, atom_feat)
    Returns [pooled_scalars (samples, atom_feat),
             pooled_vectors (samples, coor_dims, atom_feat)], optionally
    converted to spherical coordinates.
    """

    def __init__(self,
                 pooling="sum",
                 system="cartesian",
                 activation=None,
                 **kwargs):
        # Well-known names resolve through Keras; any other value is
        # assumed to already be a callable activation.
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
            # NOTE(review): activation_n is only assigned on this branch;
            # reading it after passing a named activation would raise
            # AttributeError.
            self.activation_n = activation
        self.pooling = pooling
        self.system = system  # "cartesian" (pass-through) or "spherical"
        super(GraphGather, self).__init__(**kwargs)

    def build(self, inputs_shape):
        # No trainable weights: readout is a pure pooling op.
        super(GraphGather, self).build(inputs_shape)

    def get_config(self):
        base_config = super(GraphGather, self).get_config()
        base_config['pooling'] = self.pooling
        base_config['system'] = self.system
        return base_config

    def call(self, inputs, adjms=None, mask=None):
        # Import graph tensors
        # scalar_features = (samples, max_atoms, atom_feat)
        # vector_features = (samples, max_atoms, coor_dims, atom_feat)
        scalar_features, vector_features = inputs
        # Get parameters
        max_atoms = int(vector_features.shape[1])
        atom_feat = int(vector_features.shape[-1])
        coor_dims = int(vector_features.shape[-2])
        # Integrate over atom axis
        if self.pooling == "sum":
            scalar_features = tf.reduce_sum(scalar_features, axis=1)
            vector_features = tf.reduce_sum(vector_features, axis=1)
        elif self.pooling == "avg":
            scalar_features = tf.reduce_mean(scalar_features, axis=1)
            vector_features = tf.reduce_mean(vector_features, axis=1)
        elif self.pooling == "avg_adv_1":
            # Average over real atoms only by dropping padded rows with a
            # ragged boolean mask.  Assumes ``mask`` is a per-atom boolean
            # validity tensor of shape (batch, max_atoms) -- TODO confirm.
            #mask = tf.reduce_max(adjms, axis=-1)
            #mask = tf.where(mask > tf.zeros_like(mask), tf.ones_like(mask), tf.zeros_like(mask)) # (batch, num_atoms)
            mask_s = tf.reshape(mask, [-1, max_atoms, 1])
            mask_s = tf.tile(mask_s, [1, 1, atom_feat])  # (batch, max_atoms, atom_feat)
            mask_v = tf.reshape(mask, [-1, max_atoms, 1, 1])
            mask_v = tf.tile(mask_v, [1, 1, coor_dims, atom_feat])  # (batch, max_atoms, 3, atom_feat)
            scalar_features = tf.ragged.boolean_mask(scalar_features, mask_s)  # (batch, ?, ?)
            vector_features = tf.ragged.boolean_mask(vector_features, mask_v)  # (batch, ?, ?, ?)
            scalar_features = tf.reduce_mean(scalar_features, axis=1)
            #scalar_features = tf.cast(scalar_features, tf.float32)
            vector_features = tf.reduce_mean(vector_features, axis=1)
        elif self.pooling == "avg_adv_2":
            # Masked average computed as sum / atom count, avoiding ragged
            # tensors.  Assumes the sums below were already restricted to
            # valid atoms upstream (the einsum masking lines are commented
            # out) -- NOTE(review): verify against caller.
            # mask = tf.reduce_max(adjms, axis=-1)
            # mask = tf.where(mask > tf.zeros_like(mask), tf.ones_like(mask), tf.zeros_like(mask)) # (batch, num_atoms)
            num_atoms = tf.reduce_sum(mask, axis=1)  # (batch,)
            num_atoms_s = tf.reshape(num_atoms, [-1, 1])
            num_atoms_s = tf.tile(num_atoms_s, [1, atom_feat])  # (batch, atom_feat)
            num_atoms_v = tf.reshape(num_atoms, [-1, 1, 1])
            num_atoms_v = tf.tile(num_atoms_v, [1, coor_dims, atom_feat])  # (batch, 3, atom_feat)
            # scalar_features = tf.linalg.einsum('aij,ai->aij', scalar_features, mask)
            scalar_features = tf.reduce_sum(scalar_features, axis=1)  # (batch, atom_feat)
            scalar_features = tf.truediv(scalar_features, num_atoms_s)  # (batch, atom_feat)
            # vector_features = tf.linalg.einsum('aijk,ai->aijk', vector_features, mask)
            vector_features = tf.reduce_sum(vector_features, axis=1)  # (batch, 3, atom_feat)
            vector_features = tf.truediv(vector_features, num_atoms_v)  # (batch, 3, atom_feat)
        elif self.pooling == "max":
            scalar_features = tf.reduce_max(scalar_features, axis=1)
            # For vectors: per feature channel, keep the whole vector of the
            # atom with the largest Euclidean norm.
            vector_features = tf.transpose(vector_features, perm=[0, 2, 3, 1])
            size = tf.sqrt(tf.reduce_sum(tf.square(vector_features), axis=1))
            idx = tf.reshape(tf.argmax(size, axis=-1, output_type=tf.int32), [-1, 1, atom_feat, 1])
            idx = tf.tile(idx, [1, coor_dims, 1, 1])
            # NOTE(review): tf.batch_gather is deprecated (removed in TF2);
            # the commented tf.gather(..., batch_dims=...) line is the
            # intended replacement (used live in the "max_adv" branch).
            vector_features = tf.reshape(tf.batch_gather(vector_features, idx), [-1, coor_dims, atom_feat])
            #vector_features = tf.reshape(tf.gather(vector_features, idx, batch_dims=-1), [-1, coor_dims, atom_feat])
        elif self.pooling == "min":
            # Mirror of "max" selecting the smallest vector norm.
            scalar_features = tf.reduce_min(scalar_features, axis=1)
            vector_features = tf.transpose(vector_features, perm=[0, 2, 3, 1])
            size = tf.sqrt(tf.reduce_sum(tf.square(vector_features), axis=1))
            idx = tf.reshape(tf.argmin(size, axis=-1, output_type=tf.int32), [-1, 1, atom_feat, 1])
            idx = tf.tile(idx, [1, coor_dims, 1, 1])
            vector_features = tf.reshape(tf.batch_gather(vector_features, idx), [-1, coor_dims, atom_feat])
            #vector_features = tf.reshape(tf.gather(vector_features, idx, batch_dims=-1), [-1, coor_dims, atom_feat])
        elif self.pooling == "max_adv":
            # Same as "max" but using tf.gather with batch_dims.
            scalar_features = tf.reduce_max(scalar_features, axis=1)
            vector_features = tf.transpose(vector_features, perm=[0, 2, 3, 1])
            size = tf.sqrt(tf.reduce_sum(tf.square(vector_features), axis=1))
            idx = tf.reshape(tf.argmax(size, axis=-1, output_type=tf.int32), [-1, 1, atom_feat, 1])
            idx = tf.tile(idx, [1, coor_dims, 1, 1])
            vector_features = tf.reshape(tf.gather(vector_features, idx, batch_dims=-1), [-1, coor_dims, atom_feat])
        # Activation
        scalar_features = self.activation(scalar_features)
        vector_features = self.activation(vector_features)
        if self.system == "spherical":
            # Cartesian -> spherical (r, theta, phi); the cast terms guard
            # against division by zero at the origin / on the z-axis.
            x, y, z = tf.unstack(vector_features, axis=1)
            r = tf.sqrt(tf.square(x) + tf.square(y) + tf.square(z))
            t = tf.acos(tf.divide(z, r + tf.cast(tf.equal(r, 0), dtype=float)))
            p = tf.atan(tf.divide(y, x + tf.cast(tf.equal(x, 0), dtype=float)))
            vector_features = tf.stack([r, t, p], axis=1)
        return [scalar_features, vector_features]

    def compute_output_shape(self, inputs_shape):
        #return [(inputs_shape[0][0], inputs_shape[0][2]), (inputs_shape[1][0], inputs_shape[1][2], inputs_shape[1][3])]
        # NOTE(review): the multipooling names below have no matching branch
        # in call(); this path appears reserved for a planned feature.
        if self.pooling in ["maxmin", "maxavg", "maxsum"]:  # multipooling
            return [(inputs_shape[0][0], 2 * inputs_shape[0][2]),
                    (inputs_shape[1][0], inputs_shape[1][2], 2 * inputs_shape[1][3])]
        else:  # one pooling
            return [(inputs_shape[0][0], inputs_shape[0][2]), (inputs_shape[1][0], inputs_shape[1][2], inputs_shape[1][3])]
class GraphGatherS(Layer):
    """Scalar-only graph readout.

    Pools a single tensor (samples, max_atoms, atom_feat) over the atom
    axis to (samples, atom_feat), then applies the activation.
    """

    def __init__(self,
                 pooling="sum",
                 system="cartesian",
                 activation=None,
                 **kwargs):
        self.activation = activations.get(activation)
        self.pooling = pooling  # "sum" | "avg" | "max"
        self.system = system    # kept for config symmetry; unused here
        super(GraphGatherS, self).__init__(**kwargs)

    def build(self, inputs_shape):
        # No trainable weights.
        super(GraphGatherS, self).build(inputs_shape)

    def get_config(self):
        base_config = super(GraphGatherS, self).get_config()
        base_config['pooling'] = self.pooling
        base_config['system'] = self.system
        return base_config

    def call(self, inputs, mask=None):
        # scalar_features = (samples, max_atoms, atom_feat)
        scalar_features = inputs
        # Integrate over atom axis
        if self.pooling == "sum":
            scalar_features = tf.reduce_sum(scalar_features, axis=1)
        elif self.pooling == "avg":
            scalar_features = tf.reduce_mean(scalar_features, axis=1)
        elif self.pooling == "max":
            scalar_features = tf.reduce_max(scalar_features, axis=1)
        # Activation
        scalar_features = self.activation(scalar_features)
        return scalar_features

    def compute_output_shape(self, inputs_shape):
        # FIX: call() receives a single tensor (samples, max_atoms, atom_feat),
        # so inputs_shape is one shape tuple, not a list of shapes.  The old
        # code indexed inputs_shape[0][0]/[0][2], i.e. into the batch
        # dimension itself.
        return (inputs_shape[0], inputs_shape[2])
class Set2Set(Layer):
    """Set2Set attention readout over graph node features.

    Runs ``step`` rounds of LSTM-driven attention over the atom axis.

    call inputs: [features, adjacency]
      features:  (samples, max_atoms, atom_feat) with atom_feat == output_dim
      adjacency: (samples, max_atoms, max_atoms) -- accepted but unused here
    Returns (samples, 2 * output_dim).

    Fix: removed a dead ``c = features`` assignment that was immediately
    overwritten by ``c = tf.zeros_like(q_star)``.
    """

    def __init__(self,
                 output_dim,
                 step=3,
                 activation_lstm='tanh',
                 activation_recurrent='hard_sigmoid',
                 kernel_initializer='glorot_uniform',
                 recurrent_initializer='orthogonal',
                 **kwargs):
        self.supports_masking = True
        self.output_dim = output_dim
        self.step = step  # number of attention/LSTM iterations
        self.activation_lstm = activations.get(activation_lstm)
        self.activation_recurrent = activations.get(activation_recurrent)
        # kernel_initializer and the w_linear/b_linear slots are kept for
        # config compatibility; no input projection is currently applied
        # (see the commented matmul in call()).
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.recurrent_initializer = initializers.get(recurrent_initializer)
        self.w_linear, self.b_linear, self.w_recurrent = None, None, None
        self.b_recurrent_a, self.b_recurrent_b, self.b_recurrent_c = None, None, None
        super(Set2Set, self).__init__(**kwargs)

    def build(self, inputs_shape):
        # LSTM kernel: maps q_star (2*output_dim) to the four stacked gates
        # (i, f, o, g) of width output_dim each.
        self.w_recurrent = self.add_weight(name='s2s_w_recurrent',
                                           shape=(self.output_dim * 2, self.output_dim * 4),
                                           initializer=self.recurrent_initializer)
        # Gate biases: input gate zeros, forget gate ones (standard LSTM
        # initialization), output + cell gates zeros.
        self.b_recurrent_a = self.add_weight(name='s2s_b_recurrent_a',
                                             shape=(self.output_dim * 1,),
                                             initializer=initializers.Zeros())
        self.b_recurrent_b = self.add_weight(name='s2s_b_recurrent_b',
                                             shape=(self.output_dim * 1,),
                                             initializer=initializers.Ones())
        self.b_recurrent_c = self.add_weight(name='s2s_b_recurrent_c',
                                             shape=(self.output_dim * 2,),
                                             initializer=initializers.Zeros())
        super(Set2Set, self).build(inputs_shape)

    def get_config(self):
        config = {
            'pooling': "s2s",
            'output_dim': self.output_dim,
            'step': self.step,
            'activation_lstm': activations.serialize(self.activation_lstm),
            'activation_recurrent': activations.serialize(self.activation_recurrent),
            'kernel_initializer': initializers.serialize(self.kernel_initializer),
            'recurrent_initializer': initializers.serialize(self.recurrent_initializer)}
        base_config = super(Set2Set, self).get_config()
        return {**base_config, **config}

    def call(self, inputs, mask=None, **kwargs):
        # features = (samples, max_atoms, atom_feat)
        # adjacency = (samples, max_atoms, max_atoms) -- unused
        features, adjacency = inputs
        num_features = int(features.shape[-1])
        # Zero initial state with a dynamic batch dimension: the reduce_sum
        # trick yields zeros of shape (batch, 1, num_features).
        q_star = tf.reduce_sum(tf.zeros_like(features), axis=1, keepdims=True)
        c = tf.zeros_like(q_star)  # LSTM cell state (batch, 1, num_features)
        q_star = tf.concat([q_star, q_star], -1)  # (batch, 1, 2*num_features)
        for i in range(self.step):
            q, c = self._lstm(q_star, c)  # query/cell (batch, 1, output_dim)
            # Attention logits: dot product of each atom with the query.
            e = tf.linalg.einsum('aij,akj->aik', features, q)  # (batch, num_atoms, 1)
            a = tf.nn.softmax(e, axis=1)  # attention weights
            a = tf.tile(a, [1, 1, num_features])
            # Attention-weighted readout of node features.
            r = tf.reduce_sum(tf.multiply(a, features), axis=1, keepdims=True)
            q_star = tf.concat([q, r], -1)  # (batch, 1, 2*num_features)
        return tf.reshape(q_star, [-1, self.output_dim * 2])  # (batch, 2*output_dim)

    def _lstm(self, h, c):
        """One step of a minimal LSTM cell; h is (batch, 1, 2*output_dim)."""
        z = tf.matmul(h, self.w_recurrent) + tf.concat([self.b_recurrent_a, self.b_recurrent_b, self.b_recurrent_c], -1)
        i = self.activation_recurrent(z[:, :, :self.output_dim])                        # input gate
        f = self.activation_recurrent(z[:, :, self.output_dim:self.output_dim * 2])     # forget gate
        o = self.activation_recurrent(z[:, :, self.output_dim * 2:self.output_dim * 3]) # output gate
        c_out = f * c + i * self.activation_lstm(z[:, :, self.output_dim * 3:])
        h_out = o * self.activation_lstm(c_out)
        return h_out, c_out

    def compute_mask(self, inputs, mask=None):
        # The atom axis is consumed; no mask propagates downstream.
        return None

    def compute_output_shape(self, inputs_shape):
        return inputs_shape[0], 2 * self.output_dim
class Set2SetS(Layer):
    """Set2Set attention readout over scalar graph features.

    Runs ``step`` rounds of LSTM-driven attention over the atom axis and
    returns the final query/readout pair flattened to
    (batch, 2 * output_dim).  Requires atom_feat == output_dim.
    """

    def __init__(self,
                 output_dim,
                 step=3,
                 activation_lstm='tanh',
                 activation_recurrent='hard_sigmoid',
                 kernel_initializer='glorot_uniform',
                 recurrent_initializer='orthogonal',
                 **kwargs):
        self.supports_masking = True
        self.output_dim = output_dim
        self.step = step
        self.activation_lstm = activations.get(activation_lstm)
        self.activation_recurrent = activations.get(activation_recurrent)
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.recurrent_initializer = initializers.get(recurrent_initializer)
        # Weight slots, filled in build(); the linear-projection slots are
        # placeholders that are never instantiated.
        self.w_linear = None
        self.b_linear = None
        self.w_recurrent = None
        self.b_recurrent_a = None
        self.b_recurrent_b = None
        self.b_recurrent_c = None
        super(Set2SetS, self).__init__(**kwargs)

    def build(self, inputs_shape):
        dim = self.output_dim
        # LSTM kernel: q_star (2*dim) -> four stacked gates (4*dim).
        self.w_recurrent = self.add_weight(name='s2s_w_recurrent',
                                           shape=(dim * 2, dim * 4),
                                           initializer=self.recurrent_initializer)
        # Gate biases: input gate zeros, forget gate ones, output/cell zeros.
        self.b_recurrent_a = self.add_weight(name='s2s_b_recurrent_a',
                                             shape=(dim,),
                                             initializer=initializers.Zeros())
        self.b_recurrent_b = self.add_weight(name='s2s_b_recurrent_b',
                                             shape=(dim,),
                                             initializer=initializers.Ones())
        self.b_recurrent_c = self.add_weight(name='s2s_b_recurrent_c',
                                             shape=(dim * 2,),
                                             initializer=initializers.Zeros())
        super(Set2SetS, self).build(inputs_shape)

    def get_config(self):
        config = super(Set2SetS, self).get_config()
        config.update({
            'pooling': "s2s",
            'output_dim': self.output_dim,
            'step': self.step,
            'activation_lstm': activations.serialize(self.activation_lstm),
            'activation_recurrent': activations.serialize(self.activation_recurrent),
            'kernel_initializer': initializers.serialize(self.kernel_initializer),
            'recurrent_initializer': initializers.serialize(self.recurrent_initializer)})
        return config

    def call(self, inputs, mask=None, **kwargs):
        # scalar_features = (samples, max_atoms, atom_feat)
        node_feats = inputs
        feat_dim = int(node_feats.shape[-1])
        # Zero initial state with a dynamic batch dimension:
        # query and cell state are (batch, 1, feat_dim).
        query = tf.reduce_sum(tf.zeros_like(node_feats), axis=1, keepdims=True)
        cell = tf.zeros_like(query)
        q_star = tf.concat([query, query], -1)  # (batch, 1, 2*feat_dim)
        for _ in range(self.step):
            query, cell = self._lstm(q_star, cell)
            # Attention logits: dot product of every atom with the query.
            logits = tf.linalg.einsum('aij,akj->aik', node_feats, query)  # (batch, max_atoms, 1)
            attn = tf.nn.softmax(logits, axis=1)
            attn = tf.tile(attn, [1, 1, feat_dim])
            readout = tf.reduce_sum(tf.multiply(attn, node_feats), axis=1, keepdims=True)
            q_star = tf.concat([query, readout], -1)  # (batch, 1, 2*feat_dim)
        return tf.reshape(q_star, [-1, self.output_dim * 2])  # (batch, 2*output_dim)

    def _lstm(self, h, c):
        """One step of a minimal LSTM cell on (batch, 1, 2*output_dim) input."""
        bias = tf.concat([self.b_recurrent_a, self.b_recurrent_b, self.b_recurrent_c], -1)
        z = tf.matmul(h, self.w_recurrent) + bias
        gate_i, gate_f, gate_o, gate_g = tf.split(z, 4, axis=-1)
        i = self.activation_recurrent(gate_i)
        f = self.activation_recurrent(gate_f)
        o = self.activation_recurrent(gate_o)
        c_next = f * c + i * self.activation_lstm(gate_g)
        h_next = o * self.activation_lstm(c_next)
        return h_next, c_next

    def compute_mask(self, inputs, mask=None):
        # Pooling collapses the atom axis; no mask propagates.
        return None

    def compute_output_shape(self, inputs_shape):
        return (inputs_shape[0], self.output_dim * 2)
class Set2SetV(Layer):
    """Set2Set attention readout over vector graph features.

    Same attention/LSTM scheme as Set2SetS but with an extra coordinate
    axis: attention weights are computed per coordinate.  Requires
    atom_feat == output_dim; the final reshape hard-codes coor_dims == 3
    -- NOTE(review): confirm inputs always carry 3 coordinate dims.

    call input: vector_features = (samples, max_atoms, coor_dims, atom_feat)
    Returns (batch, 3, 2 * output_dim).
    """

    def __init__(self,
                 output_dim,
                 step=3,
                 activation_lstm='tanh',
                 activation_recurrent='hard_sigmoid',
                 kernel_initializer='glorot_uniform',
                 recurrent_initializer='orthogonal',
                 **kwargs):
        self.supports_masking = True
        self.output_dim = output_dim
        self.step = step  # number of attention/LSTM iterations
        self.activation_lstm = activations.get(activation_lstm)
        self.activation_recurrent = activations.get(activation_recurrent)
        # kernel_initializer / w_linear / b_linear are kept for config
        # compatibility; no input projection is applied in call().
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.recurrent_initializer = initializers.get(recurrent_initializer)
        self.w_linear, self.b_linear, self.w_recurrent = None, None, None
        self.b_recurrent_a, self.b_recurrent_b, self.b_recurrent_c = None, None, None
        super(Set2SetV, self).__init__(**kwargs)

    def build(self, inputs_shape):
        # LSTM kernel: q_star (2*output_dim) -> four stacked gates (4*output_dim).
        self.w_recurrent = self.add_weight(name='s2s_w_recurrent',
                                           shape=(self.output_dim * 2, self.output_dim * 4),
                                           initializer=self.recurrent_initializer)
        # Gate biases: input gate zeros, forget gate ones, output/cell zeros.
        self.b_recurrent_a = self.add_weight(name='s2s_b_recurrent_a',
                                             shape=(self.output_dim * 1,),
                                             initializer=initializers.Zeros())
        self.b_recurrent_b = self.add_weight(name='s2s_b_recurrent_b',
                                             shape=(self.output_dim * 1,),
                                             initializer=initializers.Ones())
        self.b_recurrent_c = self.add_weight(name='s2s_b_recurrent_c',
                                             shape=(self.output_dim * 2,),
                                             initializer=initializers.Zeros())
        super(Set2SetV, self).build(inputs_shape)

    def get_config(self):
        config = {
            'pooling': "s2s",
            'output_dim': self.output_dim,
            'step': self.step,
            'activation_lstm': activations.serialize(self.activation_lstm),
            'activation_recurrent': activations.serialize(self.activation_recurrent),
            'kernel_initializer': initializers.serialize(self.kernel_initializer),
            'recurrent_initializer': initializers.serialize(self.recurrent_initializer)}
        base_config = super(Set2SetV, self).get_config()
        return {**base_config, **config}

    def call(self, inputs, mask=None, **kwargs):
        # vector_features = (samples, max_atoms, coor_dims, atom_feat)
        features = inputs
        num_features = int(features.shape[-1])
        # Zero initial state with a dynamic batch dimension:
        # (batch, 1, coor_dims, num_features).
        q_star = tf.reduce_sum(tf.zeros_like(features), axis=1, keepdims=True)
        c = tf.zeros_like(q_star)  # LSTM cell state
        q_star = tf.concat([q_star, q_star], -1)  # (batch, 1, coor_dims, 2*num_features)
        for i in range(self.step):
            q, c = self._lstm(q_star, c)  # (batch, 1, coor_dims, output_dim)
            # Per-coordinate attention logits, then move the coordinate
            # axis back to position 2.
            e = tf.linalg.einsum('aijk,aljk->ailj', features, q)  # (batch, num_atoms, 1, coor_dims)
            e = tf.transpose(e, perm=[0, 1, 3, 2])
            a = tf.nn.softmax(e, axis=1)  # attention over atoms
            a = tf.tile(a, [1, 1, 1, num_features])
            r = tf.reduce_sum(tf.multiply(a, features), axis=1, keepdims=True)  # readout
            q_star = tf.concat([q, r], -1)  # (batch, 1, coor_dims, 2*num_features)
        return tf.reshape(q_star, [-1, 3, self.output_dim * 2])  # (batch, 3, 2*output_dim)

    def _lstm(self, h, c):
        """One LSTM step; gates are sliced from the last axis of z."""
        z = tf.matmul(h, self.w_recurrent) + tf.concat([self.b_recurrent_a, self.b_recurrent_b, self.b_recurrent_c], -1)
        i = self.activation_recurrent(z[:, :, :, :self.output_dim])                        # input gate
        f = self.activation_recurrent(z[:, :, :, self.output_dim:self.output_dim * 2])     # forget gate
        o = self.activation_recurrent(z[:, :, :, self.output_dim * 2:self.output_dim * 3]) # output gate
        c_out = f * c + i * self.activation_lstm(z[:, :, :, self.output_dim * 3:])
        h_out = o * self.activation_lstm(c_out)
        return h_out, c_out

    def compute_mask(self, inputs, mask=None):
        # The atom axis is consumed; no mask propagates.
        return None

    def compute_output_shape(self, inputs_shape):
        return inputs_shape[0], 3, 2 * self.output_dim
# for edge feature conv
class GraphEmbed_edge(Layer):
    """Entry layer for edge-aware graph models.

    Passes atom and bond features through unchanged and appends an
    all-zero vector-feature tensor of shape
    (samples, max_atoms, coor_dims, atom_feat).
    """

    def __init__(self,
                 filters=0,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        self.filters = filters
        # Named activations resolve through Keras; custom callables pass through.
        known_names = ["relu", "tanh", "sigmoid", None]
        self.activation = activations.get(activation) if activation in known_names else activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        # A None bias initializer disables bias handling downstream.
        self.bias_initializer = None if bias_initializer is None else initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        super(GraphEmbed_edge, self).__init__(**kwargs)

    def build(self, input_shape):
        # Stateless layer: nothing to create.
        super(GraphEmbed_edge, self).build(input_shape)

    def call(self, inputs, mask=None):
        # inputs: [init_feats (samples, max_atoms, atom_feat),
        #          distances (samples, max_atoms, max_atoms, coor_dims),
        #          bonds     (samples, max_atoms, max_atoms, bond_feat)]
        atom_states, pair_distances, bond_states = inputs
        n_atoms = int(atom_states.shape[1])
        n_feats = int(atom_states.shape[-1])
        n_coords = int(pair_distances.shape[-1])
        # Zero-initialized vector features, one copy per coordinate axis:
        # (samples, max_atoms, coor_dims, atom_feat)
        zero_vectors = tf.reshape(tf.zeros_like(atom_states), [-1, n_atoms, 1, n_feats])
        zero_vectors = tf.tile(zero_vectors, [1, 1, n_coords, 1])
        return [atom_states, zero_vectors, bond_states]

    def compute_output_shape(self, input_shape):
        return [input_shape[0], (input_shape[0][0], input_shape[0][1], input_shape[1][-1], input_shape[0][-1]),
                input_shape[-1]]
class GraphSToS_edge(Layer):
    """Scalar-to-scalar message passing with bond (edge) features.

    For each atom pair (i, j) the two atoms' scalar features are
    concatenated, linearly mapped to ``filters`` channels, elementwise
    gated by linearly-mapped bond features, then passed through one more
    message-passing transform.

    call inputs: [scalar_features (samples, max_atoms, atom_feat),
                  bonds (samples, max_atoms, max_atoms, bond_feat)]
    Returns (samples, max_atoms, max_atoms, filters).
    """

    def __init__(self,
                 filters,
                 pooling='sum',
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        # Named activations resolve through Keras; custom callables pass through.
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        # self.bias_initializer = initializers.get(bias_initializer)
        # None disables all bias terms (see build/call).
        if bias_initializer is None:
            self.bias_initializer = None
        else:
            self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.filters = filters
        #self.bond_filters = bond_filters
        self.pooling = pooling  # stored for config symmetry; unused in call()
        super(GraphSToS_edge, self).__init__(**kwargs)

    def get_config(self):
        base_config = super(GraphSToS_edge, self).get_config()
        base_config['filters'] = self.filters
        return base_config

    def build(self, input_shape):
        atom_feat = input_shape[0][-1]
        bond_feat = input_shape[1][-1]
        # Pairwise atom transform: concat(atom_i, atom_j) -> filters
        self.w_ss = self.add_weight(shape=(atom_feat * 2, self.filters),
                                    initializer=self.kernel_initializer,
                                    regularizer=self.kernel_regularizer,
                                    name='w_ss')
        # Bond-feature transform: bond_feat -> filters
        self.w_edge_ss = self.add_weight(shape=(bond_feat, self.filters),
                                         initializer=self.kernel_initializer,
                                         regularizer=self.kernel_regularizer,
                                         name='w_edge_ss')
        # Post-gating message-passing transform: filters -> filters
        self.w_mp_ss = self.add_weight(shape=(self.filters, self.filters),
                                       initializer=self.kernel_initializer,
                                       regularizer=self.kernel_regularizer,
                                       name='w_mp_ss')
        if self.bias_initializer is not None:
            self.b_ss = self.add_weight(shape=(self.filters,),
                                        name='b_ss',
                                        initializer=self.bias_initializer)
            self.b_edge_ss = self.add_weight(shape=(self.filters,),
                                             name='b_edge_ss',
                                             initializer=self.bias_initializer)
            self.b_mp_ss = self.add_weight(shape=(self.filters,),
                                           initializer=self.bias_initializer,
                                           name='b_mp_ss')
        super(GraphSToS_edge, self).build(input_shape)

    def call(self, inputs, mask=None):
        # Import graph tensors
        # scalar_features = (samples, max_atoms, atom_feat)
        # bonds = (samples, max_atoms, max_atoms, bond_feat)
        scalar_features, bonds = inputs
        # _scalar_features = scalar_features
        # Get parameters
        max_atoms = int(scalar_features.shape[1])
        atom_feat = int(scalar_features.shape[-1])
        # Expand scalar features to 4D
        scalar_features = tf.reshape(scalar_features, [-1, max_atoms, 1, atom_feat])
        scalar_features = tf.tile(scalar_features, [1, 1, max_atoms, 1])  # (samples, max_atoms, max_atoms, atom_feat)
        # Combine between atoms: position (i, j) holds concat(atom_i, atom_j)
        scalar_features_t = tf.transpose(scalar_features, perm=[0, 2, 1, 3])
        scalar_features = tf.concat([scalar_features, scalar_features_t], -1)
        # Linear combination
        scalar_features = tf.reshape(scalar_features, [-1, atom_feat * 2])
        # scalar_features = tf.matmul(scalar_features, self.w_ss) + self.b_ss
        if self.bias_initializer is None:
            scalar_features = tf.matmul(scalar_features, self.w_ss)
        else:
            scalar_features = tf.matmul(scalar_features, self.w_ss) + self.b_ss
        scalar_features = tf.reshape(scalar_features, [-1, max_atoms, max_atoms, self.filters])
        # NOTE(review): indentation of this branch reconstructed; the
        # message-passing matmul is taken to be inside the bond gate.
        if bonds is not None:
            # Linear combination of bond features
            # bonds = tf.matmul(bonds, self.w_edge_ss) + self.b_edge_ss
            if self.bias_initializer is None:
                bonds = tf.matmul(bonds, self.w_edge_ss)
            else:
                bonds = tf.matmul(bonds, self.w_edge_ss) + self.b_edge_ss
            # multiply bond feature (elementwise gate written as einsum)
            scalar_features = tf.linalg.einsum('aijk,aijk->aijk', scalar_features, bonds)
            # scalar_features = tf.matmul(scalar_features, self.w_mp_ss) + self.b_mp_ss
            if self.bias_initializer is None:
                scalar_features = tf.matmul(scalar_features, self.w_mp_ss)
            else:
                scalar_features = tf.matmul(scalar_features, self.w_mp_ss) + self.b_mp_ss
        # masking: zero out rows belonging to padded atoms.  Assumes mask is
        # a per-atom (batch, max_atoms) tensor -- TODO confirm.
        if mask is not None:
            mask = tf.reshape(mask, [-1, max_atoms, 1])
            mask = tf.tile(mask, [1, 1, max_atoms])
            scalar_features = tf.linalg.einsum('aijk,aij->aijk', scalar_features, mask)
        # Activation
        scalar_features = self.activation(scalar_features)
        return scalar_features

    def compute_output_shape(self, input_shape):
        return input_shape[0][0], input_shape[0][1], input_shape[0][1], self.filters
class GraphSToV_edge(Layer):
    """Scalar-to-vector message passing with bond (edge) features.

    Pairwise scalar features are linearly mapped, gated by bond features,
    then multiplied by the interatomic distance vectors to yield pairwise
    vector features.

    call inputs: [scalar_features (samples, max_atoms, atom_feat),
                  distances (samples, max_atoms, max_atoms, coor_dims),
                  bonds (samples, max_atoms, max_atoms, bond_feat)]
    Returns (samples, max_atoms, max_atoms, coor_dims, filters).
    """

    def __init__(self,
                 filters,
                 pooling='sum',
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        # Named activations resolve through Keras; custom callables pass through.
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        # self.bias_initializer = initializers.get(bias_initializer)
        # None disables all bias terms (see build/call).
        if bias_initializer is None:
            self.bias_initializer = None
        else:
            self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.filters = filters
        self.pooling = pooling  # stored for config symmetry; unused in call()
        super(GraphSToV_edge, self).__init__(**kwargs)

    def get_config(self):
        base_config = super(GraphSToV_edge, self).get_config()
        base_config['filters'] = self.filters
        return base_config

    def build(self, input_shape):
        atom_feat = input_shape[0][-1]
        bond_feat = input_shape[2][-1]
        # Pairwise atom transform: concat(atom_i, atom_j) -> filters
        self.w_sv = self.add_weight(shape=(atom_feat * 2, self.filters),
                                    initializer=self.kernel_initializer,
                                    regularizer=self.kernel_regularizer,
                                    name='w_sv')
        # Bond-feature transform: bond_feat -> filters
        self.w_edge_sv = self.add_weight(shape=(bond_feat, self.filters),
                                         initializer=self.kernel_initializer,
                                         regularizer=self.kernel_regularizer,
                                         name='w_edge_sv')
        # Post-gating message-passing transform: filters -> filters
        self.w_mp_sv = self.add_weight(shape=(self.filters, self.filters),
                                       initializer=self.kernel_initializer,
                                       regularizer=self.kernel_regularizer,
                                       name='w_mp_sv')
        if self.bias_initializer is not None:
            self.b_sv = self.add_weight(shape=(self.filters,),
                                        name='b_sv',
                                        initializer=self.bias_initializer)
            self.b_edge_sv = self.add_weight(shape=(self.filters,),
                                             name='b_edge_sv',
                                             initializer=self.bias_initializer)
            self.b_mp_sv = self.add_weight(shape=(self.filters,),
                                           initializer=self.bias_initializer,
                                           name='b_mp_sv')
        super(GraphSToV_edge, self).build(input_shape)

    def call(self, inputs, mask=None):
        # Import graph tensors
        # scalar_features = (samples, max_atoms, atom_feat)
        # distances = (samples, max_atoms, max_atoms, coor_dims)
        scalar_features, distances, bonds = inputs
        # _scalar_features = scalar_features
        # Get parameters
        max_atoms = int(scalar_features.shape[1])
        atom_feat = int(scalar_features.shape[-1])
        coor_dims = int(distances.shape[-1])
        # Expand scalar features to 4D
        scalar_features = tf.reshape(scalar_features, [-1, max_atoms, 1, atom_feat])
        scalar_features = tf.tile(scalar_features, [1, 1, max_atoms, 1])
        # Combine between atoms: position (i, j) holds concat(atom_i, atom_j)
        scalar_features_t = tf.transpose(scalar_features, perm=[0, 2, 1, 3])
        scalar_features = tf.concat([scalar_features, scalar_features_t], -1)
        # Apply weights
        scalar_features = tf.reshape(scalar_features, [-1, atom_feat * 2])
        # scalar_features = tf.matmul(scalar_features, self.w_sv) + self.b_sv
        if self.bias_initializer is None:
            scalar_features = tf.matmul(scalar_features, self.w_sv)
        else:
            scalar_features = tf.matmul(scalar_features, self.w_sv) + self.b_sv
        scalar_features = tf.reshape(scalar_features, [-1, max_atoms, max_atoms, 1, self.filters])
        scalar_features = tf.tile(scalar_features, [1, 1, 1, coor_dims, 1])
        # NOTE(review): indentation of this branch reconstructed; the
        # message-passing matmul is taken to be inside the bond gate.
        if bonds is not None:
            # Linear combination of bond features
            # bonds = tf.matmul(bonds, self.w_edge_sv) + self.b_edge_sv
            if self.bias_initializer is None:
                bonds = tf.matmul(bonds, self.w_edge_sv)
            else:
                bonds = tf.matmul(bonds, self.w_edge_sv) + self.b_edge_sv
            # multiply bond feature: gate broadcast over the coordinate axis
            scalar_features = tf.linalg.einsum('aijkl,aijl->aijkl', scalar_features, bonds)
            # scalar_features = tf.matmul(scalar_features, self.w_mp_sv) + self.b_mp_sv
            if self.bias_initializer is None:
                scalar_features = tf.matmul(scalar_features, self.w_mp_sv)
            else:
                scalar_features = tf.matmul(scalar_features, self.w_mp_sv) + self.b_mp_sv
        # masking: zero out rows belonging to padded atoms.  Assumes mask is
        # a per-atom (batch, max_atoms) tensor -- TODO confirm.
        if mask is not None:
            mask = tf.reshape(mask, [-1, max_atoms, 1])
            mask = tf.tile(mask, [1, 1, max_atoms])
            scalar_features = tf.linalg.einsum('aijkl,aij->aijkl', scalar_features, mask)
        # Expand distances to 5D
        distances = tf.reshape(distances, [-1, max_atoms, max_atoms, coor_dims, 1])
        distances = tf.tile(distances, [1, 1, 1, 1, self.filters])
        # Tensor product: direction vectors scaled by the scalar channels
        vector_features = tf.multiply(scalar_features, distances)
        # Activation
        vector_features = self.activation(vector_features)
        # Second-atom-axis pooling was moved out of this layer; kept for reference.
        '''# Integrate over second atom axis
        if self.pooling == "sum":
            vector_features = tf.reduce_sum(vector_features, axis=2)
        elif self.pooling == "max":
            vector_features = tf.reduce_max(vector_features, axis=2)
        elif self.pooling == "mean":
            vector_features = tf.reduce_mean(vector_features, axis=2)'''
        return vector_features

    def compute_output_shape(self, input_shape):
        return input_shape[0][0], input_shape[0][1], input_shape[0][1], input_shape[1][-1], self.filters
class GraphVToV_edge(Layer):
    """Vector-to-vector message passing with bond (edge) features.

    Pairwise concatenated vector features are linearly mapped, gated by
    bond features, transformed once more and masked.

    call inputs: [vector_features (samples, max_atoms, coor_dims, atom_feat),
                  bonds (samples, max_atoms, max_atoms, bond_feat)]
    Returns (samples, max_atoms, max_atoms, coor_dims, filters).
    """

    def __init__(self,
                 filters,
                 pooling='sum',
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        # Named activations resolve through Keras; custom callables pass through.
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        # self.bias_initializer = initializers.get(bias_initializer)
        # None disables all bias terms (see build/call).
        if bias_initializer is None:
            self.bias_initializer = None
        else:
            self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.filters = filters
        self.pooling = pooling  # stored for config symmetry; unused in call()
        super(GraphVToV_edge, self).__init__(**kwargs)

    def get_config(self):
        base_config = super(GraphVToV_edge, self).get_config()
        base_config['filters'] = self.filters
        return base_config

    def build(self, input_shape):
        atom_feat = input_shape[0][-1]
        bond_feat = input_shape[1][-1]
        # Pairwise atom transform: concat(vec_i, vec_j) -> filters
        self.w_vv = self.add_weight(shape=(atom_feat * 2, self.filters),
                                    initializer=self.kernel_initializer,
                                    regularizer=self.kernel_regularizer,
                                    name='w_vv')
        # Bond-feature transform: bond_feat -> filters
        self.w_edge_vv = self.add_weight(shape=(bond_feat, self.filters),
                                         initializer=self.kernel_initializer,
                                         regularizer=self.kernel_regularizer,
                                         name='w_edge_vv')
        # Post-gating message-passing transform: filters -> filters
        self.w_mp_vv = self.add_weight(shape=(self.filters, self.filters),
                                       initializer=self.kernel_initializer,
                                       regularizer=self.kernel_regularizer,
                                       name='w_mp_vv')
        if self.bias_initializer is not None:
            self.b_vv = self.add_weight(shape=(self.filters,),
                                        name='b_vv',
                                        initializer=self.bias_initializer)
            self.b_edge_vv = self.add_weight(shape=(self.filters,),
                                             name='b_edge_vv',
                                             initializer=self.bias_initializer)
            self.b_mp_vv = self.add_weight(shape=(self.filters,),
                                           initializer=self.bias_initializer,
                                           name='b_mp_vv')
        super(GraphVToV_edge, self).build(input_shape)

    def call(self, inputs, bond=None, mask=None):
        # Import graph tensors
        # vector_features = (samples, max_atoms, coor_dims, atom_feat)
        vector_features, bonds = inputs
        # Get parameters
        max_atoms = int(vector_features.shape[1])
        atom_feat = int(vector_features.shape[-1])
        coor_dims = int(vector_features.shape[-2])
        # Expand vector features to 5D
        vector_features = tf.reshape(vector_features, [-1, max_atoms, 1, coor_dims, atom_feat])
        vector_features = tf.tile(vector_features, [1, 1, max_atoms, 1, 1])
        # Combine between atoms: position (i, j) holds concat(vec_i, vec_j)
        vector_features_t = tf.transpose(vector_features, perm=[0, 2, 1, 3, 4])
        vector_features = tf.concat([vector_features, vector_features_t], -1)
        # Apply weights
        vector_features = tf.reshape(vector_features, [-1, atom_feat * 2])
        # vector_features = tf.matmul(vector_features, self.w_vv) + self.b_vv
        if self.bias_initializer is None:
            vector_features = tf.matmul(vector_features, self.w_vv)
        else:
            vector_features = tf.matmul(vector_features, self.w_vv) + self.b_vv
        vector_features = tf.reshape(vector_features, [-1, max_atoms, max_atoms, coor_dims, self.filters])
        # multiply bond feature.  NOTE(review): indentation of this branch
        # reconstructed; the message-passing matmul is taken to be inside
        # the bond gate.
        if bonds is not None:
            # Linear combination of bond features
            # bonds = tf.matmul(bonds, self.w_edge_vv) + self.b_edge_vv
            if self.bias_initializer is None:
                bonds = tf.matmul(bonds, self.w_edge_vv)
            else:
                bonds = tf.matmul(bonds, self.w_edge_vv) + self.b_edge_vv
            # multiply bond feature: gate broadcast over the coordinate axis
            vector_features = tf.linalg.einsum('aijkl,aijl->aijkl', vector_features, bonds)
            # vector_features = tf.matmul(vector_features, self.w_mp_vv) + self.b_mp_vv
            if self.bias_initializer is None:
                vector_features = tf.matmul(vector_features, self.w_mp_vv)
            else:
                vector_features = tf.matmul(vector_features, self.w_mp_vv) + self.b_mp_vv
        # masking: zero out rows belonging to padded atoms.  Assumes mask is
        # a per-atom (batch, max_atoms) tensor -- TODO confirm.
        if mask is not None:
            mask = tf.reshape(mask, [-1, max_atoms, 1])
            mask = tf.tile(mask, [1, 1, max_atoms])
            vector_features = tf.linalg.einsum('aijkl,aij->aijkl', vector_features, mask)
        # Activation
        vector_features = self.activation(vector_features)
        # Second-atom-axis pooling was moved out of this layer; kept for reference.
        '''# Integrate over second atom axis
        if self.pooling == "sum":
            vector_features = tf.reduce_sum(vector_features, axis=2)
        elif self.pooling == "max":
            vector_features = tf.reduce_max(vector_features, axis=2)
        elif self.pooling == "mean":
            vector_features = tf.reduce_mean(vector_features, axis=2)'''
        return vector_features

    def compute_output_shape(self, input_shape):
        return input_shape[0][0], input_shape[0][1], input_shape[0][1], input_shape[0][-2], self.filters
class GraphVToS_edge(Layer):
    """Vector-to-scalar graph layer with bond (edge) gating.

    Builds pairwise vector features from per-atom vector features, gates
    them with learned bond features, and projects them onto the
    interatomic distance vectors to produce rotation-invariant scalars.

    Inputs (list): [vector_features, distances, bonds]
      vector_features: (samples, max_atoms, coor_dims, atom_feat)
      distances:       (samples, max_atoms, max_atoms, coor_dims)
      bonds:           (samples, max_atoms, max_atoms, bond_feat)
    Output: (samples, max_atoms, max_atoms, filters)
    """

    def __init__(self,
                 filters,
                 pooling='sum',
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        # Resolve known activation names via Keras; custom callables pass through.
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        # bias_initializer=None disables every bias term in this layer.
        if bias_initializer is None:
            self.bias_initializer = None
        else:
            self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.filters = filters
        self.pooling = pooling
        super(GraphVToS_edge, self).__init__(**kwargs)

    def get_config(self):
        base_config = super(GraphVToS_edge, self).get_config()
        base_config['filters'] = self.filters
        # Fix: 'pooling' was previously not serialized (unlike the sibling
        # *_edge layers), so a layer restored with from_config() silently
        # fell back to the default pooling mode.
        base_config['pooling'] = self.pooling
        return base_config

    def build(self, input_shape):
        atom_feat = input_shape[0][-1]
        bond_feat = input_shape[2][-1]
        # Pairwise projection applied to concatenated atom-pair features.
        self.w_vs = self.add_weight(shape=(atom_feat * 2, self.filters),
                                    initializer=self.kernel_initializer,
                                    regularizer=self.kernel_regularizer,
                                    name='w_vs')
        # Linear combination of raw bond features.
        self.w_edge_vs = self.add_weight(shape=(bond_feat, self.filters),
                                         initializer=self.kernel_initializer,
                                         regularizer=self.kernel_regularizer,
                                         name='w_edge_vs')
        # Post message-passing mixing weights.
        self.w_mp_vs = self.add_weight(shape=(self.filters, self.filters),
                                       initializer=self.kernel_initializer,
                                       regularizer=self.kernel_regularizer,
                                       name='w_mp_vs')
        if self.bias_initializer is not None:
            self.b_vs = self.add_weight(shape=(self.filters,),
                                        name='b_vs',
                                        initializer=self.bias_initializer)
            self.b_edge_vs = self.add_weight(shape=(self.filters,),
                                             name='b_edge_vs',
                                             initializer=self.bias_initializer)
            self.b_mp_vs = self.add_weight(shape=(self.filters,),
                                           initializer=self.bias_initializer,
                                           name='b_mp_vs')
        super(GraphVToS_edge, self).build(input_shape)

    def call(self, inputs, bond=None, mask=None):
        # vector_features = (samples, max_atoms, coor_dims, atom_feat)
        # distances = (samples, max_atoms, max_atoms, coor_dims)
        vector_features, distances, bonds = inputs
        max_atoms = int(vector_features.shape[1])
        atom_feat = int(vector_features.shape[-1])
        coor_dims = int(vector_features.shape[-2])
        # Expand vector features to 5-D and pair every atom with every atom.
        vector_features = tf.reshape(vector_features, [-1, max_atoms, 1, coor_dims, atom_feat])
        vector_features = tf.tile(vector_features, [1, 1, max_atoms, 1, 1])
        vector_features_t = tf.transpose(vector_features, perm=[0, 2, 1, 3, 4])
        vector_features = tf.concat([vector_features, vector_features_t], -1)
        # Apply the pairwise projection weights.
        vector_features = tf.reshape(vector_features, [-1, atom_feat * 2])
        if self.bias_initializer is None:
            vector_features = tf.matmul(vector_features, self.w_vs)
        else:
            vector_features = tf.matmul(vector_features, self.w_vs) + self.b_vs
        vector_features = tf.reshape(vector_features, [-1, max_atoms, max_atoms, coor_dims, self.filters])
        if bonds is not None:
            # Linear combination of bond features, then gate the pairwise
            # vector features edge-wise and mix channels.
            if self.bias_initializer is None:
                bonds = tf.matmul(bonds, self.w_edge_vs)
            else:
                bonds = tf.matmul(bonds, self.w_edge_vs) + self.b_edge_vs
            vector_features = tf.linalg.einsum('aijkl,aijl->aijkl', vector_features, bonds)
            if self.bias_initializer is None:
                vector_features = tf.matmul(vector_features, self.w_mp_vs)
            else:
                vector_features = tf.matmul(vector_features, self.w_mp_vs) + self.b_mp_vs
        # Zero out features of padding atoms.
        if mask is not None:
            mask = tf.reshape(mask, [-1, max_atoms, 1])
            mask = tf.tile(mask, [1, 1, max_atoms])
            vector_features = tf.linalg.einsum('aijkl,aij->aijkl', vector_features, mask)
        # NOTE(review): an earlier revision normalized distances to unit
        # vectors (r / |r|) before projecting; the current code projects onto
        # the raw distance vectors — confirm this is intended.
        distances_hat = tf.reshape(distances, [-1, max_atoms, max_atoms, coor_dims, 1])
        distances_hat = tf.tile(distances_hat, [1, 1, 1, 1, self.filters])
        # Projection of v onto r: elementwise multiply, sum over coor_dims.
        scalar_features = tf.multiply(vector_features, distances_hat)
        scalar_features = tf.reduce_sum(scalar_features, axis=-2)
        scalar_features = self.activation(scalar_features)
        return scalar_features

    def compute_output_shape(self, input_shape):
        return input_shape[0][0], input_shape[0][1], input_shape[0][1], self.filters
class GraphConv_edge(Layer):
    """Graph convolution with edge (bond) message passing on scalar features.

    Inputs (list): [scalar_features, bonds]
      scalar_features: (samples, max_atoms, atom_feat) — broadcast to 4-D
                       internally when needed.
      bonds:           (samples, max_atoms, max_atoms, bond_feat)
    Output: (samples, max_atoms, filters)
    """

    def __init__(self,
                 filters,
                 bond_filters,
                 pooling='sum',
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        # Resolve standard activation names; custom callables pass through.
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.filters = filters          # output width of node features
        self.bond_filters = bond_filters  # width of the edge message
        self.pooling = pooling          # 'sum' | 'max' | 'mean'
        super(GraphConv_edge, self).__init__(**kwargs)

    def get_config(self):
        base_config = super(GraphConv_edge, self).get_config()
        base_config['filters'] = self.filters
        base_config['bond_filters'] = self.bond_filters
        base_config['pooling'] = self.pooling
        return base_config

    def build(self, input_shape):
        atom_feat = input_shape[0][-1]
        bond_feat = input_shape[-1][-1]
        # Node projection applied after pooling.
        self.w_conv_scalar = self.add_weight(shape=(atom_feat, self.filters),
                                             initializer=self.kernel_initializer,
                                             regularizer=self.kernel_regularizer,
                                             name='w_conv_scalar')
        self.b_conv_scalar = self.add_weight(shape=(self.filters,),
                                             name='b_conv_scalar',
                                             initializer=self.bias_initializer)
        # Edge-message projection (node + bond features concatenated).
        self.w_mp_scalar = self.add_weight(shape=(atom_feat + bond_feat, self.bond_filters),
                                           initializer=self.kernel_initializer,
                                           regularizer=self.kernel_regularizer,
                                           name='w_mp_scalar')
        self.b_mp_scalar = self.add_weight(shape=(self.bond_filters,),
                                           initializer=self.bias_initializer,
                                           name='b_mp_scalar')
        # Final update fusing node features with the aggregated edge message.
        self.w_update_scalar = self.add_weight(shape=(self.filters + self.bond_filters, self.filters),
                                               initializer=self.kernel_initializer,
                                               regularizer=self.kernel_regularizer,
                                               name='w_update_scalar')
        self.b_update_scalar = self.add_weight(shape=(self.filters,),
                                               name='b_update_scalar',
                                               initializer=self.bias_initializer)
        super(GraphConv_edge, self).build(input_shape)

    def call(self, inputs, bond=None, mask=None):
        # scalar_features = (samples, max_atoms, atom_feat)
        # bonds = (samples, max_atoms, max_atoms, bond_feat)
        scalar_features, bonds = inputs
        max_atoms = int(scalar_features.shape[1])
        atom_feat = int(scalar_features.shape[-1])
        # Broadcast node features over the second atom axis when still 3-D.
        if len(scalar_features.shape) == 3:
            scalar_features = tf.reshape(scalar_features, [-1, max_atoms, 1, atom_feat])
            scalar_features = tf.tile(scalar_features, [1, 1, max_atoms, 1])  # (samples, max_atoms, max_atoms, atom_feat)
        # Edge message: transform node + bond features, aggregate over the
        # second atom axis.
        m = tf.concat([scalar_features, bonds], axis=-1)
        m = tf.matmul(m, self.w_mp_scalar) + self.b_mp_scalar
        m = self.activation(m)
        m = tf.reduce_sum(m, axis=2)
        # Integrate over second atom axis.
        # NOTE(review): an unrecognized pooling value leaves the tensor 4-D
        # and the reshape below would fail — confirm callers only pass
        # 'sum' / 'max' / 'mean'.
        if self.pooling == "sum":
            scalar_features = tf.reduce_sum(scalar_features, axis=2)
        elif self.pooling == "max":
            scalar_features = tf.reduce_max(scalar_features, axis=2)
        elif self.pooling == "mean":
            scalar_features = tf.reduce_mean(scalar_features, axis=2)
        # Linear combination of the pooled node features.
        scalar_features = tf.reshape(scalar_features, [-1, atom_feat])
        scalar_features = tf.matmul(scalar_features, self.w_conv_scalar) + self.b_conv_scalar
        scalar_features = self.activation(scalar_features)
        scalar_features = tf.reshape(scalar_features, [-1, max_atoms, self.filters])
        # Update: fuse node features with the aggregated edge message.
        scalar_features = tf.concat([scalar_features, m], axis=-1)
        scalar_features = tf.reshape(scalar_features, [-1, self.filters + self.bond_filters])
        scalar_features = tf.matmul(scalar_features, self.w_update_scalar) + self.b_update_scalar
        scalar_features = self.activation(scalar_features)
        scalar_features = tf.reshape(scalar_features, [-1, max_atoms, self.filters])
        return scalar_features

    def compute_output_shape(self, input_shape):
        return input_shape[0][0], input_shape[0][1], self.filters
class GraphConvS_edge(Layer):
    """Graph convolution with edge message passing over two pairwise scalar
    feature tensors that are concatenated feature-wise.

    Inputs (list): [scalar_features_1, scalar_features_2, bonds]
      scalar_features_*: (samples, max_atoms, max_atoms, atom_feat)
      bonds:             (samples, max_atoms, max_atoms, bond_feat)
    Output: (samples, max_atoms, filters)
    """

    def __init__(self,
                 filters,
                 bond_filters,
                 pooling='sum',
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        # Resolve standard activation names; custom callables pass through.
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.filters = filters          # output width of node features
        self.bond_filters = bond_filters  # width of the edge message
        self.pooling = pooling          # 'sum' | 'max' | 'mean'
        super(GraphConvS_edge, self).__init__(**kwargs)

    def get_config(self):
        base_config = super(GraphConvS_edge, self).get_config()
        base_config['filters'] = self.filters
        base_config['bond_filters'] = self.bond_filters
        base_config['pooling'] = self.pooling
        return base_config

    def build(self, input_shape):
        atom_feat_1 = input_shape[0][-1]
        atom_feat_2 = input_shape[1][-1]
        bond_feat = input_shape[2][-1]
        # Node projection applied after pooling.
        self.w_conv_scalar = self.add_weight(shape=(atom_feat_1 + atom_feat_2, self.filters),
                                             initializer=self.kernel_initializer,
                                             regularizer=self.kernel_regularizer,
                                             name='w_conv_scalar')
        self.b_conv_scalar = self.add_weight(shape=(self.filters,),
                                             name='b_conv_scalar',
                                             initializer=self.bias_initializer)
        # Edge-message projection (node pair + bond features).
        self.w_mp_scalar = self.add_weight(shape=(atom_feat_1 + atom_feat_2 + bond_feat, self.bond_filters),
                                           initializer=self.kernel_initializer,
                                           regularizer=self.kernel_regularizer,
                                           name='w_mp_scalar')
        self.b_mp_scalar = self.add_weight(shape=(self.bond_filters,),
                                           initializer=self.bias_initializer,
                                           name='b_mp_scalar')
        # Final update fusing node features with the aggregated edge message.
        self.w_update_scalar = self.add_weight(shape=(self.filters + self.bond_filters, self.filters),
                                               initializer=self.kernel_initializer,
                                               regularizer=self.kernel_regularizer,
                                               name='w_update_scalar')
        self.b_update_scalar = self.add_weight(shape=(self.filters,),
                                               name='b_update_scalar',
                                               initializer=self.bias_initializer)
        super(GraphConvS_edge, self).build(input_shape)

    def call(self, inputs, bond=None, mask=None):
        # scalar_features_1 = (samples, max_atoms, max_atoms, atom_feat)
        # scalar_features_2 = (samples, max_atoms, max_atoms, atom_feat)
        # bonds = (samples, max_atoms, max_atoms, bond_feat)
        scalar_features_1, scalar_features_2, bonds = inputs
        max_atoms = int(scalar_features_1.shape[1])
        atom_feat_1 = int(scalar_features_1.shape[-1])
        atom_feat_2 = int(scalar_features_2.shape[-1])
        # Concatenate the two pairwise feature tensors.
        scalar_features = tf.concat([scalar_features_1, scalar_features_2], axis=-1)
        # Edge message: transform node-pair + bond features, aggregate over
        # the second atom axis.
        m = tf.concat([scalar_features, bonds], axis=-1)
        m = tf.matmul(m, self.w_mp_scalar) + self.b_mp_scalar
        m = self.activation(m)
        m = tf.reduce_sum(m, axis=2)
        # Integrate over second atom axis.
        # NOTE(review): an unrecognized pooling value leaves the tensor 4-D
        # and the reshape below would fail — confirm callers only pass
        # 'sum' / 'max' / 'mean'.
        if self.pooling == "sum":
            scalar_features = tf.reduce_sum(scalar_features, axis=2)
        elif self.pooling == "max":
            scalar_features = tf.reduce_max(scalar_features, axis=2)
        elif self.pooling == "mean":
            scalar_features = tf.reduce_mean(scalar_features, axis=2)
        # Linear combination of the pooled node features.
        scalar_features = tf.reshape(scalar_features, [-1, atom_feat_1 + atom_feat_2])
        scalar_features = tf.matmul(scalar_features, self.w_conv_scalar) + self.b_conv_scalar
        scalar_features = self.activation(scalar_features)
        scalar_features = tf.reshape(scalar_features, [-1, max_atoms, self.filters])
        # Update: fuse node features with the aggregated edge message.
        scalar_features = tf.concat([scalar_features, m], axis=-1)
        scalar_features = tf.reshape(scalar_features, [-1, self.filters + self.bond_filters])
        scalar_features = tf.matmul(scalar_features, self.w_update_scalar) + self.b_update_scalar
        scalar_features = self.activation(scalar_features)
        scalar_features = tf.reshape(scalar_features, [-1, max_atoms, self.filters])
        return scalar_features

    def compute_output_shape(self, input_shape):
        return input_shape[0][0], input_shape[0][1], self.filters
class GraphConvV_edge(Layer):
    """Graph convolution with edge message passing on vector features.

    Inputs (list): [vector_features_1, vector_features_2, bonds]
      vector_features_*: (samples, max_atoms, max_atoms, coor_dims, atom_feat)
      bonds:             (samples, max_atoms, max_atoms, bond_feat)
    Output: (samples, max_atoms, coor_dims, filters)
    """

    def __init__(self,
                 filters,
                 bond_filters,
                 pooling='sum',
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        # Resolve standard activation names; custom callables pass through.
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.filters = filters          # output width of vector features
        self.bond_filters = bond_filters  # width of the edge message
        self.pooling = pooling          # 'sum' | 'max' | 'avg' (or 'mean')
        super(GraphConvV_edge, self).__init__(**kwargs)

    def get_config(self):
        base_config = super(GraphConvV_edge, self).get_config()
        base_config['filters'] = self.filters
        base_config['bond_filters'] = self.bond_filters
        base_config['pooling'] = self.pooling
        return base_config

    def build(self, input_shape):
        atom_feat_1 = input_shape[0][-1]
        atom_feat_2 = input_shape[1][-1]
        bond_feat = input_shape[2][-1]
        # Node projection applied after pooling.
        self.w_conv_vector = self.add_weight(shape=(atom_feat_1 + atom_feat_2, self.filters),
                                             initializer=self.kernel_initializer,
                                             regularizer=self.kernel_regularizer,
                                             name='w_conv_vector')
        self.b_conv_vector = self.add_weight(shape=(self.filters,),
                                             initializer=self.bias_initializer,
                                             name='b_conv_vector')
        # Edge-message projection (vector pair + broadcast bond features).
        self.w_mp_vector = self.add_weight(shape=(atom_feat_1 + atom_feat_2 + bond_feat, self.bond_filters),
                                           initializer=self.kernel_initializer,
                                           regularizer=self.kernel_regularizer,
                                           name='w_mp_vector')
        self.b_mp_vector = self.add_weight(shape=(self.bond_filters,),
                                           initializer=self.bias_initializer,
                                           name='b_mp_vector')
        # Final update fusing vector features with the aggregated edge message.
        self.w_update_vector = self.add_weight(shape=(self.filters + self.bond_filters, self.filters),
                                               initializer=self.kernel_initializer,
                                               regularizer=self.kernel_regularizer,
                                               name='w_update_vector')
        self.b_update_vector = self.add_weight(shape=(self.filters,),
                                               name='b_update_vector',
                                               initializer=self.bias_initializer)
        super(GraphConvV_edge, self).build(input_shape)

    def call(self, inputs, bond=None, mask=None):
        # vector_features_1 = (samples, max_atoms, max_atoms, coor_dims, atom_feat)
        # vector_features_2 = (samples, max_atoms, max_atoms, coor_dims, atom_feat)
        # bonds = (samples, max_atoms, max_atoms, bond_feat)
        vector_features_1, vector_features_2, bonds = inputs
        max_atoms = int(vector_features_1.shape[1])
        atom_feat_1 = int(vector_features_1.shape[-1])
        atom_feat_2 = int(vector_features_2.shape[-1])
        bond_feat = int(bonds.shape[-1])
        coor_dims = int(vector_features_1.shape[-2])
        # Concatenate the two pairwise vector feature tensors.
        vector_features = tf.concat([vector_features_1, vector_features_2], axis=-1)
        # Edge message: broadcast bond features over the coordinate axis,
        # fuse with vector features, aggregate over the second atom axis.
        m = tf.reshape(bonds, [-1, max_atoms, max_atoms, 1, bond_feat])
        m = tf.tile(m, [1, 1, 1, coor_dims, 1])
        m = tf.concat([vector_features, m], axis=-1)
        m = tf.matmul(m, self.w_mp_vector) + self.b_mp_vector
        m = self.activation(m)
        m = tf.reduce_sum(m, axis=2)
        # Integrate over second atom axis. 'mean' is accepted as an alias for
        # 'avg' for consistency with the sibling scalar layers (backward
        # compatible: 'avg' still works).
        if self.pooling == "sum":
            vector_features = tf.reduce_sum(vector_features, axis=2)
        elif self.pooling == "max":
            vector_features = tf.reduce_max(vector_features, axis=2)
        elif self.pooling in ("avg", "mean"):
            vector_features = tf.reduce_mean(vector_features, axis=2)
        # Linear combination of the pooled vector features.
        vector_features = tf.reshape(vector_features, [-1, atom_feat_1 + atom_feat_2])
        vector_features = tf.matmul(vector_features, self.w_conv_vector) + self.b_conv_vector
        vector_features = self.activation(vector_features)
        vector_features = tf.reshape(vector_features, [-1, max_atoms, coor_dims, self.filters])
        # Update: fuse with the aggregated edge message.
        vector_features = tf.concat([vector_features, m], axis=-1)
        vector_features = tf.reshape(vector_features, [-1, self.filters + self.bond_filters])
        vector_features = tf.matmul(vector_features, self.w_update_vector) + self.b_update_vector
        vector_features = self.activation(vector_features)
        # Fix: the final reshape hard-coded 3 coordinate dimensions; use the
        # coor_dims read from the input (as the earlier reshape already does)
        # so non-3D coordinate inputs are handled correctly.
        vector_features = tf.reshape(vector_features, [-1, max_atoms, coor_dims, self.filters])
        return vector_features

    def compute_output_shape(self, input_shape):
        return input_shape[0][0], input_shape[0][1], input_shape[0][-2], self.filters
# baseline
class GraphConv(Layer):
    """Baseline graph convolution: X' = activation(A X W (+ b)).

    Inputs (list): [scalar_features, adjacency]
      scalar_features: (samples, max_atoms, atom_feat)
      adjacency:       (samples, max_atoms, max_atoms)
    Output: (samples, max_atoms, filters)
    """

    def __init__(self,
                 filters,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        # Known activation names resolve through Keras; anything else is
        # assumed to be a callable and used as given.
        known_activations = ["relu", "tanh", "sigmoid", None]
        self.activation = (activations.get(activation)
                           if activation in known_activations else activation)
        self.kernel_initializer = initializers.get(kernel_initializer)
        # A None bias initializer turns the bias term off entirely.
        self.bias_initializer = (None if bias_initializer is None
                                 else initializers.get(bias_initializer))
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.filters = filters
        super(GraphConv, self).__init__(**kwargs)

    def get_config(self):
        config = super(GraphConv, self).get_config()
        config['filters'] = self.filters
        return config

    def build(self, input_shape):
        in_feat = input_shape[0][-1]
        self.w_conv = self.add_weight(shape=(in_feat, self.filters),
                                      initializer=self.kernel_initializer,
                                      regularizer=self.kernel_regularizer,
                                      name='w_conv')
        if self.bias_initializer is not None:
            self.b_conv = self.add_weight(shape=(self.filters,),
                                          name='b_conv',
                                          initializer=self.bias_initializer)
        super(GraphConv, self).build(input_shape)

    def call(self, inputs, bond=None, mask=None):
        node_feats, adj = inputs
        # Propagate neighbor features: A X.
        propagated = tf.linalg.einsum('aij,ajk->aik', adj, node_feats)
        # Project to the output width: (A X) W, optionally plus bias.
        projected = tf.linalg.einsum('aij,jk->aik', propagated, self.w_conv)
        if self.bias_initializer is not None:
            projected = projected + self.b_conv
        return self.activation(projected)

    def compute_output_shape(self, input_shape):
        node_shape = input_shape[0]
        return node_shape[0], node_shape[1], self.filters
class WeaveLayer(Layer):
    """Weave module updating atom (node) and pair (edge) features.

    Inputs (list): [a_in, p_in, adjacency]
      a_in:      (samples, max_atoms, atom_feat)
      p_in:      (samples, max_atoms, max_atoms, pair_feat)
      adjacency: (samples, max_atoms, max_atoms)
    Outputs: [new_a, new_p]
      new_a: (samples, max_atoms, atom_filters)
      new_p: (samples, max_atoms, max_atoms, pair_filters)
             (p_in is passed through unchanged when update_pair is False)
    """

    def __init__(self,
                 filters=50,
                 atom_filters=50,
                 pair_filters=50,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation="relu",
                 update_pair=True,
                 **kwargs):
        # Resolve standard activation names; custom callables pass through.
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        # bias_initializer=None disables every bias term in this layer.
        if bias_initializer is None:
            self.bias_initializer = None
        else:
            self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.filters = filters            # intermediate width
        self.atom_filters = atom_filters  # output width of atom features
        self.pair_filters = pair_filters  # output width of pair features
        self.update_pair = update_pair    # whether to update pair features
        super(WeaveLayer, self).__init__(**kwargs)

    def get_config(self):
        base_config = super(WeaveLayer, self).get_config()
        base_config['filters'] = self.filters
        base_config['atom_filters'] = self.atom_filters
        base_config['pair_filters'] = self.pair_filters
        base_config['update_pair'] = self.update_pair
        return base_config

    def build(self, input_shape):
        atom_feat = input_shape[0][-1]
        pair_feat = input_shape[1][-1]
        # Atom update: atom->atom, pair->atom, and the fusion projection.
        self.w_aa = self.add_weight(shape=(atom_feat, self.filters),
                                    initializer=self.kernel_initializer,
                                    regularizer=self.kernel_regularizer,
                                    name='w_aa')
        self.w_pa = self.add_weight(shape=(pair_feat, self.filters),
                                    initializer=self.kernel_initializer,
                                    regularizer=self.kernel_regularizer,
                                    name='w_pa')
        self.w_a = self.add_weight(shape=(self.filters * 2, self.atom_filters),
                                   initializer=self.kernel_initializer,
                                   regularizer=self.kernel_regularizer,
                                   name='w_a')
        if self.bias_initializer is not None:
            self.b_aa = self.add_weight(shape=(self.filters,),
                                        initializer=self.bias_initializer,
                                        name='b_aa')
            self.b_pa = self.add_weight(shape=(self.filters,),
                                        initializer=self.bias_initializer,
                                        name='b_pa')
            self.b_a = self.add_weight(shape=(self.atom_filters,),
                                       initializer=self.bias_initializer,
                                       name='b_a')
        if self.update_pair:
            # Pair update: pair->pair, atom->pair (two orderings), fusion.
            self.w_pp = self.add_weight(shape=(pair_feat, self.filters),
                                        initializer=self.kernel_initializer,
                                        regularizer=self.kernel_regularizer,
                                        name='w_pp')
            self.w_ap1 = self.add_weight(shape=(atom_feat*2, self.filters),
                                         initializer=self.kernel_initializer,
                                         regularizer=self.kernel_regularizer,
                                         name='w_ap1')
            self.w_ap2 = self.add_weight(shape=(atom_feat*2, self.filters),
                                         initializer=self.kernel_initializer,
                                         regularizer=self.kernel_regularizer,
                                         name='w_ap2')
            self.w_p = self.add_weight(shape=(self.filters + self.filters, self.pair_filters),
                                       initializer=self.kernel_initializer,
                                       regularizer=self.kernel_regularizer,
                                       name='w_p')
            if self.bias_initializer is not None:
                self.b_pp = self.add_weight(shape=(self.filters,),
                                            initializer=self.bias_initializer,
                                            name='b_pp')
                self.b_ap1 = self.add_weight(shape=(self.filters,),
                                             initializer=self.bias_initializer,
                                             name='b_ap1')
                self.b_ap2 = self.add_weight(shape=(self.filters,),
                                             initializer=self.bias_initializer,
                                             name='b_ap2')
                self.b_p = self.add_weight(shape=(self.pair_filters,),
                                           initializer=self.bias_initializer,
                                           name='b_p')
        super(WeaveLayer, self).build(input_shape)

    def call(self, inputs):
        # a_in = (samples, max_atoms, atom_feat)
        # p_in = (samples, max_atoms, max_atoms, pair_feat)
        a_in, p_in, adjacency = inputs
        max_atoms = int(a_in.shape[1])
        atom_feat = int(a_in.shape[-1])
        bond_feat = int(p_in.shape[-1])
        # --- Atom (node) update ---
        if self.bias_initializer is None:
            a_to_a = tf.linalg.einsum('aij,jk->aik', a_in, self.w_aa)
            # Fix: this branch used 'aijk,kl->ail', which summed out the
            # second atom axis early and produced a 3-D tensor; the adjacency
            # masking einsum below ('aijk,aij->aijk') requires the 4-D 'aijl'
            # layout that the biased branch already uses.
            p_to_a = tf.linalg.einsum('aijk,kl->aijl', p_in, self.w_pa)
        else:
            a_to_a = tf.linalg.einsum('aij,jk->aik', a_in, self.w_aa) + self.b_aa
            p_to_a = tf.linalg.einsum('aijk,kl->aijl', p_in, self.w_pa) + self.b_pa
        a_to_a = self.activation(a_to_a)
        p_to_a = self.activation(p_to_a)
        # Mask non-bonded pairs, then sum pair contributions per atom.
        p_to_a = tf.linalg.einsum('aijk,aij->aijk', p_to_a, adjacency)
        p_to_a = tf.linalg.einsum('aijk->aik', p_to_a)
        new_a = tf.concat([a_to_a, p_to_a], axis=-1)
        if self.bias_initializer is None:
            new_a = tf.linalg.einsum('aij,jk->aik', new_a, self.w_a)
        else:
            new_a = tf.linalg.einsum('aij,jk->aik', new_a, self.w_a) + self.b_a
        new_a = self.activation(new_a)
        if self.update_pair:
            # --- Pair (edge) update ---
            # Expand atom features to a pairwise 4-D tensor and mask.
            a_to_p = tf.reshape(a_in, [-1, max_atoms, 1, atom_feat])
            a_to_p = tf.tile(a_to_p, [1, 1, max_atoms, 1])  # (samples, max_atoms, max_atoms, atom_feat)
            a_to_p = tf.linalg.einsum('aijk,aij->aijk', a_to_p, adjacency)
            a_to_p_t = tf.transpose(a_to_p, perm=[0, 2, 1, 3])  # (samples, max_atoms, max_atoms, atom_feat)
            a_to_p = tf.concat([a_to_p, a_to_p_t], -1)
            if self.bias_initializer is None:
                a_to_p_1 = tf.linalg.einsum('aijk,kl->aijl', a_to_p, self.w_ap1)
                a_to_p_2 = tf.linalg.einsum('aijk,kl->ajil', a_to_p, self.w_ap2)
                p_to_p = tf.linalg.einsum('aijk,kl->aijl', p_in, self.w_pp)
            else:
                a_to_p_1 = tf.linalg.einsum('aijk,kl->aijl', a_to_p, self.w_ap1) + self.b_ap1
                a_to_p_2 = tf.linalg.einsum('aijk,kl->ajil', a_to_p, self.w_ap2) + self.b_ap2
                p_to_p = tf.linalg.einsum('aijk,kl->aijl', p_in, self.w_pp) + self.b_pp
            # Symmetrize the atom->pair term: 'ajil' swaps the i/j axes so
            # both pair orderings contribute.
            new_a_to_p = Add()([a_to_p_1, a_to_p_2])
            new_a_to_p = self.activation(new_a_to_p)
            p_to_p = self.activation(p_to_p)
            new_p = tf.concat([new_a_to_p, p_to_p], axis=-1)
            new_p = tf.linalg.einsum('aijk,aij->aijk', new_p, adjacency)
            if self.bias_initializer is None:
                new_p = tf.linalg.einsum('aijk,kl->aijl', new_p, self.w_p)
            else:
                new_p = tf.linalg.einsum('aijk,kl->aijl', new_p, self.w_p) + self.b_p
            new_p = self.activation(new_p)
            return [new_a, new_p]
        else:
            return [new_a, p_in]

    def compute_output_shape(self, input_shape):
        return [(input_shape[0][0], input_shape[0][1], self.atom_filters),
                (input_shape[1][0], input_shape[1][1], input_shape[1][1], self.pair_filters)]
class WeaveGather(Layer):
    """Gathers per-atom Weave features into a single per-molecule vector.

    Input: a_in = (samples, max_atoms, atom_feat)
    Output:
      gaussian_expand and compress: (samples, n_feat)
      gaussian_expand only:         (samples, atom_feat * 11)
      neither:                      (samples, atom_feat)
    """

    def __init__(self,
                 n_feat=128,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 gaussian_expand=True,
                 compress_post_gaussian_expansion=False,
                 activation="tanh",
                 **kwargs):
        # Resolve standard activation names; custom callables pass through.
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        # bias_initializer=None disables the compression bias.
        if bias_initializer is None:
            self.bias_initializer = None
        else:
            self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.gaussian_expand = gaussian_expand
        self.compress_post_gaussian_expansion = compress_post_gaussian_expansion
        self.n_feat = n_feat
        super(WeaveGather, self).__init__(**kwargs)

    def get_config(self):
        base_config = super(WeaveGather, self).get_config()
        base_config['n_feat'] = self.n_feat
        base_config['gaussian_expand'] = self.gaussian_expand
        base_config['compress_post_gaussian_expansion'] = self.compress_post_gaussian_expansion
        # NOTE(review): this stores the resolved activation *callable*, not
        # the original string, so the config is not JSON-serializable —
        # confirm whether from_config round-trips are required.
        base_config['activation'] = self.activation
        return base_config

    def build(self, input_shape):
        atom_feat = input_shape[-1]
        if self.compress_post_gaussian_expansion:
            # Compress the 11-bin expansion back down to n_feat.
            self.w_compress = self.add_weight(shape=(atom_feat * 11, self.n_feat),
                                              initializer=self.kernel_initializer,
                                              regularizer=self.kernel_regularizer,
                                              name='w_compress')
            if self.bias_initializer is not None:
                self.b_compress = self.add_weight(shape=(self.n_feat,),
                                                  initializer=self.bias_initializer,
                                                  name='b_compress')
        super(WeaveGather, self).build(input_shape)

    def call(self, inputs, mask=None):
        # a_in = (samples, max_atoms, atom_feat)
        a_in = inputs
        if self.gaussian_expand:
            outputs = self.gaussian_histogram(a_in)
            # Integrate over the atom axis.
            output_molecules = tf.reduce_sum(outputs, axis=1)  # (b, atom_feat * 11)
            if self.compress_post_gaussian_expansion:
                output_molecules = tf.matmul(output_molecules, self.w_compress)
                # Fix: b_compress is only created when bias_initializer is
                # not None; adding it unconditionally raised AttributeError
                # for bias-free configurations.
                if self.bias_initializer is not None:
                    output_molecules = output_molecules + self.b_compress
                output_molecules = self.activation(output_molecules)
        else:
            # Integrate over the atom axis without expansion.
            output_molecules = tf.reduce_sum(a_in, axis=1)  # (b, atom_feat)
        return output_molecules

    def compute_output_shape(self, input_shape):
        if self.gaussian_expand and self.compress_post_gaussian_expansion:
            # Fix: the compression maps atom_feat * 11 -> n_feat, so the
            # output width is n_feat, not the input feature width.
            return input_shape[0], self.n_feat
        elif self.gaussian_expand:
            return input_shape[0], input_shape[-1] * 11
        elif not self.gaussian_expand:
            return input_shape[0], input_shape[-1]

    def gaussian_histogram(self, x):
        """Expands input into a set of gaussian histogram bins.

        Parameters
        ----------
        x: tf.Tensor
            Of shape `(N, n_feat)`

        Examples
        --------
        This method uses 11 bins spanning portions of a Gaussian with zero mean
        and unit standard deviation.
        >>> gaussian_memberships = [(-1.645, 0.283), (-1.080, 0.170),
        ...                         (-0.739, 0.134), (-0.468, 0.118),
        ...                         (-0.228, 0.114), (0., 0.114),
        ...                         (0.228, 0.114), (0.468, 0.118),
        ...                         (0.739, 0.134), (1.080, 0.170),
        ...                         (1.645, 0.283)]

        We construct a Gaussian at `gaussian_memberships[i][0]` with standard
        deviation `gaussian_memberships[i][1]`. Each feature in `x` is assigned
        the probability of falling in each Gaussian, and probabilities are
        normalized across the 11 different Gaussians.

        Returns
        -------
        outputs: tf.Tensor
            Of shape `(N, 11*n_feat)`
        """
        import tensorflow_probability as tfp
        gaussian_memberships = [(-1.645, 0.283), (-1.080, 0.170), (-0.739, 0.134),
                                (-0.468, 0.118), (-0.228, 0.114), (0., 0.114),
                                (0.228, 0.114), (0.468, 0.118), (0.739, 0.134),
                                (1.080, 0.170), (1.645, 0.283)]
        dist = [tfp.distributions.Normal(p[0], p[1]) for p in gaussian_memberships]
        # Normalize each bin by its peak density so memberships are in [0, 1].
        dist_max = [dist[i].prob(gaussian_memberships[i][0]) for i in range(11)]
        outputs = [dist[i].prob(x) / dist_max[i] for i in range(11)]
        outputs = tf.stack(outputs, axis=3)
        outputs = outputs / tf.reduce_sum(outputs, axis=3, keepdims=True)
        outputs = tf.reshape(outputs, [-1, x.shape[1], x.shape[2] * 11])
        return outputs
class MessagePassing(Layer):
    """Runs T steps of MPNN-style message passing over atom states.

    Inputs (list): [atoms, bonds]
      atoms: (samples, max_atoms, atom_feat), atom_feat must be <= n_hidden
             (states are zero-padded up to n_hidden)
      bonds: (samples, max_atoms, max_atoms, bond_feat)
    Output: (samples, max_atoms, n_hidden)
    """

    def __init__(self,
                 t=5,
                 n_hidden=100,
                 message_fn='enn',
                 update_fn='gru',
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        # Resolve standard activation names; custom callables pass through.
        if activation in ["relu", "tanh", "sigmoid", None]:
            self.activation = activations.get(activation)
        else:
            self.activation = activation
        self.kernel_initializer = initializers.get(kernel_initializer)
        # bias_initializer=None disables bias terms in the sub-layers.
        if bias_initializer is None:
            self.bias_initializer = None
        else:
            self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.t = t                    # number of message-passing steps
        self.message_fn = message_fn  # 'enn' or a custom callable
        self.update_fn = update_fn    # 'gru' or a custom callable
        self.n_hidden = n_hidden      # width of the hidden atom state
        super(MessagePassing, self).__init__(**kwargs)

    def get_config(self):
        base_config = super(MessagePassing, self).get_config()
        base_config['t'] = self.t
        base_config['n_hidden'] = self.n_hidden
        base_config['message_fn'] = self.message_fn
        base_config['update_fn'] = self.update_fn
        return base_config

    def build(self, input_shape):
        atom_feat = input_shape[0][-1]
        bond_feat = input_shape[1][-1]
        # Fix: build() previously overwrote self.message_fn / self.update_fn
        # with layer instances, which corrupted get_config() after the layer
        # was built (non-serializable values) and made repeated build() calls
        # inconsistent. Keep the config values intact and hold the resolved
        # sub-layers in private attributes instead.
        if self.message_fn == 'enn':
            # Default message function: edge network.
            self._message_layer = EdgeNetwork(self.n_hidden)
        else:
            self._message_layer = self.message_fn
        if self.update_fn == 'gru':
            # Default update function: GRU cell.
            self._update_layer = GatedRecurrentUnit(self.n_hidden)
        else:
            self._update_layer = self.update_fn
        # super().build() marks the layer as built.
        super(MessagePassing, self).build(input_shape)

    def call(self, inputs):
        """Perform T steps of message passing."""
        # atoms = (samples, max_atoms, atom_feat)
        # bonds = (samples, max_atoms, max_atoms, bond_feat)
        atoms, bonds = inputs
        max_atoms = int(atoms.shape[1])
        atom_feat = int(atoms.shape[-1])
        # Zero-pad the atom state up to the hidden width.
        if atom_feat < self.n_hidden:
            pad_length = self.n_hidden - atom_feat
            out = tf.pad(atoms, ((0, 0), (0, 0), (0, pad_length)), mode='CONSTANT')
        elif atom_feat > self.n_hidden:
            raise ValueError("Too large initial feature vector")
        else:
            out = atoms
        for i in range(self.t):
            message = self._message_layer([out, bonds])
            out = self._update_layer([out, message])
        return out

    def compute_output_shape(self, input_shape):
        return input_shape[0][0], input_shape[0][1], self.n_hidden
class EdgeNetwork(Layer):
    """Edge-network message function used by MessagePassing.

    Maps each bond feature vector to a flattened (n_hidden x n_hidden)
    matrix and combines it with broadcast atom states to produce a
    per-atom message of width n_hidden.
    """

    def __init__(self,
                 n_hidden=100,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 activation=None,
                 **kwargs):
        # n_hidden: width of the hidden atom state / message.
        self.n_hidden = n_hidden
        self.kernel_initializer = initializers.get(kernel_initializer)
        # bias_initializer=None disables the edge bias term.
        if bias_initializer is None:
            self.bias_initializer = None
        else:
            self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.n_hidden = n_hidden  # NOTE(review): duplicate assignment (harmless)
        super(EdgeNetwork, self).__init__(**kwargs)

    def get_config(self):
        base_config = super(EdgeNetwork, self).get_config()
        base_config['n_hidden'] = self.n_hidden
        return base_config

    def build(self, input_shape):
        bond_feat = input_shape[1][-1]
        # One flattened (n_hidden x n_hidden) matrix per bond feature vector.
        self.w_edge = self.add_weight(shape=(bond_feat, self.n_hidden * self.n_hidden),
                                      initializer=self.kernel_initializer,
                                      regularizer=self.kernel_regularizer,
                                      name='w_edge')
        if self.bias_initializer is not None:
            self.b_edge = self.add_weight(shape=(self.n_hidden * self.n_hidden,),
                                          initializer=self.bias_initializer,
                                          name='b_edge')
        super(EdgeNetwork, self).build(input_shape)

    def call(self, inputs):
        # atoms = (samples, max_atoms, atom_feat) with atom_feat == n_hidden
        # bonds = (samples, max_atoms, max_atoms, bond_feat)
        atoms, bonds = inputs
        max_atoms = int(atoms.shape[1])
        # Per-edge transformation matrices derived from bond features.
        if self.bias_initializer is None:
            adj_from_bond = tf.matmul(bonds, self.w_edge)
        else:
            adj_from_bond = tf.matmul(bonds, self.w_edge) + self.b_edge
        adj_from_bond = tf.reshape(adj_from_bond, (-1, max_atoms, max_atoms, self.n_hidden, self.n_hidden))
        # (samples, max_atoms, max_atoms, n_hidden, n_hidden)
        atoms_expand = tf.reshape(atoms, [-1, max_atoms, 1, self.n_hidden])
        atoms_expand = tf.tile(atoms_expand, [1, 1, max_atoms, 1])  # (samples, max_atoms, max_atoms, n_hidden)
        #atoms_expand = tf.linalg.einsum('aijk,aij->aijk', atoms_expand, adjacency)
        # NOTE(review): 'aijkl,aijk->aijk' sums only over l, i.e. it scales
        # each atom feature k by the row-sum of its edge matrix — it is NOT
        # the matrix-vector product ('aijkl,aijl->aijk') of the standard MPNN
        # edge network. Also atoms_expand[a, i, j, :] broadcasts atom i's own
        # state (not neighbor j's, which would need a transpose). Confirm
        # whether both are intentional before changing — trained weights
        # depend on the current behavior.
        atoms_out = tf.linalg.einsum('aijkl,aijk->aijk', adj_from_bond, atoms_expand)  # (samples, max_atoms, max_atoms, n_hidden)
        #atoms_out = tf.linalg.einsum('aijk,aij->aik', atoms_out, adjacency)  # (samples, max_atoms, n_hidden)
        #atoms_out = tf.linalg.einsum('aijk,aij->aijk', atoms_out, adjacency)
        # Aggregate messages over the second atom axis.
        atoms_out = tf.reduce_sum(atoms_out, axis=2)
        return atoms_out

    def compute_output_shape(self, input_shape):
        return input_shape[0][0], input_shape[0][1], self.n_hidden
class GatedRecurrentUnit(Layer):
    """GRU update function for message passing (applied per atom).

    Combines the previous node state with an incoming message via the
    standard GRU gating equations (update gate z, reset gate r).
    """

    def __init__(self, n_hidden=100,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 bias_initializer='zeros',
                 **kwargs):
        super(GatedRecurrentUnit, self).__init__(**kwargs)
        self.n_hidden = n_hidden
        self.kernel_initializer = initializers.get(kernel_initializer)
        # bias_initializer=None disables the learned bias terms.
        if bias_initializer is None:
            self.bias_initializer = None
        else:
            self.bias_initializer = initializers.get(bias_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)

    def get_config(self):
        """Serialize the layer config, including `n_hidden`."""
        config = super(GatedRecurrentUnit, self).get_config()
        config['n_hidden'] = self.n_hidden
        return config

    def build(self, input_shape):
        n_hidden = self.n_hidden
        # W* act on the incoming message, U* on the previous hidden state.
        self.Wz = self.add_weight(shape=(n_hidden, n_hidden),
                                  initializer=self.kernel_initializer,
                                  regularizer=self.kernel_regularizer,
                                  name='Wz')
        self.Wr = self.add_weight(shape=(n_hidden, n_hidden),
                                  initializer=self.kernel_initializer,
                                  regularizer=self.kernel_regularizer,
                                  name='Wr')
        self.Wh = self.add_weight(shape=(n_hidden, n_hidden),
                                  initializer=self.kernel_initializer,
                                  regularizer=self.kernel_regularizer,
                                  name='Wh')
        self.Uz = self.add_weight(shape=(n_hidden, n_hidden),
                                  initializer=self.kernel_initializer,
                                  regularizer=self.kernel_regularizer,
                                  name='Uz')
        self.Ur = self.add_weight(shape=(n_hidden, n_hidden),
                                  initializer=self.kernel_initializer,
                                  regularizer=self.kernel_regularizer,
                                  name='Ur')
        self.Uh = self.add_weight(shape=(n_hidden, n_hidden),
                                  initializer=self.kernel_initializer,
                                  regularizer=self.kernel_regularizer,
                                  name='Uh')
        if self.bias_initializer is not None:
            self.bz = self.add_weight(shape=(n_hidden,),
                                      initializer=self.bias_initializer,
                                      name='bz')
            self.br = self.add_weight(shape=(n_hidden,),
                                      initializer=self.bias_initializer,
                                      name='br')
            self.bh = self.add_weight(shape=(n_hidden,),
                                      initializer=self.bias_initializer,
                                      name='bh')
        else:
            # BUGFIX: call() adds the biases unconditionally; previously a
            # None bias_initializer crashed with AttributeError. A scalar 0
            # keeps the arithmetic a no-op without extra parameters.
            self.bz = self.br = self.bh = 0.0
        # Mark the layer as built (consistent with EdgeNetwork.build).
        super(GatedRecurrentUnit, self).build(input_shape)

    def call(self, inputs):
        # inputs[0] = previous hidden state, inputs[1] = incoming message
        z = tf.nn.sigmoid(
            tf.matmul(inputs[1], self.Wz) + tf.matmul(inputs[0], self.Uz) + self.bz)
        r = tf.nn.sigmoid(
            tf.matmul(inputs[1], self.Wr) + tf.matmul(inputs[0], self.Ur) + self.br)
        # Candidate state gated by r, blended with the old state via z.
        h = (1 - z) * tf.nn.tanh(
            tf.matmul(inputs[1], self.Wh) + tf.matmul(inputs[0] * r, self.Uh) +
            self.bh) + z * inputs[0]
        return h
| 45.542624
| 130
| 0.582227
| 13,574
| 116,999
| 4.753131
| 0.026374
| 0.06922
| 0.039167
| 0.023156
| 0.920814
| 0.899611
| 0.881368
| 0.860785
| 0.849455
| 0.833429
| 0
| 0.015161
| 0.315592
| 116,999
| 2,568
| 131
| 45.560358
| 0.790571
| 0.112061
| 0
| 0.740454
| 0
| 0
| 0.029671
| 0.000929
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074156
| false
| 0.002214
| 0.004427
| 0.013282
| 0.141671
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c0527f3fd42fa888ccd6df43f7e2fc129fcb88b6
| 1,524
|
py
|
Python
|
matching_code/evaluate_single.py
|
KunpengLi1994/PsTuts
|
2063bf0aac8d3fd13bf5a14b80ce05586b8365f9
|
[
"Apache-2.0"
] | 4
|
2020-10-17T15:20:15.000Z
|
2021-05-01T02:42:27.000Z
|
matching_code/evaluate_single.py
|
jingliang95/PsTuts
|
2063bf0aac8d3fd13bf5a14b80ce05586b8365f9
|
[
"Apache-2.0"
] | null | null | null |
matching_code/evaluate_single.py
|
jingliang95/PsTuts
|
2063bf0aac8d3fd13bf5a14b80ce05586b8365f9
|
[
"Apache-2.0"
] | 1
|
2021-11-03T13:58:36.000Z
|
2021-11-03T13:58:36.000Z
|
# Evaluation driver: ranks saved image/text matching checkpoints with
# evaluation.evalrank on the test split.
# Vocabulary is imported for its side effect: the checkpoints reference it
# when deserialized — presumably required for unpickling; verify.
from vocab import Vocabulary
import evaluation
# import evaluation_vsepp
# evaluation_vsepp.evalrank("runs/save_ori_vse/coco_vse++_combine/model_best.pth.tar", data_path='data/', split="test", fold5=False)
# evaluation.evalrank("runs/coco_vse++_combine_P_S/model_best.pth.tar", data_path='data/', split="test", fold5=False)
# for i in range(1,17):
#     print('model:' + str(i) + '\n')
#     evaluation.evalrank("runs/save_fc_attn/coco_combine_double_GCN_attn_" + str(i) +"/model_best.pth.tar", data_path='../vsepp-master/data_SCAN/', split="test", fold5=False)
#     print('\n')
# # print('\n')
# # evaluation.evalrank("runs/coco_combine_double_GCN_attn/checkpoint.pth.tar", data_path='../vsepp-master/data_SCAN/', split="test", fold5=False)
# for i in range(1,10):
#     print('model:' + str(i) + '\n')
#     evaluation.evalrank("runs/camera/GCN_Attn_" + str(i) +"/model_best.pth.tar", data_path='../vsepp-master/data_SCAN/', split="test", fold5=False)
#     print('\n')
# # print('\n')
# # evaluation.evalrank("runs/coco_combine_double_GCN_attn/checkpoint.pth.tar", data_path='../vsepp-master/data_SCAN/', split="test", fold5=False)
# Evaluate checkpoints 1..4 of the Only_Visual_GCN_Attn run series.
for i in range(1,5):
    print('model:' + str(i) + '\n')
    evaluation.evalrank("runs/camera/Only_Visual_GCN_Attn_" + str(i) +"/model_best.pth.tar", data_path='../vsepp-master/data_SCAN/', split="test", fold5=False)
    print('\n')
# print('\n')
# evaluation.evalrank("runs/coco_combine_double_GCN_attn/checkpoint.pth.tar", data_path='../vsepp-master/data_SCAN/', split="test", fold5=False)
| 46.181818
| 173
| 0.704724
| 230
| 1,524
| 4.434783
| 0.2
| 0.094118
| 0.078431
| 0.109804
| 0.827451
| 0.810784
| 0.810784
| 0.810784
| 0.77451
| 0.678431
| 0
| 0.011519
| 0.088583
| 1,524
| 32
| 174
| 47.625
| 0.722822
| 0.788058
| 0
| 0
| 0
| 0
| 0.307692
| 0.197324
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0.333333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
c05d036096a9d4fba40aaaf887e460f901be510d
| 4,004
|
py
|
Python
|
tests/charts-out/test_graphics_charts_barcharts_sampleV4a.py
|
debragail/reportlab-mirror
|
1e5814e1313ed50d5abb65487b207711cb4f7595
|
[
"BSD-3-Clause"
] | 1
|
2020-05-21T23:34:55.000Z
|
2020-05-21T23:34:55.000Z
|
tests/charts-out/test_graphics_charts_barcharts_sampleV4a.py
|
debragail/reportlab-mirror
|
1e5814e1313ed50d5abb65487b207711cb4f7595
|
[
"BSD-3-Clause"
] | null | null | null |
tests/charts-out/test_graphics_charts_barcharts_sampleV4a.py
|
debragail/reportlab-mirror
|
1e5814e1313ed50d5abb65487b207711cb4f7595
|
[
"BSD-3-Clause"
] | null | null | null |
#Autogenerated by ReportLab guiedit do not edit
from reportlab.graphics.shapes import _DrawingEditorMixin, Drawing, Group, Rect, Line, String
from reportlab.lib.colors import Color, CMYKColor, PCMYKColor
class ExplodedDrawing_Drawing(_DrawingEditorMixin,Drawing):
    """A 400x200 bar-chart drawing built from raw shapes.

    Autogenerated by ReportLab's guiedit: an outer frame, two red bars
    ('Ying'/'Yang'), axis lines with tick marks, and tick/category labels.
    Do not edit by hand; regenerate with guiedit instead.
    """
    def __init__(self,width=400,height=200,*args,**kw):
        Drawing.__init__(self,width,height,*args,**kw)
        self.transform = (1,0,0,1,0,0)
        # Plot frame and the two data bars.
        self.add(Rect(50,50,300,125,rx=0,ry=0,fillColor=None,fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        self.add(Rect(75,50,100,27.08333,rx=0,ry=0,fillColor=Color(1,0,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        self.add(Rect(225,50,100,41.66667,rx=0,ry=0,fillColor=Color(1,0,0,1),fillOpacity=None,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        # X axis with two category tick marks.
        self.add(Line(50,49,350,49,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(50,49,50,44,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(200,49,200,44,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(350,49,350,44,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        # Category labels under the bars.
        v0=self._nn(Group())
        v0.transform = (1,0,0,1,125,44)
        v0.add(String(-10,-10,'Ying',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
        v0=self._nn(Group())
        v0.transform = (1,0,0,1,275,44)
        v0.add(String(-10.83,-10,'Yang',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
        # Y axis with tick marks every 31.25 units (values 0..60 step 15).
        self.add(Line(50,50,50,175,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(50,50,45,50,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(50,81.25,45,81.25,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(50,112.5,45,112.5,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(50,143.75,45,143.75,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        self.add(Line(50,175,45,175,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None))
        # Y-axis value labels.
        v0=self._nn(Group())
        v0.transform = (1,0,0,1,45,50)
        v0.add(String(-5,-4,'0',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
        v0=self._nn(Group())
        v0.transform = (1,0,0,1,45,81.25)
        v0.add(String(-10,-4,'15',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
        v0=self._nn(Group())
        v0.transform = (1,0,0,1,45,112.5)
        v0.add(String(-10,-4,'30',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
        v0=self._nn(Group())
        v0.transform = (1,0,0,1,45,143.75)
        v0.add(String(-10,-4,'45',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
        v0=self._nn(Group())
        v0.transform = (1,0,0,1,45,175)
        v0.add(String(-10,-4,'60',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))
# When run directly, save the drawing to ./ as a PDF.
if __name__=="__main__": #NORUNTESTS
    ExplodedDrawing_Drawing().save(formats=['pdf'],outDir='.',fnRoot=None)
| 85.191489
| 222
| 0.766733
| 653
| 4,004
| 4.660031
| 0.145482
| 0.03352
| 0.029576
| 0.05258
| 0.8209
| 0.777851
| 0.777851
| 0.777851
| 0.777851
| 0.777851
| 0
| 0.114694
| 0.03971
| 4,004
| 46
| 223
| 87.043478
| 0.676723
| 0.013986
| 0
| 0.166667
| 1
| 0
| 0.035732
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02381
| false
| 0
| 0.047619
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3f6cd9deb0f663b01f4a61eb2c7fef4a8ab0b70f
| 33,551
|
py
|
Python
|
BoManifolds/plot_utils/bo_plots.py
|
NoemieJaquier/GaBOtorch
|
8753ddd0f7de5cc49d68cec88f2ce5e3192f72ca
|
[
"MIT"
] | 21
|
2020-06-18T20:35:33.000Z
|
2022-03-29T16:46:08.000Z
|
BoManifolds/plot_utils/bo_plots.py
|
NoemieJaquier/GaBOtorch
|
8753ddd0f7de5cc49d68cec88f2ce5e3192f72ca
|
[
"MIT"
] | null | null | null |
BoManifolds/plot_utils/bo_plots.py
|
NoemieJaquier/GaBOtorch
|
8753ddd0f7de5cc49d68cec88f2ce5e3192f72ca
|
[
"MIT"
] | 3
|
2021-03-30T09:08:57.000Z
|
2022-03-24T07:53:21.000Z
|
import numpy as np
import torch
import matplotlib.pyplot as plt
import matplotlib.pylab as pl
from BoManifolds.Riemannian_utils.utils import rotation_matrix_from_axis_angle
from BoManifolds.plot_utils.manifolds_plots import plot_spd_cone
plt.rcParams['text.usetex'] = True
plt.rcParams['text.latex.preamble'] = r'\usepackage{bm}'
'''
This file is part of the GaBOtorch library.
Authors: Noemie Jaquier and Leonel Rozo, 2020
License: MIT
Contact: noemie.jaquier@kit.edu, leonel.rozo@de.bosch.com
The functions of this file are based on the function of botorch (in botorch.optim).
'''
def bo_plot_function_sphere(ax, function, true_opt_x=None, true_opt_y=None, xs=None, max_colors=None,
                            alpha=0.4, elev=30, azim=-60, n_elems=100):
    """
    Plot a function on the surface of a 2-sphere

    Parameters
    ----------
    :param ax: figure axis
    :param function: function to plot

    Optional parameters
    -------------------
    :param true_opt_x: true minimum point on the sphere [1 x 3]
    :param true_opt_y: true minimum value
    :param xs: samples of the BO [n x 3]
    :param max_colors: maximum value (to bound the colors)
    :param alpha: transparency
    :param elev: axis elevation
    :param azim: axis azimut
    :param n_elems: number of elements to approximate the sphere

    Returns
    -------
    :return: max_colors
    """
    # Make the panes transparent
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    # Make the grid lines transparent
    ax.xaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    ax.yaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    ax.zaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    # Remove axis
    ax._axis3don = False
    # Initial view
    ax.view_init(elev=elev, azim=azim)
    # Sphere mesh (0.0001 offsets avoid degenerate points at the poles/seam)
    u = np.linspace(0.0001, 2 * np.pi, n_elems)
    v = np.linspace(0.0001, np.pi, n_elems)
    r = 1
    x_sphere = r * np.outer(np.cos(u), np.sin(v))
    y_sphere = r * np.outer(np.sin(u), np.sin(v))
    z_sphere = r * np.outer(np.ones(np.size(u)), np.cos(v))
    # Colors in function of the function value at each surface point
    colors = np.zeros(x_sphere.shape)
    for i in range(x_sphere.shape[0]):
        for j in range(x_sphere.shape[1]):
            data_tmp = torch.Tensor([[x_sphere[i, j], y_sphere[i, j], z_sphere[i, j]]])
            colors[i, j] = function(data_tmp).detach().numpy()
    # Shift values so the (known or observed) minimum maps to 0
    if true_opt_y is not None:
        min_colors = true_opt_y
    else:
        min_colors = np.min(colors)
    colors = colors - min_colors
    if max_colors is None:
        max_colors = np.max(colors)
    else:
        # BUGFIX: clip values above max_colors; the result of np.min was
        # previously discarded, making the bound a silent no-op.
        colors = np.min([colors, max_colors * np.ones(colors.shape)], axis=0)
    colors = pl.cm.inferno(np.ones(colors.shape) - colors / max_colors)
    ax.plot_surface(x_sphere, y_sphere, z_sphere, rstride=4, cstride=4, facecolors=colors, linewidth=0., alpha=alpha)
    # Plots xs
    if xs is not None:
        for n in range(xs.shape[0]):
            ax.scatter(xs[n, 0], xs[n, 1], xs[n, 2], c='k')
    # Plot true minimum
    if true_opt_x is not None:
        ax.scatter(true_opt_x[0, 0], true_opt_x[0, 1], true_opt_x[0, 2], s=100, c='limegreen', marker='*')
    # Limits
    lim = 1.1
    ax.set_xlim([-lim, lim])
    ax.set_ylim([-lim, lim])
    ax.set_zlim([-lim, lim])
    return max_colors
def bo_plot_function_sphere_planar(fig, function, xs=None, ys=None, true_opt_x=None, true_opt_y=None,
                                   max_colors=None, alpha=0.2, n_elems=100):
    """
    Plot a function on the sphere as 2d-projections

    Parameters
    ----------
    :param fig: figure
    :param function: function to plot

    Optional parameters
    -------------------
    :param xs: observations of the GP (samples of the BO) [n x 3]
    :param ys: value of the observations
    :param true_opt_x: true optimum [1 x 3]
    :param true_opt_y: true optimum value
    :param max_colors: maximum value (to bound the colors)
    :param alpha: transparency of the mean surfaces
    :param n_elems: number of elements to approximate the sphere

    Returns
    -------
    :return: axis of the two subplots
    """
    # Sphere mesh
    u = np.linspace(0, 2 * np.pi, n_elems)
    v = np.linspace(0, np.pi, n_elems)
    r = 1
    x_sphere = r * np.outer(np.cos(u), np.sin(v))
    y_sphere = r * np.outer(np.sin(u), np.sin(v))
    z_sphere = r * np.outer(np.ones(np.size(u)), np.cos(v))
    # Value of the function at each surface point
    fmean = np.zeros(x_sphere.shape)
    for i in range(x_sphere.shape[0]):
        for j in range(x_sphere.shape[1]):
            data_tmp = torch.Tensor([[x_sphere[i, j], y_sphere[i, j], z_sphere[i, j]]])
            fmean[i, j] = function(data_tmp).detach().numpy()
    # Shift values so the (known or observed) minimum maps to 0
    if true_opt_y is not None:
        min_colors = true_opt_y
    else:
        min_colors = np.min(fmean)
    colors = fmean - min_colors
    if max_colors is None:
        max_colors = np.max(colors)
    else:
        # BUGFIX: clip values above max_colors; the result of np.min was
        # previously discarded, making the bound a silent no-op.
        colors = np.min([colors, max_colors * np.ones(colors.shape)], axis=0)
    colors = colors / max_colors
    colors = pl.cm.inferno(np.ones(colors.shape) - colors)
    if ys is not None:
        colors_ys = pl.cm.inferno(np.ones(ys.shape) - (ys - min_colors) / max_colors)
    # Plot x vs y
    ax1 = fig.add_subplot(121, projection='3d')
    # Make the panes transparent
    ax1.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax1.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax1.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax1.plot_surface(x_sphere, y_sphere, fmean, rstride=8, cstride=8, facecolors=colors, alpha=alpha,
                     edgecolor='white', linewidth=0.3)
    # Plots xs
    if xs is not None and ys is not None:
        for n in range(xs.shape[0]):
            ax1.scatter(xs[n, 0], xs[n, 1], ys[n], c='darkblue', s=25)
    # Plot true minimum
    if true_opt_x is not None and true_opt_y is not None:
        ax1.scatter(true_opt_x[0, 0], true_opt_x[0, 1], true_opt_y, s=100, c='limegreen', marker='*')
    ax1.locator_params(axis='x', nbins=4)
    ax1.locator_params(axis='y', nbins=4)
    ax1.locator_params(axis='z', nbins=4)
    ax1.tick_params(labelsize=16)
    ax1.set_xlabel(r'$x_1$', fontsize=24)
    ax1.set_ylabel(r'$x_2$', fontsize=24)
    ax1.set_zlabel(r'$f(\bm{x})$', fontsize=24)
    # Plot y vs z
    ax2 = fig.add_subplot(122, projection='3d')
    # Make the panes transparent
    ax2.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax2.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax2.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    # FIX: honor the alpha parameter (ax2 previously hard-coded alpha=0.2,
    # which is the parameter's default, so default behavior is unchanged).
    ax2.plot_surface(y_sphere, z_sphere, fmean, rstride=8, cstride=8, facecolors=colors, alpha=alpha,
                     edgecolor='white', linewidth=0.3)
    # Plots xs
    if xs is not None and ys is not None:
        for n in range(xs.shape[0]):
            ax2.scatter(xs[n, 1], xs[n, 2], ys[n], c='darkblue', s=25)
    # Plot true minimum
    if true_opt_x is not None and true_opt_y is not None:
        ax2.scatter(true_opt_x[0, 1], true_opt_x[0, 2], true_opt_y, s=100, c='limegreen', marker='*')
    ax2.locator_params(axis='x', nbins=4)
    ax2.locator_params(axis='y', nbins=4)
    ax2.locator_params(axis='z', nbins=4)
    ax2.tick_params(labelsize=16)
    ax2.set_xlabel(r'$x_2$', fontsize=24)
    ax2.set_ylabel(r'$x_3$', fontsize=24)
    ax2.set_zlabel(r'$f(\bm{x})$', fontsize=24)
    return ax1, ax2
def bo_plot_acquisition_sphere(ax, acq_fct, xs=None, opt_x=None, true_opt_x=None, alpha=0.4, elev=30, azim=-60,
                               n_elems=100):
    """
    Plot an acquisition function at the surface of the sphere

    Parameters
    ----------
    :param ax: figure axis
    :param acq_fct: acquisition function

    Optional parameters
    -------------------
    :param xs: samples of the BO [n x 3]
    :param opt_x: current best optimizer of the BO [1 x 3]
    :param true_opt_x: true best optimizer [1 x 3]
    :param alpha: transparency
    :param elev: axis elevation
    :param azim: axis azimut
    :param n_elems: number of elements to approximate the sphere

    Returns
    -------
    :return: -
    """
    # Make the panes transparent
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    # Make the grid lines transparent
    ax.xaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    ax.yaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    ax.zaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    # Remove axis
    ax._axis3don = False
    # Initial view
    ax.view_init(elev=elev, azim=azim)
    # Sphere mesh
    u = np.linspace(0, 2 * np.pi, n_elems)
    v = np.linspace(0, np.pi, n_elems)
    r = 1
    x_sphere = r * np.outer(np.cos(u), np.sin(v))
    y_sphere = r * np.outer(np.sin(u), np.sin(v))
    z_sphere = r * np.outer(np.ones(np.size(u)), np.cos(v))
    # Colors in function of acquisition function, evaluated pointwise
    colors = np.zeros(x_sphere.shape)
    for i in range(x_sphere.shape[0]):
        for j in range(x_sphere.shape[1]):
            data_tmp = torch.Tensor([[x_sphere[i, j], y_sphere[i, j], z_sphere[i, j]]]).double()
            colors[i, j] = acq_fct(data_tmp).detach().numpy()
    # Normalize to [0, 1] and map through the (reversed) inferno colormap
    colors = colors - np.min(colors)
    colors = colors / np.max(colors)
    colors = pl.cm.inferno(np.ones(colors.shape) - colors)
    ax.plot_surface(x_sphere, y_sphere, z_sphere, rstride=4, cstride=4, facecolors=colors, linewidth=0., alpha=alpha)
    # Plots xs
    if xs is not None:
        for n in range(xs.shape[0]):
            ax.scatter(xs[n, 0], xs[n, 1], xs[n, 2], c='k')
    # Plot current BO optimum (diamond marker)
    if opt_x is not None:
        ax.scatter(opt_x[0, 0], opt_x[0, 1], opt_x[0, 2], s=30, c='deepskyblue', marker='D')
    # Plot true minimum (star marker)
    if true_opt_x is not None:
        ax.scatter(true_opt_x[0, 0], true_opt_x[0, 1], true_opt_x[0, 2], s=100, c='limegreen', marker='*')
    # Limits
    lim = 1.1
    ax.set_xlim([-lim, lim])
    ax.set_ylim([-lim, lim])
    ax.set_zlim([-lim, lim])
def bo_plot_gp_sphere(ax, model, xs=None, opt_x=None, true_opt_x=None, true_opt_y=None, max_colors=None,
                      elev=30, azim=-60, n_elems=100):
    """
    Plot a GP at the surface of the sphere

    Parameters
    ----------
    :param ax: figure axis
    :param model: GP model

    Optional parameters
    -------------------
    :param xs: observations of the GP (samples of the BO) [n x 3]
    :param opt_x: current best optimizer (of the BO) [1 x 3]
    :param true_opt_x: true optimum [1 x 3]
    :param true_opt_y: true optimum value
    :param max_colors: maximum value (to bound the colors)
    :param elev: axis elevation
    :param azim: axis azimut
    :param n_elems: number of elements to approximate the sphere

    Returns
    -------
    :return: -
    """
    # Make the panes transparent
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    # Make the grid lines transparent
    ax.xaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    ax.yaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    ax.zaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    # Initial view
    ax.view_init(elev=elev, azim=azim)
    # Sphere mesh
    u = np.linspace(0, 2 * np.pi, n_elems)
    v = np.linspace(0, np.pi, n_elems)
    r = 1
    x_sphere = r * np.outer(np.cos(u), np.sin(v))
    y_sphere = r * np.outer(np.sin(u), np.sin(v))
    z_sphere = r * np.outer(np.ones(np.size(u)), np.cos(v))
    # Colors in function of the GP posterior mean at each surface point
    colors = np.zeros(x_sphere.shape)
    for i in range(x_sphere.shape[0]):
        for j in range(x_sphere.shape[1]):
            data_tmp = torch.Tensor([[x_sphere[i, j], y_sphere[i, j], z_sphere[i, j]]]).double()
            colors[i, j] = model(data_tmp).mean.detach().numpy()
    # Shift values so the (known or observed) minimum maps to 0
    if true_opt_y is not None:
        min_colors = true_opt_y
    else:
        min_colors = np.min(colors)
    colors = colors - min_colors
    if max_colors is None:
        max_colors = np.max(colors)
    else:
        # BUGFIX: clip values above max_colors; the result of np.min was
        # previously discarded, making the bound a silent no-op.
        colors = np.min([colors, max_colors * np.ones(colors.shape)], axis=0)
    colors = colors/max_colors
    colors = pl.cm.inferno(np.ones(colors.shape) - colors)
    ax.plot_surface(x_sphere, y_sphere, z_sphere, rstride=4, cstride=4, facecolors=colors, linewidth=0., alpha=0.4)
    # Plots xs
    if xs is not None:
        for n in range(xs.shape[0]):
            ax.scatter(xs[n, 0], xs[n, 1], xs[n, 2], c='k')
    # Plot current BO optimum (diamond marker)
    if opt_x is not None:
        ax.scatter(opt_x[0, 0], opt_x[0, 1], opt_x[0, 2], s=50, c='deepskyblue', marker='D')
    # Plot true minimum (star marker)
    if true_opt_x is not None:
        ax.scatter(true_opt_x[0, 0], true_opt_x[0, 1], true_opt_x[0, 2], s=100, c='limegreen', marker='*')
    # Limits
    lim = 1.1
    ax.set_xlim([-lim, lim])
    ax.set_ylim([-lim, lim])
    ax.set_zlim([-lim, lim])
    # Labels
    ax.locator_params(axis='x', nbins=4)
    ax.locator_params(axis='y', nbins=4)
    ax.locator_params(axis='z', nbins=4)
    ax.tick_params(labelsize=16)
    ax.set_xlabel(r'$x_1$', fontsize=24)
    ax.set_ylabel(r'$x_2$', fontsize=24)
    ax.set_zlabel(r'$x_3$', fontsize=24)
def bo_plot_gp_sphere_planar(fig, model, var_fact=2., xs=None, ys=None, opt_x=None, opt_y=None, true_opt_x=None,
                             true_opt_y=None, max_colors=None, n_elems=100):
    """
    Plot a GP on the sphere as 2d-projections

    Parameters
    ----------
    :param fig: figure
    :param model: GP model

    Optional parameters
    -------------------
    :param var_fact: displayed variance factor
    :param xs: observations of the GP (samples of the BO) [n x 3]
    :param ys: value of the observations
    :param opt_x: current best optimizer (of the BO) [1 x 3]
    :param opt_y: value of the current best optimizer (of the BO)
    :param true_opt_x: true optimum [1 x 3]
    :param true_opt_y: true optimum value
    :param max_colors: maximum value (to bound the colors)
    :param n_elems: number of elements to approximate the sphere

    Returns
    -------
    :return: axis of the two subplots
    """
    # Sphere mesh
    u = np.linspace(0, 2 * np.pi, n_elems)
    v = np.linspace(0, np.pi, n_elems)
    r = 1
    x_sphere = r * np.outer(np.cos(u), np.sin(v))
    y_sphere = r * np.outer(np.sin(u), np.sin(v))
    z_sphere = r * np.outer(np.ones(np.size(u)), np.cos(v))
    # GP posterior mean and variance at each surface point
    fmean = np.zeros(x_sphere.shape)
    fvar = np.zeros(x_sphere.shape)
    for i in range(x_sphere.shape[0]):
        for j in range(x_sphere.shape[1]):
            data_tmp = torch.tensor([[x_sphere[i, j], y_sphere[i, j], z_sphere[i, j]]]).double()
            data_tmp_vals = model(data_tmp)
            fmean[i, j] = data_tmp_vals.mean.detach().numpy()
            fvar[i, j] = data_tmp_vals.variance.detach().numpy()
    # Shift values so the (known or observed) minimum maps to 0
    if true_opt_y is not None:
        min_colors = true_opt_y
    else:
        min_colors = np.min(fmean)
    colors = fmean - min_colors
    if max_colors is None:
        max_colors = np.max(colors)
    else:
        # BUGFIX: clip values above max_colors; the result of np.min was
        # previously discarded, making the bound a silent no-op.
        colors = np.min([colors, max_colors * np.ones(colors.shape)], axis=0)
    colors = colors / max_colors
    colors = pl.cm.inferno(np.ones(colors.shape) - colors)
    if ys is not None:
        colors_ys = pl.cm.inferno(np.ones(ys.shape) - (ys - min_colors) / max_colors)
    # Plot x vs y
    ax1 = fig.add_subplot(121, projection='3d')
    # Make the panes transparent
    ax1.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax1.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax1.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax1.plot_surface(x_sphere, y_sphere, fmean, rstride=8, cstride=8, facecolors=colors, alpha=0.2, edgecolor='white',
                     linewidth=0.3)
    # Confidence envelope: mean +/- var_fact * variance
    ax1.plot_surface(x_sphere, y_sphere, fmean + var_fact*fvar, rstride=4, cstride=4, color=[0.5, 0.5, 0.5], alpha=0.1)
    ax1.plot_surface(x_sphere, y_sphere, fmean - var_fact*fvar, rstride=4, cstride=4, color=[0.5, 0.5, 0.5], alpha=0.1)
    # Plots xs
    if xs is not None and ys is not None:
        for n in range(xs.shape[0]):
            ax1.scatter(xs[n, 0], xs[n, 1], ys[n], c=colors_ys[n], s=25)
    # Plot current BO optimum (diamond marker)
    if opt_x is not None and opt_y is not None:
        ax1.scatter(opt_x[0, 0], opt_x[0, 1], opt_y, s=50, c='deepskyblue', marker='D')
    # Plot true minimum (star marker)
    if true_opt_x is not None and true_opt_y is not None:
        ax1.scatter(true_opt_x[0, 0], true_opt_x[0, 1], true_opt_y, s=100, c='limegreen', marker='*')
    ax1.locator_params(axis='x', nbins=4)
    ax1.locator_params(axis='y', nbins=4)
    ax1.locator_params(axis='z', nbins=4)
    ax1.tick_params(labelsize=16)
    ax1.set_xlabel(r'$x_1$', fontsize=24)
    ax1.set_ylabel(r'$x_2$', fontsize=24)
    ax1.set_zlabel(r'$f(\bm{x})$', fontsize=24)
    # Plot y vs z
    ax2 = fig.add_subplot(122, projection='3d')
    # Make the panes transparent
    ax2.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax2.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax2.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax2.plot_surface(y_sphere, z_sphere, fmean, rstride=8, cstride=8, facecolors=colors, alpha=0.2,
                     edgecolor='white', linewidth=0.3)
    # Confidence envelope: mean +/- var_fact * variance
    ax2.plot_surface(y_sphere, z_sphere, fmean + var_fact*fvar, rstride=4, cstride=4, color=[0.5, 0.5, 0.5], alpha=0.1)
    ax2.plot_surface(y_sphere, z_sphere, fmean - var_fact*fvar, rstride=4, cstride=4, color=[0.5, 0.5, 0.5], alpha=0.1)
    # Plots xs
    if xs is not None and ys is not None:
        for n in range(xs.shape[0]):
            ax2.scatter(xs[n, 1], xs[n, 2], ys[n], c=colors_ys[n], s=25)
    # Plot current BO optimum (diamond marker)
    if opt_x is not None and opt_y is not None:
        ax2.scatter(opt_x[0, 1], opt_x[0, 2], opt_y, s=50, c='deepskyblue', marker='D')
    # Plot true minimum (star marker)
    if true_opt_x is not None and true_opt_y is not None:
        ax2.scatter(true_opt_x[0, 1], true_opt_x[0, 2], true_opt_y, s=100, c='limegreen', marker='*')
    ax2.locator_params(axis='x', nbins=4)
    ax2.locator_params(axis='y', nbins=4)
    ax2.locator_params(axis='z', nbins=4)
    ax2.tick_params(labelsize=16)
    ax2.set_xlabel(r'$x_2$', fontsize=24)
    ax2.set_ylabel(r'$x_3$', fontsize=24)
    ax2.set_zlabel(r'$f(\bm{x})$', fontsize=24)
    return ax1, ax2
def bo_plot_function_spd(ax, function, r_cone, true_opt_x=None, true_opt_y=None, chol=False, max_colors=None,
                         alpha=0.3, elev=10, azim=-20, n_elems=100, n_elems_h=10):
    """
    Plot a function in the SPD cone

    Parameters
    ----------
    :param ax: figure axis
    :param function: function
    :param r_cone: cone radius

    Optional parameters
    -------------------
    :param true_opt_x: true minimum point on the manifold [1 x 3]
    :param true_opt_y: true minimum value
    :param chol: if True, the Cholesky decomposition is used
    :param max_colors: maximum value (to bound the colors)
    :param alpha: transparency
    :param elev: axis elevation
    :param azim: axis azimut
    :param n_elems: number of elements to plot in a slice of the cone
    :param n_elems_h: number of slices of the cone to plot

    Returns
    -------
    :return: max_colors
    """
    # Make the panes transparent
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    # Make the grid lines transparent
    ax.xaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    ax.yaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    ax.zaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    # Remove axis
    ax._axis3don = False
    # Initial view
    ax.view_init(elev=elev, azim=azim)
    # Plot SPD cone
    plot_spd_cone(ax, r=r_cone, lim_fact=0.8)
    # Values of test function for points on the manifold
    phi = np.linspace(0, 2 * np.pi, n_elems)
    # Matrix for rotation of 45 degrees of the cone
    # (renamed from `dir`, which shadowed the builtin)
    rot_axis = np.cross(np.array([1, 0, 0]), np.array([1., 1., 0.]))
    R = rotation_matrix_from_axis_angle(rot_axis, np.pi / 4.)
    # Points of the cone
    h = np.linspace(0.01, r_cone, n_elems_h)
    x_cone = np.zeros((n_elems_h, n_elems, n_elems))
    y_cone = np.zeros((n_elems_h, n_elems, n_elems))
    z_cone = np.zeros((n_elems_h, n_elems, n_elems))
    colors = np.zeros((n_elems_h, n_elems, n_elems))
    # Hoisted out of the loops: constant lower-triangular index pair for 2x2.
    indices = np.tril_indices(2)
    for k in range(n_elems_h):
        r = np.linspace(0, h[k] - 0.01, n_elems)
        for i in range(n_elems):
            # Points on a plane cutting the cone
            xyz = np.vstack((h[k] * np.ones(n_elems), r[i] * np.sin(phi), r[i] / np.sqrt(2) * np.cos(phi)))
            # Rotation
            xyz = R.dot(xyz)
            # Coordinates
            x_cone[k, i] = xyz[0]
            y_cone[k, i] = xyz[1]
            z_cone[k, i] = xyz[2]
        # Compute the function values at given points
        for i in range(n_elems):
            for j in range(n_elems):
                if not chol:
                    data_tmp = torch.tensor([[x_cone[k, i, j], y_cone[k, i, j], z_cone[k, i, j] * np.sqrt(2)]]).double()
                    colors[k, i, j] = function(data_tmp).detach().numpy()
                else:
                    # Build the 2x2 SPD matrix and evaluate on its Cholesky factor
                    data_tmp = np.array([[x_cone[k, i, j], z_cone[k, i, j]], [z_cone[k, i, j], y_cone[k, i, j]]])
                    data_chol_tmp = torch.tensor(np.linalg.cholesky(data_tmp), dtype=torch.float64)
                    colors[k, i, j] = function(data_chol_tmp[indices]).detach().numpy()
    # Rescale the colors
    if true_opt_y is not None:
        min_colors = true_opt_y
    else:
        min_colors = np.min(colors)
    colors = (colors - min_colors)
    if max_colors is None:
        max_colors = np.max(colors)
    else:
        # BUGFIX: clip values above max_colors; the result of np.min was
        # previously discarded, making the bound a silent no-op.
        colors = np.min([colors, max_colors * np.ones(colors.shape)], axis=0)
    colors = colors / max_colors
    # Plot surfaces
    for k in range(n_elems_h):
        colors_plot = pl.cm.inferno(np.ones((n_elems, n_elems)) - colors[k])
        ax.plot_surface(x_cone[k], y_cone[k], z_cone[k], rstride=4, cstride=4, facecolors=colors_plot, linewidth=0., alpha=alpha)
    # Plot optimal point (true_opt_x is a 2x2 SPD matrix here)
    if true_opt_x is not None:
        ax.scatter(true_opt_x[0, 0], true_opt_x[1, 1], true_opt_x[0, 1], s=100, c='g', marker='*')
    return max_colors
def bo_plot_acquisition_spd(ax, acq_fct, r_cone, xs=None, ys=None, opt_x=None, true_opt_x=None, chol=False, alpha=0.3,
                            elev=10, azim=-20, n_elems=100, n_elems_h=10):
    """
    Plot an acquisition function in the SPD cone.

    Parameters
    ----------
    :param ax: figure axis (3D matplotlib axis)
    :param acq_fct: acquisition function (torch callable returning a scalar per input)
    :param r_cone: cone radius

    Optional parameters
    -------------------
    :param xs: samples of the BO [n x 3]
    :param ys: value of the samples of the BO
    :param opt_x: current best optimizer of the BO [1 x 3]
    :param true_opt_x: true minimum point [1 x 3]
    :param chol: if True, the acquisition function is evaluated on the Cholesky decomposition of the SPD matrix
    :param alpha: transparency
    :param elev: axis elevation
    :param azim: axis azimut
    :param n_elems: number of elements to plot in a slice of the cone
    :param n_elems_h: number of slices of the cone to plot

    Returns
    -------
    :return: -
    """
    # Make the panes transparent
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    # Make the grid lines transparent
    ax.xaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    ax.yaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    ax.zaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    # Remove axis
    ax._axis3don = False
    # Initial view
    ax.view_init(elev=elev, azim=azim)
    # Plot SPD cone
    plot_spd_cone(ax, r=r_cone, lim_fact=0.8)
    # Angular discretization of each circular slice of the cone
    phi = np.linspace(0, 2 * np.pi, n_elems)
    # Axis/matrix for the 45° rotation of the cone
    # (renamed from `dir` to avoid shadowing the builtin)
    rot_axis = np.cross(np.array([1, 0, 0]), np.array([1., 1., 0.]))
    R = rotation_matrix_from_axis_angle(rot_axis, np.pi / 4.)
    # Points of the cone
    h = np.linspace(0.01, r_cone, n_elems_h)
    x_cone = np.zeros((n_elems_h, n_elems, n_elems))
    y_cone = np.zeros((n_elems_h, n_elems, n_elems))
    z_cone = np.zeros((n_elems_h, n_elems, n_elems))
    colors = np.zeros((n_elems_h, n_elems, n_elems))
    # Loop-invariant: lower-triangular indices of a 2x2 matrix (hoisted out of the loops below)
    indices = np.tril_indices(2)
    for k in range(n_elems_h):
        r = np.linspace(0, h[k] - 0.01, n_elems)
        for i in range(n_elems):
            # Points on a plane cutting the cone
            xyz = np.vstack((h[k] * np.ones(n_elems), r[i] * np.sin(phi), r[i] / np.sqrt(2) * np.cos(phi)))
            # Rotation
            xyz = R.dot(xyz)
            # Coordinates
            x_cone[k, i] = xyz[0]
            y_cone[k, i] = xyz[1]
            z_cone[k, i] = xyz[2]
        # Compute the acquisition-function values at the grid points
        for i in range(n_elems):
            for j in range(n_elems):
                if not chol:
                    data_tmp = torch.tensor([[x_cone[k, i, j], y_cone[k, i, j], z_cone[k, i, j] * np.sqrt(2)]]).double()
                    colors[k, i, j] = acq_fct(data_tmp).detach().numpy()
                else:
                    # Evaluate on the lower-triangular part of the Cholesky factor of the SPD matrix
                    data_tmp = np.array([[x_cone[k, i, j], z_cone[k, i, j]], [z_cone[k, i, j], y_cone[k, i, j]]])
                    data_chol_tmp = torch.tensor(np.linalg.cholesky(data_tmp), dtype=torch.float64)
                    colors[k, i, j] = acq_fct(data_chol_tmp[indices][None]).detach().numpy()
    # Rescale the values to [0, 1] for the colormap
    min_colors = np.min(colors)
    colors = (colors - min_colors)
    max_colors = np.max(colors)
    colors = np.min([max_colors * np.ones(colors.shape), colors], axis=0)
    colors = colors / max_colors
    if ys is not None:
        # NOTE(review): computed but currently unused (the colored scatter below is commented out)
        colors_ys = pl.cm.inferno(np.ones(ys.shape) - (ys - min_colors) / max_colors)
    # Plot the cone slices colored by the acquisition values
    for k in range(n_elems_h):
        colors_plot = pl.cm.inferno(np.ones((n_elems, n_elems)) - colors[k])
        ax.plot_surface(x_cone[k], y_cone[k], z_cone[k], rstride=4, cstride=4, facecolors=colors_plot, linewidth=0.,
                        alpha=alpha)
    # Plots xs
    if xs is not None and ys is not None:
        for n in range(xs.shape[0]):
            ax.scatter(xs[n, 0], xs[n, 1], xs[n, 2] / np.sqrt(2), s=30, c='k')
            # ax.scatter(xs[n, 0], xs[n, 1], xs[n, 2] / np.sqrt(2), s=30, c=colors_ys[n])
    # Plot opt x
    if opt_x is not None:
        ax.scatter(opt_x[0, 0], opt_x[0, 1], opt_x[0, 2] / np.sqrt(2), s=60, c='deepskyblue', marker='D')
    # Plot true minimum
    if true_opt_x is not None:
        ax.scatter(true_opt_x[0, 0], true_opt_x[0, 1], true_opt_x[0, 2] / np.sqrt(2), s=100, c='g', marker='*')
def bo_plot_gp_spd(ax, model, r_cone, xs=None, ys=None, opt_x=None, true_opt_x=None, true_opt_y=None, chol=False,
                   max_colors=None, alpha=0.3, elev=10, azim=-20, n_elems=100, n_elems_h=10):
    """
    Plot a GP in the SPD cone.

    Parameters
    ----------
    :param ax: figure axis (3D matplotlib axis)
    :param model: GP model (torch callable whose output exposes `mean` and `variance`)
    :param r_cone: cone radius

    Optional parameters
    -------------------
    :param xs: samples of the BO [n x 3]
    :param ys: value of the samples of the BO
    :param opt_x: current best optimizer of the BO [1 x 3]
    :param true_opt_x: true minimum point [1 x 3]
    :param true_opt_y: true minimum value
    :param chol: if True, the model is evaluated on the Cholesky decomposition of the SPD matrix
    :param max_colors: maximum value (to bound the colors)
    :param alpha: transparency
    :param elev: axis elevation
    :param azim: axis azimut
    :param n_elems: number of elements to plot in a slice of the cone
    :param n_elems_h: number of slices of the cone to plot

    Returns
    -------
    :return: -
    """
    # Make the panes transparent
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    # Make the grid lines transparent
    ax.xaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    ax.yaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    ax.zaxis._axinfo["grid"]['color'] = (1, 1, 1, 0)
    # Remove axis
    ax._axis3don = False
    # Initial view
    ax.view_init(elev=elev, azim=azim)
    # Plot SPD cone
    plot_spd_cone(ax, r=r_cone, lim_fact=0.8)
    # Values of the GP for points on the manifold
    phi = np.linspace(0, 2 * np.pi, n_elems)
    # Axis/matrix for the 45° rotation of the cone
    # (renamed from `dir` to avoid shadowing the builtin)
    rot_axis = np.cross(np.array([1, 0, 0]), np.array([1., 1., 0.]))
    R = rotation_matrix_from_axis_angle(rot_axis, np.pi / 4.)
    # Points of the cone
    h = np.linspace(0.01, r_cone, n_elems_h)
    x_cone = np.zeros((n_elems_h, n_elems, n_elems))
    y_cone = np.zeros((n_elems_h, n_elems, n_elems))
    z_cone = np.zeros((n_elems_h, n_elems, n_elems))
    colors = np.zeros((n_elems_h, n_elems, n_elems))
    # NOTE(review): the predictive variance is computed but not used below — kept for parity with the original
    var = np.zeros((n_elems_h, n_elems, n_elems))
    # Loop-invariant: lower-triangular indices of a 2x2 matrix (hoisted out of the loops below)
    indices = np.tril_indices(2)
    for k in range(n_elems_h):
        r = np.linspace(0, h[k] - 0.01, n_elems)
        for i in range(n_elems):
            # Points on a plane cutting the cone
            xyz = np.vstack((h[k] * np.ones(n_elems), r[i] * np.sin(phi), r[i] / np.sqrt(2) * np.cos(phi)))
            # Rotation
            xyz = R.dot(xyz)
            # Coordinates
            x_cone[k, i] = xyz[0]
            y_cone[k, i] = xyz[1]
            z_cone[k, i] = xyz[2]
        # Compute the GP mean/variance at the grid points
        for i in range(n_elems):
            for j in range(n_elems):
                if not chol:
                    data_tmp = torch.tensor([[x_cone[k, i, j], y_cone[k, i, j], z_cone[k, i, j] * np.sqrt(2)]]).double()
                    data_tmp_vals = model(data_tmp)
                    colors[k, i, j] = data_tmp_vals.mean.detach().numpy()
                    var[k, i, j] = data_tmp_vals.variance.detach().numpy()
                else:
                    data_tmp = np.array([[x_cone[k, i, j], z_cone[k, i, j]], [z_cone[k, i, j], y_cone[k, i, j]]])
                    # FIX: convert the Cholesky factor to a torch tensor before querying the model,
                    # consistently with the non-Cholesky branch (a raw numpy array was passed before)
                    data_chol_tmp = torch.tensor(np.linalg.cholesky(data_tmp), dtype=torch.float64)
                    data_tmp_vals = model(data_chol_tmp[indices][None])
                    colors[k, i, j] = data_tmp_vals.mean.detach().numpy()
                    var[k, i, j] = data_tmp_vals.variance.detach().numpy()
    # Rescale the colors, anchoring the minimum at the true optimum when known
    if true_opt_y is not None:
        min_colors = true_opt_y
    else:
        min_colors = np.min(colors)
    colors = (colors - min_colors)
    if max_colors is None:
        max_colors = np.max(colors)
    else:
        # FIX: assign the clipped values; the result of np.min was previously discarded,
        # so the user-supplied max_colors bound had no effect on the plotted colors
        colors = np.min([colors, max_colors * np.ones(colors.shape)], axis=0)
    colors = colors / max_colors
    if ys is not None:
        # NOTE(review): computed but currently unused (the colored scatter below is commented out)
        colors_ys = pl.cm.inferno(np.ones(ys.shape) - (ys - min_colors) / max_colors)
    # Plot the cone slices colored by the GP mean
    for k in range(n_elems_h):
        colors_plot = pl.cm.inferno(np.ones((n_elems, n_elems)) - colors[k])
        ax.plot_surface(x_cone[k], y_cone[k], z_cone[k], rstride=4, cstride=4, facecolors=colors_plot, linewidth=0., alpha=alpha)
    # Plots xs
    if xs is not None and ys is not None:
        for n in range(xs.shape[0]):
            ax.scatter(xs[n, 0], xs[n, 1], xs[n, 2] / np.sqrt(2), s=30, c='k')
            # ax.scatter(xs[n, 0], xs[n, 1], xs[n, 2] / np.sqrt(2), s=30, c=colors_ys[n])
    # Plot opt x
    if opt_x is not None:
        ax.scatter(opt_x[0, 0], opt_x[0, 1], opt_x[0, 2] / np.sqrt(2), s=60, c='deepskyblue', marker='D')
    # Plot true minimum
    if true_opt_x is not None:
        ax.scatter(true_opt_x[0, 0], true_opt_x[0, 1], true_opt_x[0, 2] / np.sqrt(2), s=100, c='g', marker='*')
def bo_plot_gp_spd_planar(fig, model, r_cone, var_fact=2., xs=None, ys=None, opt_x=None, opt_y=None, true_opt_x=None,
                          true_opt_y=None, max_colors=None, n_elems=10):
    """
    Stub: does nothing and returns 0.

    NOTE(review): the body may have been truncated in this copy of the file
    (the signature mirrors the other bo_plot_* helpers but no plotting is
    performed) — confirm against the upstream source.
    """
    return 0
| 35.998927
| 129
| 0.593723
| 5,689
| 33,551
| 3.346985
| 0.048163
| 0.011974
| 0.009453
| 0.012604
| 0.945644
| 0.932672
| 0.919595
| 0.908723
| 0.895646
| 0.88509
| 0
| 0.042259
| 0.255194
| 33,551
| 931
| 130
| 36.037594
| 0.719597
| 0.255194
| 0
| 0.832589
| 0
| 0
| 0.021659
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.020089
| false
| 0
| 0.013393
| 0.002232
| 0.044643
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
58ad3156ec908bcf8e0fea57fa31ac9c62cec78c
| 147
|
py
|
Python
|
tests/project/tests/test_assert.py
|
Igorxp5/py-ekstazi
|
69a6d1542fd17fae792ca91f7175196df1bab41f
|
[
"MIT"
] | 4
|
2022-03-25T10:42:16.000Z
|
2022-03-31T01:16:06.000Z
|
tests/project/tests/test_assert.py
|
Igorxp5/py-ekstazi
|
69a6d1542fd17fae792ca91f7175196df1bab41f
|
[
"MIT"
] | 10
|
2022-03-19T20:00:29.000Z
|
2022-03-20T14:32:14.000Z
|
tests/project/tests/test_assert.py
|
Igorxp5/pytest-ekstazi
|
69a6d1542fd17fae792ca91f7175196df1bab41f
|
[
"MIT"
] | null | null | null |
def test_assert_false():
assert False, 'The assert should fail' # xfail
def test_assert_passed():
assert True, 'The assert should pass'
| 21
| 51
| 0.714286
| 21
| 147
| 4.809524
| 0.52381
| 0.138614
| 0.257426
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.197279
| 147
| 6
| 52
| 24.5
| 0.855932
| 0.034014
| 0
| 0
| 0
| 0
| 0.314286
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
58cc4c28b45bb5d52e7ecc8b4defee22deb50364
| 47
|
py
|
Python
|
gym_recorder/__init__.py
|
bryanoliveira/gym-recorder
|
54e89e8efc003c15a49722fe3b0443342653af3d
|
[
"MIT"
] | null | null | null |
gym_recorder/__init__.py
|
bryanoliveira/gym-recorder
|
54e89e8efc003c15a49722fe3b0443342653af3d
|
[
"MIT"
] | null | null | null |
gym_recorder/__init__.py
|
bryanoliveira/gym-recorder
|
54e89e8efc003c15a49722fe3b0443342653af3d
|
[
"MIT"
] | null | null | null |
from .wrapper import TransitionRecorderWrapper
| 23.5
| 46
| 0.893617
| 4
| 47
| 10.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 1
| 47
| 47
| 0.976744
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4517ea76d7b2b85797c8c931f04d8bc01114f9c3
| 10,858
|
py
|
Python
|
tests/time_based_scene_switch_test.py
|
Robert1991/appdaemon
|
ba346d5b79d24ae7684390e717c1030e317d7600
|
[
"Unlicense"
] | null | null | null |
tests/time_based_scene_switch_test.py
|
Robert1991/appdaemon
|
ba346d5b79d24ae7684390e717c1030e317d7600
|
[
"Unlicense"
] | null | null | null |
tests/time_based_scene_switch_test.py
|
Robert1991/appdaemon
|
ba346d5b79d24ae7684390e717c1030e317d7600
|
[
"Unlicense"
] | null | null | null |
from apps.lights.time_based_scene_switch import TimeBasedSceneSwitch
from appdaemon.plugins.hass.hassapi import Hass
from appdaemontestframework.hass_mocks import MockHandler
import pytest
from appdaemontestframework import automation_fixture
from datetime import time
import mock
from mock import patch
from mock import Mock
@automation_fixture(TimeBasedSceneSwitch)
def scene_switch(given_that, hass_mocks):
given_that.passed_arg('light_group') \
.is_set_to('light.some_light_group')
given_that.passed_arg('scene_switch_input_select') \
.is_set_to('input_select.some_time_based_scenes')
given_that.passed_arg('light_automatic_enabled') \
.is_set_to('input_boolean.some_light_automatic_switch')
given_that.passed_arg('toggled_scene_input_select') \
.is_set_to('input_select.some_scene_toggle_input_select')
given_that.passed_arg('scene_group_prefix') \
.is_set_to('some_room')
given_that.passed_arg('light_automatic_enabled') \
.is_set_to('input_boolean.some_light_automatic_switch')
TimeBasedSceneSwitch.initialize_on_creation = False
TimeBasedSceneSwitch.scene_utils = Mock()
def test_refresh_listeners_check_that_scene_switch_timer_are_set_check_current_scene_activated(given_that, scene_switch, assert_that, time_travel):
given_that.time_is(time(hour=20))
given_that.state_of('light.some_light_group') \
.is_set_to('on')
given_that.state_of('input_datetime.work_light_start_time') \
.is_set_to('08:30:00')
given_that.state_of('input_select.work_light_input_select') \
.is_set_to('Work Light')
given_that.state_of('input_datetime.night_light_start_time') \
.is_set_to('22:45:00')
given_that.state_of('input_select.night_light_input_select') \
.is_set_to('Night Light')
given_that.state_of('input_select.some_time_based_scenes') \
.is_set_to('work_light_start_time/Work Light',
{'options': ['work_light_start_time/work_light_input_select',
'night_light_start_time/night_light_input_select']})
with patch('appdaemon.plugins.hass.hassapi.Hass.select_option') as mock:
scene_switch.refresh_listeners(None, None, None, None, None)
assert_that(scene_switch) \
.registered.run_daily(time(hour=8, minute=30), scene="Work Light") \
.with_callback(scene_switch.toggle_scene)
assert_that(scene_switch) \
.listens_to.state("input_datetime.work_light_start_time") \
.with_callback(scene_switch.refresh_listeners)
assert_that(scene_switch) \
.listens_to.state("input_select.work_light_input_select") \
.with_callback(scene_switch.refresh_listeners)
assert_that(scene_switch) \
.registered.run_daily(time(hour=22, minute=45), scene="Night Light") \
.with_callback(scene_switch.toggle_scene)
assert_that(scene_switch) \
.listens_to.state("input_datetime.night_light_start_time") \
.with_callback(scene_switch.refresh_listeners)
assert_that(scene_switch) \
.listens_to.state("input_select.night_light_input_select") \
.with_callback(scene_switch.refresh_listeners)
mock.assert_called_with(
"input_select.some_scene_toggle_input_select", "Work Light")
scene_switch.scene_utils.turn_on_current_scene.assert_called_once_with(
"some_room", "input_select.some_scene_toggle_input_select")
def test_refresh_listeners_check_that_scene_switch_timer_are_set_check_current_scene_activated_during_night(given_that, scene_switch, assert_that, time_travel):
given_that.state_of('light.some_light_group') \
.is_set_to('off')
given_that.time_is(time(hour=1))
given_that.state_of('input_datetime.work_light_start_time') \
.is_set_to('08:30:00')
given_that.state_of('input_select.work_light_input_select') \
.is_set_to('Work Light')
given_that.state_of('input_datetime.night_light_start_time') \
.is_set_to('22:45:00')
given_that.state_of('input_select.night_light_input_select') \
.is_set_to('Night Light')
given_that.state_of('input_select.some_time_based_scenes') \
.is_set_to('work_light_start_time/work_light_input_select',
{'options': ['night_light_start_time/night_light_input_select',
'work_light_start_time/work_light_input_select']})
with patch('appdaemon.plugins.hass.hassapi.Hass.select_option') as mock:
scene_switch.refresh_listeners(None, None, None, None, None)
assert_that(scene_switch) \
.registered.run_daily(time(hour=8, minute=30), scene="Work Light") \
.with_callback(scene_switch.toggle_scene)
assert_that(scene_switch) \
.listens_to.state("input_datetime.work_light_start_time") \
.with_callback(scene_switch.refresh_listeners)
assert_that(scene_switch) \
.listens_to.state("input_select.work_light_input_select") \
.with_callback(scene_switch.refresh_listeners)
assert_that(scene_switch) \
.registered.run_daily(time(hour=22, minute=45), scene="Night Light") \
.with_callback(scene_switch.toggle_scene)
assert_that(scene_switch) \
.listens_to.state("input_datetime.night_light_start_time") \
.with_callback(scene_switch.refresh_listeners)
assert_that(scene_switch) \
.listens_to.state("input_select.night_light_input_select") \
.with_callback(scene_switch.refresh_listeners)
mock.assert_called_with(
"input_select.some_scene_toggle_input_select", "Night Light")
scene_switch.scene_utils.turn_on_current_scene.assert_not_called()
def test_refresh_listeners_listeners_updated_on_next_call(given_that, scene_switch, assert_that, time_travel):
given_that.state_of('light.some_light_group') \
.is_set_to('off')
given_that.time_is(time(hour=8))
given_that.state_of('input_select.some_time_based_scenes') \
.is_set_to('work_light_start_time/work_light_input_select',
{'options': ['work_light_start_time/work_light_input_select']})
given_that.state_of('input_datetime.work_light_start_time') \
.is_set_to('08:30:00')
given_that.state_of('input_select.work_light_input_select') \
.is_set_to('Work Light')
with patch('appdaemon.plugins.hass.hassapi.Hass.select_option') as mock:
scene_switch.refresh_listeners(None, None, None, None, None)
assert_that(scene_switch) \
.listens_to.state("input_datetime.work_light_start_time") \
.with_callback(scene_switch.refresh_listeners)
assert_that(scene_switch) \
.listens_to.state("input_select.work_light_input_select") \
.with_callback(scene_switch.refresh_listeners)
mock.assert_called_with(
"input_select.some_scene_toggle_input_select", "Work Light")
given_that.mock_functions_are_cleared()
given_that.state_of('input_select.some_time_based_scenes') \
.is_set_to('night_light_start_time/night_light_input_select',
{'options': ['night_light_start_time/night_light_input_select']})
given_that.state_of('input_datetime.night_light_start_time') \
.is_set_to('10:30:00')
given_that.state_of('input_select.night_light_input_select') \
.is_set_to('Night Light')
with patch('appdaemon.plugins.hass.hassapi.Hass.select_option') as select_option_mock:
with patch('appdaemon.plugins.hass.hassapi.Hass.cancel_listen_state'):
scene_switch.refresh_listeners(None, None, None, None, None)
assert_that(scene_switch) \
.registered.run_daily(time(hour=10, minute=30), scene="Night Light") \
.with_callback(scene_switch.toggle_scene)
select_option_mock.assert_called_with(
"input_select.some_scene_toggle_input_select", "Night Light")
assert_that(scene_switch) \
.listens_to.state("input_datetime.night_light_start_time") \
.with_callback(scene_switch.refresh_listeners)
assert_that(scene_switch) \
.listens_to.state("input_select.night_light_input_select") \
.with_callback(scene_switch.refresh_listeners)
scene_switch.scene_utils.turn_on_current_scene.assert_not_called()
with pytest.raises(AssertionError):
assert_that(scene_switch) \
.listens_to.state("input_datetime.work_light_start_time") \
.with_callback(scene_switch.refresh_listeners)
with pytest.raises(AssertionError):
assert_that(scene_switch) \
.listens_to.state("input_select.work_light_input_select") \
.with_callback(scene_switch.refresh_listeners)
with pytest.raises(AssertionError):
assert_that(scene_switch) \
.registered.run_daily(time(hour=8, minute=30), scene="Work Light") \
.with_callback(scene_switch.toggle_scene)
def test_refresh_listeners_not_registered_when_input_select_entry_malformed(given_that, scene_switch, assert_that, time_travel):
given_that.state_of('input_datetime.work_light_start_time') \
.is_set_to('08:30:00')
given_that.state_of('input_select.some_time_based_scenes') \
.is_set_to('work_light_start_time/Work Light',
{'options': ['work_light_start_time_Work Light']})
scene_switch.refresh_listeners(None, None, None, None, None)
scene_switch.scene_utils.turn_on_current_scene.assert_not_called()
with pytest.raises(AssertionError):
assert_that(scene_switch) \
.registered.run_daily(time(hour=8, minute=30), scene="Work Light") \
.with_callback(scene_switch.toggle_scene)
def test_toggle_scene(given_that, scene_switch, assert_that, time_travel):
given_that.state_of('light.some_light_group') \
.is_set_to('off')
with patch('appdaemon.plugins.hass.hassapi.Hass.select_option') as mock:
scene_switch.toggle_scene({"scene": "Scene Name"})
mock.assert_called_with(
"input_select.some_scene_toggle_input_select", "Scene Name")
scene_switch.scene_utils.turn_on_current_scene.assert_not_called()
def test_toggle_scene_when_light_group_is_on(given_that, scene_switch, assert_that, time_travel):
given_that.state_of('light.some_light_group') \
.is_set_to('on')
with patch('appdaemon.plugins.hass.hassapi.Hass.select_option') as mock:
scene_switch.toggle_scene({"scene": "Scene Name"})
mock.assert_called_with(
"input_select.some_scene_toggle_input_select", "Scene Name")
scene_switch.scene_utils.turn_on_current_scene.assert_called_once_with(
"some_room", "input_select.some_scene_toggle_input_select")
| 50.268519
| 161
| 0.726929
| 1,448
| 10,858
| 4.967541
| 0.071133
| 0.100931
| 0.028222
| 0.051161
| 0.894898
| 0.883637
| 0.879605
| 0.870986
| 0.8518
| 0.831642
| 0
| 0.007786
| 0.171947
| 10,858
| 215
| 162
| 50.502326
| 0.792237
| 0
| 0
| 0.767196
| 0
| 0
| 0.294842
| 0.256695
| 0
| 0
| 0
| 0
| 0.227513
| 1
| 0.037037
| false
| 0.031746
| 0.047619
| 0
| 0.084656
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
189a8143e6eab50513d3340aefaf0e8ccc7a4e3e
| 298
|
py
|
Python
|
src/linearized_nns/from_neural_kernels/__init__.py
|
maxkvant/LinearizedNNs
|
eb0198be70ca55e7463b97a5023d2f6ffe0f8ba6
|
[
"Apache-2.0"
] | 1
|
2020-06-05T15:26:41.000Z
|
2020-06-05T15:26:41.000Z
|
src/linearized_nns/from_neural_kernels/__init__.py
|
maxkvant/LinearizedNNs
|
eb0198be70ca55e7463b97a5023d2f6ffe0f8ba6
|
[
"Apache-2.0"
] | null | null | null |
src/linearized_nns/from_neural_kernels/__init__.py
|
maxkvant/LinearizedNNs
|
eb0198be70ca55e7463b97a5023d2f6ffe0f8ba6
|
[
"Apache-2.0"
] | null | null | null |
from linearized_nns.from_neural_kernels.custom_tensor_dataset import CustomTensorDataset
from linearized_nns.from_neural_kernels.transforms import Cutout, RandomCrop
from linearized_nns.from_neural_kernels.utils import to_zca
from linearized_nns.from_neural_kernels.cifar_zca import get_cifar_zca
| 49.666667
| 88
| 0.909396
| 43
| 298
| 5.883721
| 0.418605
| 0.221344
| 0.268775
| 0.332016
| 0.537549
| 0.537549
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060403
| 298
| 5
| 89
| 59.6
| 0.903571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
189b5182961b3f4f3204e13381b5f64b8ca3c340
| 113
|
py
|
Python
|
compliance_checker/cf/__init__.py
|
duncombe/compliance-checker
|
424c762c68c215a4548ff2006716ad5594605ee7
|
[
"Apache-2.0"
] | null | null | null |
compliance_checker/cf/__init__.py
|
duncombe/compliance-checker
|
424c762c68c215a4548ff2006716ad5594605ee7
|
[
"Apache-2.0"
] | null | null | null |
compliance_checker/cf/__init__.py
|
duncombe/compliance-checker
|
424c762c68c215a4548ff2006716ad5594605ee7
|
[
"Apache-2.0"
] | null | null | null |
from compliance_checker.cf.cf import *
from compliance_checker.cf.appendix_d import dimless_vertical_coordinates
| 37.666667
| 73
| 0.884956
| 16
| 113
| 5.9375
| 0.625
| 0.294737
| 0.442105
| 0.484211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.070796
| 113
| 2
| 74
| 56.5
| 0.904762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
18d40485dacd6915f927171a665b8e760a20b25c
| 42
|
py
|
Python
|
samples/src/main/resources/datasets/python/19.py
|
sritchie/kotlingrad
|
8165ed1cd77220a5347c58cded4c6f2bcf22ee30
|
[
"Apache-2.0"
] | 11
|
2020-12-19T01:19:44.000Z
|
2021-12-25T20:43:33.000Z
|
src/main/resources/datasets/python/19.py
|
breandan/katholic
|
081c39f3acc73ff41f5865563debe78a36e1038f
|
[
"Apache-2.0"
] | null | null | null |
src/main/resources/datasets/python/19.py
|
breandan/katholic
|
081c39f3acc73ff41f5865563debe78a36e1038f
|
[
"Apache-2.0"
] | 2
|
2021-01-25T07:59:20.000Z
|
2021-08-07T07:13:49.000Z
|
def power2():
return 2 ** 3 ** 4 ** 5
| 14
| 27
| 0.452381
| 7
| 42
| 2.714286
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.178571
| 0.333333
| 42
| 2
| 28
| 21
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
e1594cd56207b55ee16b243286a04ad5590abc98
| 3,806
|
py
|
Python
|
test/espnet2/enh/separator/test_skim_separator.py
|
texpomru13/espnet
|
7ef005e832e2fb033f356c16f54e0f08762fb4b0
|
[
"Apache-2.0"
] | 2
|
2022-02-24T09:22:57.000Z
|
2022-02-24T09:38:02.000Z
|
test/espnet2/enh/separator/test_skim_separator.py
|
texpomru13/espnet
|
7ef005e832e2fb033f356c16f54e0f08762fb4b0
|
[
"Apache-2.0"
] | 2
|
2019-04-23T04:43:33.000Z
|
2019-05-13T13:06:52.000Z
|
test/espnet2/enh/separator/test_skim_separator.py
|
texpomru13/espnet
|
7ef005e832e2fb033f356c16f54e0f08762fb4b0
|
[
"Apache-2.0"
] | 1
|
2022-03-18T21:02:16.000Z
|
2022-03-18T21:02:16.000Z
|
import pytest
import torch
from torch import Tensor
from torch_complex import ComplexTensor
from espnet2.enh.separator.skim_separator import SkiMSeparator
@pytest.mark.parametrize("input_dim", [5])
@pytest.mark.parametrize("layer", [1, 3])
@pytest.mark.parametrize("causal", [True, False])
@pytest.mark.parametrize("unit", [8])
@pytest.mark.parametrize("dropout", [0.0, 0.2])
@pytest.mark.parametrize("num_spk", [1, 2])
@pytest.mark.parametrize("nonlinear", ["relu", "sigmoid", "tanh"])
@pytest.mark.parametrize("mem_type", ["hc", "c", "h", None])
@pytest.mark.parametrize("segment_size", [2, 4])
@pytest.mark.parametrize("seg_overlap", [False, True])
def test_skim_separator_forward_backward_complex(
input_dim,
layer,
causal,
unit,
dropout,
num_spk,
nonlinear,
mem_type,
segment_size,
seg_overlap,
):
model = SkiMSeparator(
input_dim=input_dim,
causal=causal,
num_spk=num_spk,
nonlinear=nonlinear,
layer=layer,
unit=unit,
segment_size=segment_size,
dropout=dropout,
mem_type=mem_type,
seg_overlap=seg_overlap,
)
model.train()
real = torch.rand(2, 10, input_dim)
imag = torch.rand(2, 10, input_dim)
x = ComplexTensor(real, imag)
x_lens = torch.tensor([10, 8], dtype=torch.long)
masked, flens, others = model(x, ilens=x_lens)
assert isinstance(masked[0], ComplexTensor)
assert len(masked) == num_spk
masked[0].abs().mean().backward()
@pytest.mark.parametrize("input_dim", [5])
@pytest.mark.parametrize("layer", [1, 3])
@pytest.mark.parametrize("causal", [True, False])
@pytest.mark.parametrize("unit", [8])
@pytest.mark.parametrize("dropout", [0.0, 0.2])
@pytest.mark.parametrize("num_spk", [1, 2])
@pytest.mark.parametrize("nonlinear", ["relu", "sigmoid", "tanh"])
@pytest.mark.parametrize("mem_type", ["hc", "c", "h", "id", None])
@pytest.mark.parametrize("segment_size", [2, 4])
@pytest.mark.parametrize("seg_overlap", [False, True])
def test_skim_separator_forward_backward_real(
input_dim,
layer,
causal,
unit,
dropout,
num_spk,
nonlinear,
mem_type,
segment_size,
seg_overlap,
):
model = SkiMSeparator(
input_dim=input_dim,
causal=causal,
num_spk=num_spk,
nonlinear=nonlinear,
layer=layer,
unit=unit,
segment_size=segment_size,
dropout=dropout,
mem_type=mem_type,
seg_overlap=seg_overlap,
)
model.train()
x = torch.rand(2, 10, input_dim)
x_lens = torch.tensor([10, 8], dtype=torch.long)
masked, flens, others = model(x, ilens=x_lens)
assert isinstance(masked[0], Tensor)
assert len(masked) == num_spk
masked[0].abs().mean().backward()
def test_skim_separator_invalid_type():
with pytest.raises(ValueError):
SkiMSeparator(
input_dim=10,
layer=2,
unit=10,
dropout=0.1,
num_spk=2,
nonlinear="fff",
mem_type="aaa",
segment_size=2,
)
def test_skim_separator_output():
x = torch.rand(2, 10, 10)
x_lens = torch.tensor([10, 8], dtype=torch.long)
for num_spk in range(1, 3):
model = SkiMSeparator(
input_dim=10,
layer=2,
unit=10,
dropout=0.1,
num_spk=2,
nonlinear="relu",
segment_size=2,
)
model.eval()
specs, _, others = model(x, x_lens)
assert isinstance(specs, list)
assert isinstance(others, dict)
assert x.shape == specs[0].shape
for n in range(num_spk):
assert "mask_spk{}".format(n + 1) in others
assert specs[n].shape == others["mask_spk{}".format(n + 1)].shape
| 26.615385
| 77
| 0.616921
| 485
| 3,806
| 4.672165
| 0.179381
| 0.088261
| 0.185349
| 0.038835
| 0.775816
| 0.756399
| 0.747573
| 0.729479
| 0.729479
| 0.714916
| 0
| 0.025871
| 0.238308
| 3,806
| 142
| 78
| 26.802817
| 0.755778
| 0
| 0
| 0.704918
| 0
| 0
| 0.05938
| 0
| 0
| 0
| 0
| 0
| 0.07377
| 1
| 0.032787
| false
| 0
| 0.040984
| 0
| 0.07377
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e1824879fcb47eef4a999fad8a2458151ff76bfc
| 1,996
|
py
|
Python
|
article/migrations/0005_auto_20201229_2100.py
|
rayenmhamdi/Kye_BackendAPI
|
a4fa8a345d2678b5bd3a9b74451e9274ca9abad2
|
[
"MIT"
] | null | null | null |
article/migrations/0005_auto_20201229_2100.py
|
rayenmhamdi/Kye_BackendAPI
|
a4fa8a345d2678b5bd3a9b74451e9274ca9abad2
|
[
"MIT"
] | null | null | null |
article/migrations/0005_auto_20201229_2100.py
|
rayenmhamdi/Kye_BackendAPI
|
a4fa8a345d2678b5bd3a9b74451e9274ca9abad2
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.4 on 2020-12-29 20:00
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('article', '0004_auto_20201229_1544'),
]
operations = [
migrations.AddField(
model_name='category',
name='date_created',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='category',
name='date_modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AddField(
model_name='marque',
name='date_created',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='marque',
name='date_modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AddField(
model_name='product',
name='date_created',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='product',
name='date_modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='product',
name='bar_code',
field=models.CharField(blank=True, max_length=50, null=True, unique=True),
),
migrations.AlterField(
model_name='product',
name='code',
field=models.CharField(max_length=50, unique=True),
),
migrations.AlterField(
model_name='product',
name='marque',
field=models.CharField(blank=True, max_length=50, null=True),
),
]
| 31.68254
| 93
| 0.579158
| 196
| 1,996
| 5.72449
| 0.280612
| 0.072193
| 0.122995
| 0.144385
| 0.801248
| 0.801248
| 0.801248
| 0.713012
| 0.623886
| 0.623886
| 0
| 0.026831
| 0.309118
| 1,996
| 62
| 94
| 32.193548
| 0.786802
| 0.022545
| 0
| 0.75
| 1
| 0
| 0.095434
| 0.011801
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.035714
| 0
| 0.089286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e188e6fc22339f6e21d88524988c075da16b5918
| 387
|
py
|
Python
|
ESP8266/symbols.py
|
kz3ko/MAX30102-Pulse_oximetry
|
cc9d47e85a4755805fe51b780ba6591ff5f80ee8
|
[
"MIT"
] | null | null | null |
ESP8266/symbols.py
|
kz3ko/MAX30102-Pulse_oximetry
|
cc9d47e85a4755805fe51b780ba6591ff5f80ee8
|
[
"MIT"
] | null | null | null |
ESP8266/symbols.py
|
kz3ko/MAX30102-Pulse_oximetry
|
cc9d47e85a4755805fe51b780ba6591ff5f80ee8
|
[
"MIT"
] | null | null | null |
# Symbol of circle required to show Celsius degree on display.
circle = [
[0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 1, 0, 0, 1, 0],
[0, 0, 0, 1, 0, 0, 1, 0],
[0, 0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
]
| 35.181818
| 62
| 0.310078
| 75
| 387
| 1.6
| 0.16
| 0.816667
| 1.05
| 1.233333
| 0.533333
| 0.533333
| 0.533333
| 0.533333
| 0.533333
| 0.533333
| 0
| 0.312195
| 0.470284
| 387
| 11
| 63
| 35.181818
| 0.273171
| 0.155039
| 0
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
832a9392977dcb5083fd583d6c754fa4f650708e
| 2,625
|
py
|
Python
|
tests/test_year_2005.py
|
l0pht511/jpholiday
|
083145737b61fad3420c066968c4329d17dc3baf
|
[
"MIT"
] | 179
|
2017-10-05T12:41:10.000Z
|
2022-03-24T22:18:25.000Z
|
tests/test_year_2005.py
|
l0pht511/jpholiday
|
083145737b61fad3420c066968c4329d17dc3baf
|
[
"MIT"
] | 17
|
2018-10-23T00:51:13.000Z
|
2021-11-22T11:40:06.000Z
|
tests/test_year_2005.py
|
l0pht511/jpholiday
|
083145737b61fad3420c066968c4329d17dc3baf
|
[
"MIT"
] | 17
|
2018-10-19T11:13:07.000Z
|
2022-01-29T08:05:56.000Z
|
# coding: utf-8
import datetime
import unittest
import jpholiday
class TestYear2005(unittest.TestCase):
    """Japanese public-holiday checks for 2005: names, monthly counts, yearly total."""

    def test_holiday(self):
        """Each 2005 holiday date resolves to its expected holiday name."""
        expected_names = (
            (datetime.date(2005, 1, 1), '元日'),
            (datetime.date(2005, 1, 10), '成人の日'),
            (datetime.date(2005, 2, 11), '建国記念の日'),
            (datetime.date(2005, 3, 20), '春分の日'),
            (datetime.date(2005, 3, 21), '春分の日 振替休日'),
            (datetime.date(2005, 4, 29), 'みどりの日'),
            (datetime.date(2005, 5, 3), '憲法記念日'),
            (datetime.date(2005, 5, 4), '国民の休日'),
            (datetime.date(2005, 5, 5), 'こどもの日'),
            (datetime.date(2005, 7, 18), '海の日'),
            (datetime.date(2005, 9, 19), '敬老の日'),
            (datetime.date(2005, 9, 23), '秋分の日'),
            (datetime.date(2005, 10, 10), '体育の日'),
            (datetime.date(2005, 11, 3), '文化の日'),
            (datetime.date(2005, 11, 23), '勤労感謝の日'),
            (datetime.date(2005, 12, 23), '天皇誕生日'),
        )
        for day, name in expected_names:
            self.assertEqual(jpholiday.is_holiday_name(day), name)

    def test_count_month(self):
        """Each month of 2005 contains the expected number of holidays."""
        counts_by_month = (2, 1, 2, 1, 3, 0, 1, 0, 2, 1, 2, 1)
        for month, count in enumerate(counts_by_month, start=1):
            self.assertEqual(len(jpholiday.month_holidays(2005, month)), count)

    def test_count_year(self):
        """2005 has 16 holidays in total."""
        self.assertEqual(len(jpholiday.year_holidays(2005)), 16)
| 50.480769
| 92
| 0.681143
| 339
| 2,625
| 5.126844
| 0.182891
| 0.250288
| 0.220944
| 0.239356
| 0.804948
| 0.804948
| 0.804948
| 0.754315
| 0.495397
| 0.342923
| 0
| 0.096092
| 0.171429
| 2,625
| 51
| 93
| 51.470588
| 0.702989
| 0.015619
| 0
| 0
| 0
| 0
| 0.029833
| 0
| 0
| 0
| 0
| 0
| 0.805556
| 1
| 0.083333
| false
| 0
| 0.083333
| 0
| 0.194444
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
83366136cebcaab620c811056bd9c24a3c4d8fc7
| 135
|
py
|
Python
|
tests/test_bibtex_import.py
|
rycolab/academic-admin
|
9a3afb3fe5e085b5c74b235e3ef2db4f52606d0c
|
[
"MIT"
] | null | null | null |
tests/test_bibtex_import.py
|
rycolab/academic-admin
|
9a3afb3fe5e085b5c74b235e3ef2db4f52606d0c
|
[
"MIT"
] | null | null | null |
tests/test_bibtex_import.py
|
rycolab/academic-admin
|
9a3afb3fe5e085b5c74b235e3ef2db4f52606d0c
|
[
"MIT"
] | null | null | null |
from academic import cli
def test_bibtex_import():
    """Smoke-test a dry-run BibTeX import through the CLI argument parser."""
    argv = ["import", "--dry-run", "--bibtex", "tests/data/article.bib"]
    cli.parse_args(argv)
| 22.5
| 81
| 0.681481
| 19
| 135
| 4.684211
| 0.789474
| 0.202247
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118519
| 135
| 5
| 82
| 27
| 0.747899
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 0.162963
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 1
| 0
| 1.333333
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
834492c43890d0b5100f8eaa0dfe0f124e7aca5c
| 11,898
|
py
|
Python
|
Building/tests/test_building_app_view.py
|
LukaszHoszowski/Django_ProEstate
|
36c5cc25842f4e5afebd9ff6eaa83c9457fb7a3a
|
[
"MIT"
] | 1
|
2022-02-15T13:36:29.000Z
|
2022-02-15T13:36:29.000Z
|
Building/tests/test_building_app_view.py
|
LukaszHoszowski/Django_ProEstate
|
36c5cc25842f4e5afebd9ff6eaa83c9457fb7a3a
|
[
"MIT"
] | null | null | null |
Building/tests/test_building_app_view.py
|
LukaszHoszowski/Django_ProEstate
|
36c5cc25842f4e5afebd9ff6eaa83c9457fb7a3a
|
[
"MIT"
] | null | null | null |
from django.contrib.auth.models import User
from django.urls import reverse
from Building.models import Building, Cartography, HousingCooperative, Flat
def test_view_buildings_user_logged(user_A: User, app_building_factory: Building, payment_period_helper, client):
    """Logged-in user: buildings list view returns 200 and shows the building name."""
    client.force_login(user_A)
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:buildings')
    response = client.get(url)
    assert response.status_code == 200
    assert str(building).upper() in str(response.content.decode('UTF-8'))
def test_view_buildings_user_anonymous(client, app_building_factory: Building, payment_period_helper):
    """Anonymous user: buildings list view redirects (302) to login."""
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)  # created only for its DB side effects
    url = reverse('Building:buildings')
    response = client.get(url)
    assert response.status_code == 302
    assert 'login' in response.url
def test_view_building_details_user_logged(user_A: User, app_building_factory: Building, payment_period_helper, client):
    """Logged-in user: building details view returns 200 and shows the street.

    Fix: removed a leftover debug ``print(response.content...)`` that spammed
    test output; it had no effect on the assertions.
    """
    client.force_login(user_A)
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:building_details', args=[building.slug])
    response = client.get(url)
    assert response.status_code == 200
    assert building.street in response.content.decode('UTF-8')
def test_view_building_details_user_anonymous(client, app_building_factory: Building, payment_period_helper):
    """Anonymous user: building details view redirects (302) to login."""
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:building_details', args=[building.slug])
    response = client.get(url)
    assert response.status_code == 302
    assert 'login' in response.url
def test_view_building_flats_user_logged(user_A: User, app_building_factory: Building, payment_period_helper, client):
    """Logged-in user: building flats view returns 200 and renders the ownership label."""
    client.force_login(user_A)
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:building_flats', args=[building.slug])
    response = client.get(url)
    assert response.status_code == 200
    assert 'Pełna własność' in response.content.decode('UTF-8')
def test_view_building_flats_user_anonymous(client, app_building_factory: Building, payment_period_helper):
    """Anonymous user: building flats view redirects (302) to login."""
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:building_flats', args=[building.slug])
    response = client.get(url)
    assert response.status_code == 302
    assert 'login' in response.url
def test_view_building_cartography_user_logged(user_A: User, app_building_factory: Building,
                                               app_cartography_factory: Cartography, payment_period_helper, client):
    """Logged-in user: cartography view returns 200 and shows the plot identifier."""
    client.force_login(user_A)
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    carto = app_cartography_factory(building)  # unused name; the factory call creates the DB row
    url = reverse('Building:building_cartography', args=[building.slug])
    response = client.get(url, slug=building.slug)  # NOTE(review): extra kwarg becomes a query param — likely redundant; confirm
    assert response.status_code == 200
    assert '026401_1.0022.AR_28.86' in response.content.decode('UTF-8')
def test_view_building_cartography_user_anonymous(client, app_building_factory: Building,
                                                  app_cartography_factory: Cartography, payment_period_helper):
    """Anonymous user: cartography view redirects (302) to login."""
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    carto = app_cartography_factory(building)  # unused name; the factory call creates the DB row
    url = reverse('Building:building_cartography', args=[building.slug])
    response = client.get(url)
    assert response.status_code == 302
    assert 'login' in response.url
def test_view_building_coop_user_logged(user_A: User, app_building_factory: Building,
                                        app_coop_factory: HousingCooperative, payment_period_helper, client):
    """Logged-in user: housing-cooperative view returns 200 and shows the manager label."""
    client.force_login(user_A)
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    coop = app_coop_factory
    coop.building_set.add(building)
    url = reverse('Building:building_coop', args=[building.slug])
    response = client.get(url)
    assert response.status_code == 200
    assert 'Zarządca' in response.content.decode('UTF-8')
def test_view_building_coop_user_anonymous(client, app_building_factory: Building,
                                           app_coop_factory: HousingCooperative, payment_period_helper):
    """Anonymous user: housing-cooperative view redirects (302) to login."""
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    coop = app_coop_factory
    coop.building_set.add(building)
    url = reverse('Building:building_coop', args=[building.slug])
    response = client.get(url)
    assert response.status_code == 302
    assert 'login' in response.url
def test_view_building_photos_user_logged(user_A: User, app_building_factory: Building,
                                          payment_period_helper, client):
    """Logged-in user: photos view returns 200 and shows the empty-gallery message."""
    client.force_login(user_A)
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:building_photos', args=[building.slug])
    response = client.get(url)
    assert response.status_code == 200
    assert 'Nie dodano jeszcze zdjęć' in response.content.decode('UTF-8')
def test_view_building_photos_user_anonymous(client, app_building_factory: Building,
                                             payment_period_helper):
    """Anonymous user: photos view redirects (302) to login."""
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:building_photos', args=[building.slug])
    response = client.get(url)
    assert response.status_code == 302
    assert 'login' in response.url
def test_view_building_add_photos_user_logged(user_A: User, app_building_factory: Building,
                                              payment_period_helper, client):
    """Logged-in user: add-photos form returns 200 and shows the save button."""
    client.force_login(user_A)
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:building_photos_add', args=[building.slug])
    response = client.get(url)
    assert response.status_code == 200
    assert 'Zapisz' in response.content.decode('UTF-8')
def test_view_building_add_photos_user_anonymous(client, app_building_factory: Building,
                                                 payment_period_helper):
    """Anonymous user: add-photos form redirects (302) to login."""
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:building_photos_add', args=[building.slug])
    response = client.get(url)
    assert response.status_code == 302
    assert 'login' in response.url
def test_view_building_documents_user_logged(user_A: User, app_building_factory: Building,
                                             payment_period_helper, client):
    """Logged-in user: documents view returns 200 and shows the empty-documents message."""
    client.force_login(user_A)
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:building_documents', args=[building.slug])
    response = client.get(url)
    assert response.status_code == 200
    assert 'Nie dodano jeszcze dokumentów' in response.content.decode('UTF-8')
def test_view_building_documents_user_anonymous(client, app_building_factory: Building,
                                                payment_period_helper):
    """Anonymous user: documents view redirects (302) to login."""
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:building_documents', args=[building.slug])
    response = client.get(url)
    assert response.status_code == 302
    assert 'login' in response.url
def test_view_building_add_documents_user_logged(user_A: User, app_building_factory: Building,
                                                 payment_period_helper, client):
    """Logged-in user: add-documents form returns 200 and shows the save button."""
    client.force_login(user_A)
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:building_docs_add', args=[building.slug])
    response = client.get(url)
    assert response.status_code == 200
    assert 'Zapisz' in response.content.decode('UTF-8')
def test_view_building_add_documents_user_anonymous(client, app_building_factory: Building,
                                                    payment_period_helper):
    """Anonymous user: add-documents form redirects (302) to login."""
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:building_docs_add', args=[building.slug])
    response = client.get(url)
    assert response.status_code == 302
    assert 'login' in response.url
def test_view_building_flat_details_user_logged(user_A: User, app_building_factory: Building,
                                                payment_period_helper, client):
    """Logged-in user: flat details view returns 200 and shows the join-flat action."""
    client.force_login(user_A)
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:flat_details', args=[building.slug, building.flat_set.all().first().id])
    response = client.get(url)
    assert response.status_code == 200
    assert 'Dopisz się do mieszkania' in response.content.decode('UTF-8')
def test_view_building_flat_details_user_anonymous(client, app_building_factory: Building,
                                                   payment_period_helper):
    """Anonymous user: flat details view redirects (302) to login."""
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:flat_details', args=[building.slug, building.flat_set.all().first().id])
    response = client.get(url)
    assert response.status_code == 302
    assert 'login' in response.url
def test_view_building_flat_update_user_logged(user_A: User, app_building_factory: Building,
                                               payment_period_helper, client):
    """Logged-in user: flat update form returns 200 and renders the 'suffix' field."""
    client.force_login(user_A)
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:flat_update', args=[building.slug, building.flat_set.all().first().id])
    response = client.get(url)
    assert response.status_code == 200
    assert 'suffix' in response.content.decode('UTF-8')
def test_view_building_flat_update_user_anonymous(client, app_building_factory: Building,
                                                  payment_period_helper):
    """Anonymous user: flat update form redirects (302) to login."""
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:flat_update', args=[building.slug, building.flat_set.all().first().id])
    response = client.get(url)
    assert response.status_code == 302
    assert 'login' in response.url
def test_view_building_user_to_flat_user_logged(user_A: User, app_building_factory: Building,
                                                payment_period_helper, client):
    """Logged-in user: POSTing flat_add_user attaches the user to the flat (then redirects)."""
    client.force_login(user_A)
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    flat = building.flat_set.all().first()
    url = reverse('Building:flat_add_user', args=[flat.id])
    response = client.post(url)
    assert response.status_code == 302
    assert user_A.flat_set.all().first() == flat
def test_view_building_user_to_flat_update_user_anonymous(client, app_building_factory: Building,
                                                          payment_period_helper):
    """Anonymous user: POSTing flat_add_user redirects (302) to login."""
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:flat_add_user', args=[building.flat_set.all().first().id])
    response = client.post(url)
    assert response.status_code == 302
    assert 'login' in response.url
def test_view_building_user_from_flat_user_logged(user_A: User, app_building_factory: Building,
                                                  payment_period_helper, client):
    """Logged-in user: POSTing flat_delete_user detaches the user from the flat."""
    client.force_login(user_A)
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    flat = building.flat_set.all().first()
    url = reverse('Building:flat_delete_user', args=[flat.id])
    response = client.post(url)
    assert response.status_code == 302
    assert user_A.flat_set.all().first() != flat
def test_view_building_user_from_flat_update_user_anonymous(client, app_building_factory: Building,
                                                            payment_period_helper):
    """Anonymous user: POSTing flat_delete_user redirects (302) to login."""
    payment_period_helper  # no-op bare reference; the fixture is already active via the parameter
    building = app_building_factory(1)
    url = reverse('Building:flat_delete_user', args=[building.flat_set.all().first().id])
    response = client.post(url)
    assert response.status_code == 302
    assert 'login' in response.url
| 35.516418
| 120
| 0.703059
| 1,465
| 11,898
| 5.380205
| 0.056655
| 0.07257
| 0.118752
| 0.085765
| 0.966379
| 0.959528
| 0.94608
| 0.941512
| 0.932124
| 0.932124
| 0
| 0.013944
| 0.210371
| 11,898
| 335
| 121
| 35.516418
| 0.825013
| 0
| 0
| 0.779736
| 0
| 0
| 0.074292
| 0.047567
| 0
| 0
| 0
| 0
| 0.229075
| 1
| 0.114537
| false
| 0
| 0.013216
| 0
| 0.127753
| 0.004405
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
55cb18582abfbd9bfcd0aef7b758dae40e2ee2aa
| 63,214
|
py
|
Python
|
sim21/unitop/Compressor.py
|
kpatvt/sim21
|
4cbbfcbef6371d3dc5404429545e003a48c69ba5
|
[
"Artistic-2.0"
] | 7
|
2021-08-23T18:46:27.000Z
|
2022-01-26T07:10:22.000Z
|
sim21/unitop/Compressor.py
|
kpatvt/sim21
|
4cbbfcbef6371d3dc5404429545e003a48c69ba5
|
[
"Artistic-2.0"
] | null | null | null |
sim21/unitop/Compressor.py
|
kpatvt/sim21
|
4cbbfcbef6371d3dc5404429545e003a48c69ba5
|
[
"Artistic-2.0"
] | null | null | null |
"""Models for compression/expansion equipment
Classes:
IdealCompressorExpander -- an isentropic compressor expander base class
IdealCompressor - an isentropic compressor
IdealExpander - an isentropic expander
Compressor - compressor with adiabatic efficiency
Expander - expander with adiabatic efficiency
"""
# PRESSURE DROP CALCULATION CREATED BY NORFAIZAH ON 26TH FEBRUARY 2003
from sim21.solver.Messages import MessageHandler
from sim21.unitop import UnitOperations
from sim21.unitop import Balance, Heater, Set, Sensor, Stream, Pump
from sim21.solver.Variables import *
import math
# Unit-operation type names this module can create.
VALID_UNIT_OPERATIONS = ['Compressor',
                         'Expander',
                         'CompressorWithCurve',
                         'ExpanderWithCurve']
# Parameter names.
ISENTROPIC_PAR = 'Isentropic'
# Performance-curve series names.
HEAD_SERIES = 'HeadCurve'
FLOW_SERIES = 'FlowCurve'
EFFICIENCY_SERIES = 'EfficiencyCurve'
POWER_SERIES = 'Power'
# Signal-port names.
HEAD_PORT = 'Head'
POLYTROPIC_EFF_PORT = 'PolytropicEff'
ADIABATIC_EFF_PORT = 'AdiabaticEff'
EFFICIENCY_PORT = "Efficiency"  # Old name. NOT USED !!
# Child-object names.
COMPRESSORCURVE_OBJ = 'CompressorCurve'
EXPANDERCURVE_OBJ = 'ExpanderCurve'
SERIES_OBJ = 'Series'
TABLE_OBJ = 'Table'
# Curve-table parameter names.
NUMBTABLE_PAR = 'NumberTables'
NUMBSERIES_PAR = 'NumberSeries'
TABLETAGTYPE_PAR = 'TableType'
SERIESTYPE_PAR = 'SeriesType'
EXTRAPOLATE_PAR = 'Extrapolate'
SPEC_TAG_PORT = 'SpecTagValue'
TABLETAG_VAR = 'TagValue'  # Basic Object of class ATable
COMPRESSORSPEED_PORT = 'CompressorSpeed'
EXPANDERSPEED_PORT = 'ExpanderSpeed'
IGNORECURVE_PAR = 'IgnoreCurve'
EFFCURVETYPE_PAR = 'EfficiencyCurveType'
# Efficiency-curve interpretation types.
ADIABATIC_TYPE = 'Adiabatic'
POLYTROPIC_TYPE = 'Polytropic'
# defines for EquationUnit
NUMBSIG_PAR = 'NumberSignal'
TRANSFORM_EQT_PAR = 'Equation'
class IdealCompressorExpander(UnitOperations.UnitOperation):
    """Isentropic compressor/expander.

    Performs a mole/energy balance while conserving entropy between the
    inlet and outlet material ports. Direction of the energy port is set
    by ``isCompressor`` (energy in for a compressor, out for an expander).
    """

    def __init__(self, isCompressor=1, initScript=None):
        """
        Just do balance and conserve entropy.
        isCompressor determines energy flow direction (truthy -> compressor).
        """
        UnitOperations.UnitOperation.__init__(self, initScript)
        self.balance = Balance.Balance(Balance.MOLE_BALANCE | Balance.ENERGY_BALANCE)
        # Last converged pressures, reused as iteration starting points.
        self.lastPIn = None
        self.lastPOut = None
        inPort = self.CreatePort(MAT | IN, IN_PORT)
        outPort = self.CreatePort(MAT | OUT, OUT_PORT)
        dpPort = self.CreatePort(SIG, DELTAP_PORT)
        dpPort.SetSignalType(DELTAP_VAR)
        self.isCompressor = isCompressor
        if isCompressor:
            # Compressor: energy flows in with the feed.
            qPort = self.CreatePort(ENE | IN, IN_PORT + 'Q')
            self.balance.AddInput((inPort, qPort))
            self.balance.AddOutput(outPort)
        else:
            # Expander: energy flows out with the product.
            qPort = self.CreatePort(ENE | OUT, OUT_PORT + 'Q')
            self.balance.AddInput(inPort)
            self.balance.AddOutput((outPort, qPort))
    def CleanUp(self):
        # Break the reference cycle with the balance before delegating.
        self.balance.CleanUp()
        self.balance = None
        super(IdealCompressorExpander, self).CleanUp()
    def Solve(self):
        """Share entropy across ports, run balances, then try HS flashes and dP.

        When a port is missing both P and T but H and S are known, iterates a
        non-supported H/S flash on pressure (bounded by a factor of 10 from the
        known side's pressure). Finally reconciles inlet/outlet pressure with
        the DeltaP signal. Returns 1.
        """
        # if not self.ValidateOk(): return None
        self.FlashAllPorts()  # make sure anything that can be flashed has been
        inport = self.GetPort(IN_PORT)
        outport = self.GetPort(OUT_PORT)
        inport.SharePropWith(outport, S_VAR)  # isentropic: S is common to both ports
        self.balance.DoBalance()
        # Re-balance until flashing stops producing new information.
        while self.FlashAllPorts():
            self.balance.DoBalance()
        self.balance.DoBalance()
        # Check if a HS flash can still be done
        propsIn = inport.GetProperties()
        propsOut = outport.GetProperties()
        P, T = propsOut[P_VAR], propsOut[T_VAR]
        if P.GetValue() is None and T.GetValue() is None:
            # Outlet P unknown: iterate outlet pressure to satisfy H and S.
            H, S, PIn, TIn = propsOut[H_VAR], propsOut[S_VAR], propsIn[P_VAR], propsIn[T_VAR]
            if H.GetValue() is not None and S.GetValue() is not None and PIn.GetValue() is not None:
                unitOp, frac = self, outport.GetCompositionValues()
                knownTargetProp = (S_VAR, S.GetValue(), S.GetType().scaleFactor)
                knownFlashProp = (H_VAR, H.GetValue(), H.GetType().scaleFactor)
                iterProp = (P_VAR, PIn.GetValue(), P.GetType().scaleFactor)
                # Bracket the search a decade away from the known inlet pressure.
                if self.isCompressor:
                    min_pin, max_pout = PIn.GetValue(), 10.0 * PIn.GetValue()
                else:
                    min_pin, max_pout = PIn.GetValue() / 10.0, PIn.GetValue()
                converged = 0
                try:
                    val, converged = UnitOperations.CalculateNonSupportedFlash(unitOp, frac, knownTargetProp,
                                                                               knownFlashProp, iterProp, self.lastPOut,
                                                                               min_pin, max_pout)
                finally:
                    if converged:
                        self.lastPOut = val  # remember for next solve pass
                        P.SetValue(val, CALCULATED_V)
                        self.FlashAllPorts()
                    else:
                        self.lastPOut = None
                        self.InfoMessage('CouldNotSolveNonSuppFlash',
                                         (H_VAR, str(H.GetValue()), S_VAR, str(S.GetValue()), self.GetPath()))
        P, T = propsIn[P_VAR], propsIn[T_VAR]
        if P.GetValue() is None and T.GetValue() is None:
            # Inlet P unknown: mirror of the block above, iterating inlet pressure.
            H, S, POut, TOut = propsIn[H_VAR], propsIn[S_VAR], propsOut[P_VAR], propsOut[T_VAR]
            if H.GetValue() is not None and S.GetValue() is not None and POut.GetValue() is not None:
                unitOp, frac = self, inport.GetCompositionValues()
                knownTargetProp = (S_VAR, S.GetValue(), S.GetType().scaleFactor)
                knownFlashProp = (H_VAR, H.GetValue(), H.GetType().scaleFactor)
                iterProp = (P_VAR, POut.GetValue(), P.GetType().scaleFactor)
                if self.isCompressor:
                    min_pin, max_pout = POut.GetValue() / 10.0, POut.GetValue()
                else:
                    min_pin, max_pout = POut.GetValue(), 10.0 * POut.GetValue()
                converged = 0
                try:
                    val, converged = UnitOperations.CalculateNonSupportedFlash(unitOp, frac, knownTargetProp,
                                                                               knownFlashProp, iterProp, self.lastPIn,
                                                                               min_pin, max_pout)
                finally:
                    if converged:
                        self.lastPIn = val
                        P.SetValue(val, CALCULATED_V)
                        self.FlashAllPorts()
                    else:
                        self.lastPIn = None
                        self.InfoMessage('CouldNotSolveNonSuppFlash',
                                         (H_VAR, str(H.GetValue()), S_VAR, str(S.GetValue()), self.GetPath()))
        # Reconcile pressures with the DeltaP signal (sign flips for an expander).
        dpPort = self.GetPort(DELTAP_PORT)
        pIn = inport.GetPropValue(P_VAR)
        pOut = outport.GetPropValue(P_VAR)
        dp = dpPort.GetValue()
        sign = 1
        if not self.isCompressor:
            sign = -1
        if pOut is None:
            if dp is not None and pIn is not None:
                outport.SetPropValue(P_VAR, pIn + sign * dp, CALCULATED_V)
        elif pIn is None:
            if dp is not None:
                inport.SetPropValue(P_VAR, pOut - sign * dp, CALCULATED_V)
        else:
            dpPort.SetPropValue(DELTAP_VAR, sign * (pOut - pIn), CALCULATED_V)
        return 1
    def _CloneCreate(self):
        """By default just clone with the __class__ call (keeps direction flag)."""
        clone = self.__class__(self.isCompressor)
        return clone
class IdealCompressor(IdealCompressorExpander):
    """Isentropic compressor (energy flows in with the feed)."""

    def __init__(self, initScript=None):
        # Direction flag 1 -> compressor.
        super(IdealCompressor, self).__init__(1, initScript)
class IdealExpander(IdealCompressorExpander):
    """Isentropic expander (energy flows out with the product)."""

    def __init__(self, initScript=None):
        # Direction flag 0 -> expander.
        super(IdealExpander, self).__init__(0, initScript)
class Compressor(UnitOperations.UnitOperation):
"""
Adiabatic Compressor made from ideal compressor, set and heater
"""
def __init__(self, initScript=None):
"""Init compressor - build it from IdealCompressorExpander,
Heater and Set operations
"""
UnitOperations.UnitOperation.__init__(self, initScript)
# the isentropic compressor
self.ideal = IdealCompressorExpander(1)
self.AddUnitOperation(self.ideal, 'Ideal')
# a heater to add the waste heat to the outlet
self.waste = Heater.Heater()
self.AddUnitOperation(self.waste, 'Waste')
self.waste.GetPort(DELTAP_PORT).SetValue(0.0, FIXED_V)
# connect them
self.ConnectPorts('Ideal', OUT_PORT, 'Waste', IN_PORT)
# energy sensors (needed for signals)
self.idealQ = Sensor.EnergySensor()
self.AddUnitOperation(self.idealQ, 'IdealQ')
self.ConnectPorts('Ideal', IN_PORT + 'Q', 'IdealQ', OUT_PORT)
self.wasteQ = Sensor.EnergySensor()
self.AddUnitOperation(self.wasteQ, 'WasteQ')
self.ConnectPorts('Waste', IN_PORT + 'Q', 'WasteQ', OUT_PORT)
self.totalQ = Sensor.EnergySensor()
self.AddUnitOperation(self.totalQ, 'TotalQ')
# create a signal stream for the efficiency
self.effStream = Stream.Stream_Signal()
self.effStream.SetParameterValue(SIGTYPE_PAR, GENERIC_VAR)
self.AddUnitOperation(self.effStream, 'EfficiencySig')
# set relation between ideal and total Q
self.set = Set.Set()
self.AddUnitOperation(self.set, 'Set')
self.set.SetParameterValue(SIGTYPE_PAR, ENERGY_VAR)
self.set.GetPort(Set.ADD_PORT).SetValue(0.0, FIXED_V)
self.ConnectPorts('TotalQ', SIG_PORT, 'Set', SIG_PORT + '0')
self.ConnectPorts('IdealQ', SIG_PORT, 'Set', SIG_PORT + '1')
self.ConnectPorts('EfficiencySig', OUT_PORT, 'Set', Set.MULT_PORT)
# energy stream balance
self.mix = Balance.BalanceOp()
self.AddUnitOperation(self.mix, 'Mix')
self.mix.SetParameterValue(NUSTIN_PAR + Balance.S_ENE, 1)
self.mix.SetParameterValue(NUSTOUT_PAR + Balance.S_ENE, 2)
self.mix.SetParameterValue(Balance.BALANCETYPE_PAR, Balance.ENERGY_BALANCE)
# connect the mixer ports
self.ConnectPorts('IdealQ', IN_PORT, 'Mix', OUT_PORT + 'Q0')
self.ConnectPorts('WasteQ', IN_PORT, 'Mix', OUT_PORT + 'Q1')
self.ConnectPorts('TotalQ', OUT_PORT, 'Mix', IN_PORT + 'Q0')
# export the flow ports
self.BorrowChildPort(self.ideal.GetPort(IN_PORT), IN_PORT)
self.BorrowChildPort(self.waste.GetPort(OUT_PORT), OUT_PORT)
self.BorrowChildPort(self.totalQ.GetPort(IN_PORT), IN_PORT + 'Q')
self.BorrowChildPort(self.effStream.GetPort(IN_PORT), ADIABATIC_EFF_PORT)
self.BorrowChildPort(self.ideal.GetPort(DELTAP_PORT), DELTAP_PORT)
# Change the type of the energy port such that it is in Work units and scaling
self.totalQ.GetPort(IN_PORT).GetProperty().SetTypeByName(WORK_VAR)
# Polytropic efficiency
self.polEffPort = self.CreatePort(SIG, POLYTROPIC_EFF_PORT)
self.polEffPort.SetSignalType(GENERIC_VAR)
self.nuSolids = 0
def Solve(self):
super(Compressor, self).Solve()
inPort = self.GetPort(IN_PORT)
outPort = self.GetPort(OUT_PORT)
sPort = self.ideal.GetPort(OUT_PORT)
mf = inPort.GetPropValue(MOLEFLOW_VAR)
self._thCaseObj = self.GetThermo()
if not self._thCaseObj:
return
self.nuSolids = self.NumberSolidPhases()
inFlashed = inPort.AlreadyFlashed()
outFlashed = outPort.AlreadyFlashed()
sFlashed = sPort.AlreadyFlashed()
if mf is None:
if inFlashed and sFlashed:
self.SolveForMoleFlow(inPort, outPort, sPort)
mf = inPort.GetPropValue(MOLEFLOW_VAR)
if not inFlashed and outFlashed and mf is not None:
self.SolveForInlet(inPort, outPort, sPort)
# Solve for polytropic eff if in and out are known
if inFlashed and outFlashed:
self.SolveForPolytropicEff(inPort, outPort, sPort)
return
# Solve for isentropic efficiency if isentropic port is known
if sFlashed and inFlashed and not outFlashed:
# Solve for efficiency
self.SolveForIsentropicEff(inPort, outPort, sPort)
def SolveForInlet(self, inPort, outPort, sPort):
h1 = outPort.GetPropValue(H_VAR)
p1 = outPort.GetPropValue(P_VAR)
p0 = inPort.GetPropValue(P_VAR)
mf = inPort.GetPropValue(MOLEFLOW_VAR)
fracs = inPort.GetCompositionValues()
# Which efficiency is known?
isEff = self.GetPort(ADIABATIC_EFF_PORT).GetValue()
usesPolytropic = 0
if isEff is None:
polEff = self.polEffPort.GetValue()
if not polEff:
return
else:
usesPolytropic = 1
ps = p1
if None in (h1, p1, p0, mf) or (fracs is None) or (None in fracs):
return
# Initialize h0
h0 = h1
maxIter = 30
iterations = 0
converged = 0
tolerance = 1.0E-6
scale = 1000.0
shift = 1.0
maxStep = 5000.0
while not converged and iterations < maxIter:
iterations += 1
s0, v0 = self.GetPropertiesPH(p0, h0, fracs, (S_VAR, molarV_VAR))
hs, vs = self.GetPropertiesPS(p1, s0, fracs, (H_VAR, molarV_VAR))
if usesPolytropic:
isEff, status = self.FromPolyToIsenEff(polEff, p0, h0, v0, ps, vs, hs, fracs)
if isEff is None:
self.InfoMessage('CouldNotConverge', self.GetPath(), status[0])
return
err = (hs - h0) - (h1 - h0) * isEff
err /= scale
# Leave if converged
if abs(err) <= tolerance:
converged = 1
break
# Calculate crude derivative
h0Temp = h0 + shift
s0, v0 = self.GetPropertiesPH(p0, h0Temp, fracs, (S_VAR, molarV_VAR))
hs, vs = self.GetPropertiesPS(p1, s0, fracs, (H_VAR, molarV_VAR))
if usesPolytropic:
isEff, status = self.FromPolyToIsenEff(polEff, p0, h0, v0, ps, vs, hs, fracs)
if isEff is None:
self.InfoMessage('CouldNotConverge', self.GetPath(), status[0])
return
errTemp = (hs - h0Temp) - (h1 - h0Temp) * isEff
errTemp /= scale
dErr_dEff = (errTemp - err) / shift
step = - err / dErr_dEff
step = max(step, -maxStep)
step = min(step, maxStep)
h0 += step
if converged:
inPort.SetPropValue(H_VAR, h0, CALCULATED_V | PARENT_V)
else:
self.InfoMessage('CouldNotConverge', self.GetPath(), iterations)
def SolveForMoleFlow(self, inPort, outPort, sPort):
"""solve for mole flow"""
h0 = inPort.GetPropValue(H_VAR) # kJ/kmol
hs = sPort.GetPropValue(H_VAR) # kJ/kmol
if h0 is None or hs is None:
return
isEff = self.GetPort(ADIABATIC_EFF_PORT).GetValue()
if not isEff:
polEff = self.polEffPort.GetValue()
if not polEff:
return
# See if we can get isentropic eff from the polytropic one
mf = None
p0 = inPort.GetPropValue(P_VAR) # kPa
h0 = inPort.GetPropValue(H_VAR) # kJ/kmol
v0 = inPort.GetPropValue(molarV_VAR) # m3/kmol
fracs = inPort.GetCompositionValues()
ps = p1 = sPort.GetPropValue(P_VAR) # m3/kmol
vs = sPort.GetPropValue(molarV_VAR) # m3/kmol
hs = sPort.GetPropValue(H_VAR) # kJ/kmol
if (None in (p0, h0, v0, ps, vs, hs)) or (fracs is None) or (None in fracs):
return
isEff, status = self.FromPolyToIsenEff(polEff, p0, h0, v0, ps, vs, hs, fracs)
if isEff is None:
self.InfoMessage('CouldNotConverge', self.GetPath(), status[0])
return
else:
self.GetPort(ADIABATIC_EFF_PORT).SetValue(isEff, CALCULATED_V | PARENT_V)
if not isEff:
return
# Calculate mole flow
h1 = (hs - h0) / isEff + h0
outPort.SetPropValue(H_VAR, h1, CALCULATED_V | PARENT_V)
# Solve for mole flow if possible
q = self.GetPort(IN_PORT + 'Q').GetValue() # J/s
if q is None:
return
mf = q * 3.6 / (h1 - h0) # kmol/h
inPort.SetPropValue(MOLEFLOW_VAR, mf, CALCULATED_V | PARENT_V)
def SolveForPolytropicEff(self, inPort, outPort, sPort):
"""Solve for hte polytropic efficiency for a given in, out and isentropic port"""
isEff = self.GetPort(ADIABATIC_EFF_PORT).GetValue()
if not isEff:
return
mf = inPort.GetPropValue(MOLEFLOW_VAR) # kmol/h
p0 = inPort.GetPropValue(P_VAR) # kPa
h0 = inPort.GetPropValue(H_VAR) # kJ/kmol
v0 = inPort.GetPropValue(molarV_VAR) # m3/kmol
p1 = outPort.GetPropValue(P_VAR) # kPa
h1 = outPort.GetPropValue(H_VAR) # kJ/kmol
v1 = outPort.GetPropValue(molarV_VAR) # m3/kmol
vs = sPort.GetPropValue(molarV_VAR) # m3/kmol
hs = sPort.GetPropValue(H_VAR) # kJ/kmol
if None in (mf, p0, h0, v0, p1, h1, v1, vs, hs):
return
try:
polEff = self.CalcPolytropicEff(p0, h0, v0, p1, h1, v1, vs, hs)
self.polEffPort.SetValue(polEff, CALCULATED_V)
except:
pass
def SolveForIsentropicEff(self, inPort, outPort, sPort):
polEff = self.polEffPort.GetValue()
if not polEff:
return
mf = inPort.GetPropValue(MOLEFLOW_VAR) # kmol/h
p0 = inPort.GetPropValue(P_VAR) # kPa
h0 = inPort.GetPropValue(H_VAR) # kJ/kmol
v0 = inPort.GetPropValue(molarV_VAR) # m3/kmol
fracs = inPort.GetCompositionValues()
ps = p1 = sPort.GetPropValue(P_VAR) # m3/kmol
vs = sPort.GetPropValue(molarV_VAR) # m3/kmol
hs = sPort.GetPropValue(H_VAR) # kJ/kmol
if (None in (mf, p0, h0, v0, ps, vs, hs)) or (fracs is None) or (None in fracs):
return
isEff, status = self.FromPolyToIsenEff(polEff, p0, h0, v0, ps, vs, hs, fracs)
if isEff is None:
self.InfoMessage('CouldNotConverge', self.GetPath(), status[0])
else:
self.GetPort(ADIABATIC_EFF_PORT).SetValue(isEff, CALCULATED_V | PARENT_V)
def FromPolyToIsenEff(self, *args):
"""Calcaulte for isentropic efficiency based on the polytropic efficiency and other properties"""
polEff, p0, h0, v0, ps, vs, hs, fracs = args
p1 = ps
# Iterate on isentropic efficiency until polytropic is matched
isEff = polEff
dhs = hs - h0
maxIter = 30
iterations = 0
converged = 0
tolerance = 1.0E-6
shift = 0.0001
while not converged and iterations < maxIter:
iterations += 1
h1 = dhs / isEff + h0
v1 = self.GetPropertiesPH(ps, h1, fracs, (molarV_VAR,))[0]
iterPolEff = self.CalcPolytropicEff(p0, h0, v0, p1, h1, v1, vs, hs)
err = iterPolEff - polEff
# Leave if converged
if abs(err) <= tolerance:
converged = 1
break
# Calculate crude derivative
h1 = dhs / (isEff + shift) + h0
v1 = self.GetPropertiesPH(ps, h1, fracs, (molarV_VAR,))[0]
iterPolEff = self.CalcPolytropicEff(p0, h0, v0, p1, h1, v1, vs, hs)
errTemp = iterPolEff - polEff
dErr_dEff = (errTemp - err) / shift
isEff = isEff - err / dErr_dEff
status = (iterations,)
if not converged:
return None, status
else:
return isEff, status
def CalcPolytropicEff(self, *args):
p0, h0, v0, p1, h1, v1, vs, hs = args
n = math.log(p1 / p0) / math.log(v0 / v1)
ns = math.log(p1 / p0) / math.log(v0 / vs)
f = (hs - h0) / ((ns / (ns - 1.0)) * (p1 * vs - p0 * v0))
wp = f * (n / (n - 1.0)) * (p1 * v1 - p0 * v0)
return wp / (h1 - h0)
def GetPropertiesPH(self, p, h, fracs, props, phase=OVERALL_PHASE):
thAdmin, prov, case = self._thCaseObj.thermoAdmin, self._thCaseObj.provider, self._thCaseObj.case
if not self.nuSolids:
inProp1 = [P_VAR, p]
inProp2 = [H_VAR, h]
vals = thAdmin.GetProperties(prov, case, inProp1, inProp2, phase, fracs, props)
else:
matDict = MaterialPropertyDict()
matDict[P_VAR].SetValue(p, FIXED_V)
matDict[H_VAR].SetValue(h, FIXED_V)
cmps = CompoundList(None)
for i in range(len(fracs)):
cmps.append(BasicProperty(FRAC_VAR))
cmps.SetValues(fracs, FIXED_V)
liqPhases = 1
results = thAdmin.Flash(prov, case, cmps, matDict, liqPhases, props, nuSolids=self.nuSolids)
vals = results.bulkProps
return vals
def GetPropertiesPS(self, p, s, fracs, props, phase=OVERALL_PHASE):
    """Return the requested props for a (P, S) state with composition fracs."""
    thCaseObj = self._thCaseObj
    thAdmin = thCaseObj.thermoAdmin
    prov = thCaseObj.provider
    case = thCaseObj.case
    if self.nuSolids:
        # Solids present -> must run a full flash rather than a direct call
        matDict = MaterialPropertyDict()
        matDict[P_VAR].SetValue(p, FIXED_V)
        matDict[S_VAR].SetValue(s, FIXED_V)
        cmps = CompoundList(None)
        for _ in range(len(fracs)):
            cmps.append(BasicProperty(FRAC_VAR))
        cmps.SetValues(fracs, FIXED_V)
        liqPhases = 1
        results = thAdmin.Flash(prov, case, cmps, matDict, liqPhases, props, nuSolids=self.nuSolids)
        return results.bulkProps
    # No solids -> direct property call on the provider
    return thAdmin.GetProperties(prov, case, [P_VAR, p], [S_VAR, s], phase, fracs, props)
def CleanUp(self):
    """Release references to child operations so they can be collected."""
    self.ideal = self.waste = self.idealQ = None
    self.wasteQ = self.totalQ = self.effStream = None
    self.set = self.mix = None
    # Also drop the cached thermo case (Solve stores it in self._thCaseObj);
    # Expander.CleanUp already does this and Compressor should match.
    self._thCaseObj = None
    super(Compressor, self).CleanUp()
def GetObject(self, name):
    """Resolve name, mapping the legacy efficiency port onto the adiabatic one."""
    # Backward compatibility: old cases referred to EFFICIENCY_PORT
    lookupName = ADIABATIC_EFF_PORT if name == EFFICIENCY_PORT else name
    return super(Compressor, self).GetObject(lookupName)
class Expander(UnitOperations.UnitOperation):
    """
    Adiabatic Expander made from ideal compressor, set and heater
    """

    def __init__(self, initScript=None):
        """Init expander - build it from IdealCompressorExpander,
        Heater and Set operations
        """
        UnitOperations.UnitOperation.__init__(self, initScript)
        # the isentropic expander
        self.ideal = IdealCompressorExpander(0)
        self.AddUnitOperation(self.ideal, 'Ideal')
        # a heater to add the waste heat to the outlet
        self.waste = Heater.Heater()
        self.AddUnitOperation(self.waste, 'Waste')
        self.waste.GetPort(DELTAP_PORT).SetValue(0.0, FIXED_V)
        # connect them
        self.ConnectPorts('Ideal', OUT_PORT, 'Waste', IN_PORT)
        # energy sensors (needed for signals)
        self.idealQ = Sensor.EnergySensor()
        self.AddUnitOperation(self.idealQ, 'IdealQ')
        self.ConnectPorts('Ideal', OUT_PORT + 'Q', 'IdealQ', IN_PORT)
        self.wasteQ = Sensor.EnergySensor()
        self.AddUnitOperation(self.wasteQ, 'WasteQ')
        self.ConnectPorts('Waste', IN_PORT + 'Q', 'WasteQ', OUT_PORT)
        self.totalQ = Sensor.EnergySensor()
        self.AddUnitOperation(self.totalQ, 'TotalQ')
        # create a signal stream for the efficiency
        self.effStream = Stream.Stream_Signal()
        self.effStream.SetParameterValue(SIGTYPE_PAR, GENERIC_VAR)
        self.AddUnitOperation(self.effStream, 'EfficiencySig')
        # set relation between ideal and total Q via the efficiency signal
        self.set = Set.Set()
        self.AddUnitOperation(self.set, 'Set')
        self.set.SetParameterValue(SIGTYPE_PAR, ENERGY_VAR)
        self.set.GetPort(Set.ADD_PORT).SetValue(0.0, FIXED_V)
        self.ConnectPorts('TotalQ', SIG_PORT, 'Set', SIG_PORT + '1')
        self.ConnectPorts('IdealQ', SIG_PORT, 'Set', SIG_PORT + '0')
        self.ConnectPorts('EfficiencySig', OUT_PORT, 'Set', Set.MULT_PORT)
        # energy stream balance: ideal Q in, waste Q and total Q out
        self.mix = Balance.BalanceOp()
        self.AddUnitOperation(self.mix, 'Mix')
        self.mix.SetParameterValue(NUSTIN_PAR + Balance.S_ENE, 1)
        self.mix.SetParameterValue(NUSTOUT_PAR + Balance.S_ENE, 2)
        self.mix.SetParameterValue(Balance.BALANCETYPE_PAR, Balance.ENERGY_BALANCE)
        # connect the mixer ports
        self.ConnectPorts('IdealQ', OUT_PORT, 'Mix', IN_PORT + 'Q0')
        self.ConnectPorts('WasteQ', IN_PORT, 'Mix', OUT_PORT + 'Q1')
        self.ConnectPorts('TotalQ', IN_PORT, 'Mix', OUT_PORT + 'Q0')
        # export the flow ports
        self.BorrowChildPort(self.ideal.GetPort(IN_PORT), IN_PORT)
        self.BorrowChildPort(self.waste.GetPort(OUT_PORT), OUT_PORT)
        self.BorrowChildPort(self.totalQ.GetPort(OUT_PORT), OUT_PORT + 'Q')
        self.BorrowChildPort(self.effStream.GetPort(IN_PORT), ADIABATIC_EFF_PORT)
        self.BorrowChildPort(self.ideal.GetPort(DELTAP_PORT), DELTAP_PORT)
        # Change the type of the energy port such that it is in Work units and scaling
        self.totalQ.GetPort(OUT_PORT).GetProperty().SetTypeByName(WORK_VAR)
        # Polytropic efficiency signal port
        self.polEffPort = self.CreatePort(SIG, POLYTROPIC_EFF_PORT)
        self.polEffPort.SetSignalType(GENERIC_VAR)
        # number of solid phases; refreshed in Solve
        self.nuSolids = 0
def Solve(self):
    """Run the expander-level calculations the child operations cannot do.

    Depending on which of inlet, outlet and ideal (isentropic) outlet are
    flashed, this solves for mole flow, for the inlet state, for the
    polytropic efficiency, or for the isentropic efficiency.
    """
    super(Expander, self).Solve()
    inPort = self.GetPort(IN_PORT)
    outPort = self.GetPort(OUT_PORT)
    # outlet of the embedded ideal (isentropic) expander
    sPort = self.ideal.GetPort(OUT_PORT)
    mf = inPort.GetPropValue(MOLEFLOW_VAR)
    self._thCaseObj = self.GetThermo()
    if not self._thCaseObj:
        return
    self.nuSolids = self.NumberSolidPhases()
    inFlashed = inPort.AlreadyFlashed()
    outFlashed = outPort.AlreadyFlashed()
    sFlashed = sPort.AlreadyFlashed()
    if mf is None:
        if inFlashed and sFlashed:
            self.SolveForMoleFlow(inPort, outPort, sPort)
            mf = inPort.GetPropValue(MOLEFLOW_VAR)
    if not inFlashed and outFlashed and mf is not None:
        self.SolveForInlet(inPort, outPort, sPort)
    # Solve for polytropic eff if in and out are known
    if inFlashed and outFlashed:
        self.SolveForPolytropicEff(inPort, outPort, sPort)
        return
    # Solve for isentropic efficiency if isentropic port is known
    if sFlashed and inFlashed and not outFlashed:
        # Solve for efficiency
        self.SolveForIsentropicEff(inPort, outPort, sPort)
def SolveForInlet(self, inPort, outPort, sPort):
    """Back-calculate the inlet enthalpy given the outlet state and an efficiency.

    Newton-iterates on h0 until the efficiency relation
    (hs - h0) == (h1 - h0) / isEff is satisfied, where hs is the enthalpy
    of the isentropic outlet recomputed for each trial h0.
    """
    h1 = outPort.GetPropValue(H_VAR)
    p1 = outPort.GetPropValue(P_VAR)
    p0 = inPort.GetPropValue(P_VAR)
    mf = inPort.GetPropValue(MOLEFLOW_VAR)
    fracs = inPort.GetCompositionValues()
    # Which efficiency is known?
    isEff = self.GetPort(ADIABATIC_EFF_PORT).GetValue()
    usesPolytropic = 0
    if isEff is None:
        polEff = self.polEffPort.GetValue()
        if not polEff:
            return
        else:
            usesPolytropic = 1
            ps = p1
    if None in (h1, p1, p0, mf) or (fracs is None) or (None in fracs):
        return
    # Initialize h0 with the outlet enthalpy as first guess
    h0 = h1
    maxIter = 30
    iterations = 0
    converged = 0
    scale = 1000.0       # scales the enthalpy residual to the tolerance
    tolerance = 1.0E-6
    shift = 1.0          # perturbation for the numerical derivative (kJ/kmol)
    maxStep = 5000.0     # clamp on the Newton step to keep the flash stable
    while not converged and iterations < maxIter:
        iterations += 1
        s0, v0 = self.GetPropertiesPH(p0, h0, fracs, (S_VAR, molarV_VAR))
        hs, vs = self.GetPropertiesPS(p1, s0, fracs, (H_VAR, molarV_VAR))
        if usesPolytropic:
            isEff, status = self.FromPolyToIsenEff(polEff, p0, h0, v0, ps, vs, hs, fracs)
            if isEff is None:
                self.InfoMessage('CouldNotConverge', self.GetPath(), status[0])
                return
        err = (hs - h0) - (h1 - h0) / isEff
        err /= scale
        # Leave if converged
        if abs(err) <= tolerance:
            converged = 1
            break
        # Calculate crude derivative with a perturbed h0
        h0Temp = h0 + shift
        s0, v0 = self.GetPropertiesPH(p0, h0Temp, fracs, (S_VAR, molarV_VAR))
        hs, vs = self.GetPropertiesPS(p1, s0, fracs, (H_VAR, molarV_VAR))
        if usesPolytropic:
            # NOTE(review): this call passes h0, not h0Temp, even though v0,
            # hs and vs were evaluated at h0Temp -- confirm this is intended
            isEff, status = self.FromPolyToIsenEff(polEff, p0, h0, v0, ps, vs, hs, fracs)
            if isEff is None:
                self.InfoMessage('CouldNotConverge', self.GetPath(), status[0])
                return
        errTemp = (hs - h0Temp) - (h1 - h0Temp) / isEff
        errTemp /= scale
        dErr_dEff = (errTemp - err) / shift
        step = - err / dErr_dEff
        step = max(step, -maxStep)
        step = min(step, maxStep)
        h0 += step
    if converged:
        inPort.SetPropValue(H_VAR, h0, CALCULATED_V | PARENT_V)
    else:
        self.InfoMessage('CouldNotConverge', self.GetPath(), iterations)
def SolveForMoleFlow(self, inPort, outPort, sPort):
    """solve for mole flow

    Needs the inlet and isentropic-outlet enthalpies plus an efficiency
    (adiabatic directly, or derived from the polytropic one); sets the
    outlet enthalpy and, if the energy port is known, the inlet mole flow.
    """
    h0 = inPort.GetPropValue(H_VAR)  # kJ/kmol
    hs = sPort.GetPropValue(H_VAR)  # kJ/kmol
    if h0 is None or hs is None:
        return
    isEff = self.GetPort(ADIABATIC_EFF_PORT).GetValue()
    if not isEff:
        polEff = self.polEffPort.GetValue()
        if not polEff:
            return
        # See if we can get isentropic eff from the polytropic one
        mf = None
        p0 = inPort.GetPropValue(P_VAR)  # kPa
        h0 = inPort.GetPropValue(H_VAR)  # kJ/kmol
        v0 = inPort.GetPropValue(molarV_VAR)  # m3/kmol
        fracs = inPort.GetCompositionValues()
        ps = p1 = sPort.GetPropValue(P_VAR)  # kPa
        vs = sPort.GetPropValue(molarV_VAR)  # m3/kmol
        hs = sPort.GetPropValue(H_VAR)  # kJ/kmol
        if (None in (p0, h0, v0, ps, vs, hs)) or (fracs is None) or (None in fracs):
            return
        isEff, status = self.FromPolyToIsenEff(polEff, p0, h0, v0, ps, vs, hs, fracs)
        if isEff is None:
            self.InfoMessage('CouldNotConverge', self.GetPath(), status[0])
            return
        else:
            self.GetPort(ADIABATIC_EFF_PORT).SetValue(isEff, CALCULATED_V | PARENT_V)
    if not isEff:
        return
    # Calculate h1 (expander: actual dh = isentropic dh * efficiency)
    h1 = (hs - h0) * isEff + h0
    outPort.SetPropValue(H_VAR, h1, CALCULATED_V | PARENT_V)
    # Solve for mole flow if possible
    q = self.GetPort(OUT_PORT + 'Q').GetValue()  # J/s
    if q is None:
        return
    mf = q * 3.6 / (h1 - h0)  # kmol/h
    inPort.SetPropValue(MOLEFLOW_VAR, mf, CALCULATED_V | PARENT_V)
def SolveForPolytropicEff(self, inPort, outPort, sPort):
    """Solve for the polytropic efficiency for a given in, out and isentropic port"""
    isEff = self.GetPort(ADIABATIC_EFF_PORT).GetValue()
    if not isEff:
        return
    # Gather the inlet (0), outlet (1) and isentropic (s) states
    mf = inPort.GetPropValue(MOLEFLOW_VAR)  # kmol/h
    p0 = inPort.GetPropValue(P_VAR)  # kPa
    h0 = inPort.GetPropValue(H_VAR)  # kJ/kmol
    v0 = inPort.GetPropValue(molarV_VAR)  # m3/kmol
    p1 = outPort.GetPropValue(P_VAR)  # kPa
    h1 = outPort.GetPropValue(H_VAR)  # kJ/kmol
    v1 = outPort.GetPropValue(molarV_VAR)  # m3/kmol
    vs = sPort.GetPropValue(molarV_VAR)  # m3/kmol
    hs = sPort.GetPropValue(H_VAR)  # kJ/kmol
    if None in (mf, p0, h0, v0, p1, h1, v1, vs, hs):
        return
    try:
        polEff = self.CalcPolytropicEff(p0, h0, v0, p1, h1, v1, vs, hs)
        self.polEffPort.SetValue(polEff, CALCULATED_V)
    except Exception:
        # Best effort: the correlation can fail (log of a non-positive ratio,
        # zero denominator).  Was a bare except; narrowed so that
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        pass
def SolveForIsentropicEff(self, inPort, outPort, sPort):
    """Back out the adiabatic (isentropic) efficiency from the polytropic one."""
    polEff = self.polEffPort.GetValue()
    if not polEff:
        return
    # Inlet (0) and isentropic-outlet (s) states
    mf = inPort.GetPropValue(MOLEFLOW_VAR)  # kmol/h
    p0 = inPort.GetPropValue(P_VAR)  # kPa
    h0 = inPort.GetPropValue(H_VAR)  # kJ/kmol
    v0 = inPort.GetPropValue(molarV_VAR)  # m3/kmol
    fracs = inPort.GetCompositionValues()
    ps = p1 = sPort.GetPropValue(P_VAR)  # kPa
    vs = sPort.GetPropValue(molarV_VAR)  # m3/kmol
    hs = sPort.GetPropValue(H_VAR)  # kJ/kmol
    missing = (None in (mf, p0, h0, v0, ps, vs, hs)) or (fracs is None) or (None in fracs)
    if missing:
        return
    isEff, status = self.FromPolyToIsenEff(polEff, p0, h0, v0, ps, vs, hs, fracs)
    if isEff is None:
        self.InfoMessage('CouldNotConverge', self.GetPath(), status[0])
    else:
        self.GetPort(ADIABATIC_EFF_PORT).SetValue(isEff, CALCULATED_V | PARENT_V)
def FromPolyToIsenEff(self, *args):
    """Calculate the isentropic efficiency that matches a given polytropic efficiency.

    args = (polEff, p0, h0, v0, ps, vs, hs, fracs), where 0 denotes the inlet
    state and s the isentropic outlet state (ps is also the actual outlet P).
    Returns (isEff, status) on convergence or (None, status) otherwise; status
    is a 1-tuple holding the number of iterations used.
    """
    polEff, p0, h0, v0, ps, vs, hs, fracs = args
    p1 = ps
    # Iterate on isentropic efficiency until polytropic is matched
    isEff = polEff  # polytropic value is a reasonable first guess
    dhs = hs - h0   # isentropic enthalpy change
    maxIter = 30
    iterations = 0
    converged = 0
    tolerance = 1.0E-6
    shift = 0.0001  # perturbation for the numerical derivative
    while not converged and iterations < maxIter:
        iterations += 1
        # Expander convention: actual dh = isentropic dh * efficiency
        h1 = dhs * isEff + h0
        v1 = self.GetPropertiesPH(ps, h1, fracs, (molarV_VAR,))[0]
        iterPolEff = self.CalcPolytropicEff(p0, h0, v0, p1, h1, v1, vs, hs)
        err = iterPolEff - polEff
        # Leave if converged
        if abs(err) <= tolerance:
            converged = 1
            break
        # Calculate crude derivative (forward difference) and take a Newton step
        h1 = dhs * (isEff + shift) + h0
        v1 = self.GetPropertiesPH(ps, h1, fracs, (molarV_VAR,))[0]
        iterPolEff = self.CalcPolytropicEff(p0, h0, v0, p1, h1, v1, vs, hs)
        errTemp = iterPolEff - polEff
        dErr_dEff = (errTemp - err) / shift
        isEff = isEff - err / dErr_dEff
    status = (iterations,)
    if not converged:
        return None, status
    else:
        return isEff, status
def CalcPolytropicEff(self, *args):
    """Polytropic efficiency from inlet (0), actual outlet (1) and isentropic (s) states.

    Expander form: actual enthalpy change divided by the polytropic work.
    """
    p0, h0, v0, p1, h1, v1, vs, hs = args
    lnPRatio = math.log(p1 / p0)
    # Polytropic exponents along the actual and the isentropic paths
    n = lnPRatio / math.log(v0 / v1)
    ns = lnPRatio / math.log(v0 / vs)
    # Correction factor obtained from the isentropic path
    f = (hs - h0) / ((ns / (ns - 1.0)) * (p1 * vs - p0 * v0))
    # Polytropic work along the actual path
    wp = (n / (n - 1.0)) * (p1 * v1 - p0 * v0) * f
    return (h1 - h0) / wp
def GetPropertiesPH(self, p, h, fracs, props, phase=OVERALL_PHASE):
    """Return the requested props for a (P, H) state with composition fracs."""
    thCaseObj = self._thCaseObj
    thAdmin = thCaseObj.thermoAdmin
    prov = thCaseObj.provider
    case = thCaseObj.case
    if self.nuSolids:
        # Solids present -> must run a full flash rather than a direct call
        matDict = MaterialPropertyDict()
        matDict[P_VAR].SetValue(p, FIXED_V)
        matDict[H_VAR].SetValue(h, FIXED_V)
        cmps = CompoundList(None)
        for _ in range(len(fracs)):
            cmps.append(BasicProperty(FRAC_VAR))
        cmps.SetValues(fracs, FIXED_V)
        liqPhases = 1
        results = thAdmin.Flash(prov, case, cmps, matDict, liqPhases, props, nuSolids=self.nuSolids)
        return results.bulkProps
    # No solids -> direct property call on the provider
    return thAdmin.GetProperties(prov, case, [P_VAR, p], [H_VAR, h], phase, fracs, props)
def GetPropertiesPS(self, p, s, fracs, props, phase=OVERALL_PHASE):
    """Return the requested props for a (P, S) state with composition fracs."""
    thCaseObj = self._thCaseObj
    thAdmin = thCaseObj.thermoAdmin
    prov = thCaseObj.provider
    case = thCaseObj.case
    if self.nuSolids:
        # Solids present -> must run a full flash rather than a direct call
        matDict = MaterialPropertyDict()
        matDict[P_VAR].SetValue(p, FIXED_V)
        matDict[S_VAR].SetValue(s, FIXED_V)
        cmps = CompoundList(None)
        for _ in range(len(fracs)):
            cmps.append(BasicProperty(FRAC_VAR))
        cmps.SetValues(fracs, FIXED_V)
        liqPhases = 1
        results = thAdmin.Flash(prov, case, cmps, matDict, liqPhases, props, nuSolids=self.nuSolids)
        return results.bulkProps
    # No solids -> direct property call on the provider
    return thAdmin.GetProperties(prov, case, [P_VAR, p], [S_VAR, s], phase, fracs, props)
def CleanUp(self):
    """Drop references to child operations and the cached thermo case."""
    self.ideal = None
    self.waste = None
    self.idealQ = None
    self.wasteQ = None
    self.totalQ = None
    self.effStream = None
    self.set = None
    self.mix = None
    self._thCaseObj = None
    super(Expander, self).CleanUp()
def GetObject(self, name):
    """Resolve name, mapping the legacy efficiency port onto the adiabatic one."""
    # Backward compatibility: old cases referred to EFFICIENCY_PORT
    lookupName = ADIABATIC_EFF_PORT if name == EFFICIENCY_PORT else name
    return super(Expander, self).GetObject(lookupName)
class CompressorWithCurve(UnitOperations.UnitOperation):
    """Compressor driven by performance curves (flow / head / efficiency / power)."""

    def __init__(self, initScript=None):
        """Build the flowsheet: a Compressor plus sensors, a Set for the
        pressure relation, a lookup table for the curves and an equation
        unit converting head to delta P.
        """
        # A Compressor
        # with Compressor curves, do not preserve entropy
        super(CompressorWithCurve, self).__init__(initScript)
        compressor = self.HCompressor = Compressor()
        self.AddUnitOperation(compressor, 'IsenthalpicCompressor')
        compressor.SetParameterValue(ISENTROPIC_PAR, 0)
        # Inlet P sensor
        self.InPSensor = Sensor.PropertySensor()
        self.AddUnitOperation(self.InPSensor, 'InPSensor')
        self.InPSensor.SetParameterValue(SIGTYPE_PAR, P_VAR)
        # Outlet P sensor
        self.OutPSensor = Sensor.PropertySensor()
        self.AddUnitOperation(self.OutPSensor, 'OutPSensor')
        self.OutPSensor.SetParameterValue(SIGTYPE_PAR, P_VAR)
        # A set to set the delP between the inlet and outlet P
        self.SetP = Set.Set()
        self.AddUnitOperation(self.SetP, 'SetP')
        self.SetP.SetParameterValue(SIGTYPE_PAR, P_VAR)
        self.SetP.GetPort(Set.MULT_PORT).SetValue(1.0, FIXED_V)
        # A table lookup unit
        lookupTable = self.LookupTable = Pump.LookupTable()
        self.AddUnitOperation(lookupTable, 'LookupTable')
        lookupTable.SetParameterValue(NUMBSERIES_PAR, 4)  # 4 series: vol flow, head, efficiency and power
        lookupTable.SetParameterValue(EXTRAPOLATE_PAR + str(2), 0)  # do not extrapolate efficiency
        lookupTable.SetParameterValue(SERIESTYPE_PAR + str(0), VOLFLOW_VAR)
        lookupTable.SetParameterValue(SERIESTYPE_PAR + str(1), LENGTH_VAR)
        lookupTable.SetParameterValue(SERIESTYPE_PAR + str(3), ENERGY_VAR)  # actually power
        lookupTable.SetParameterValue(TABLETAGTYPE_PAR, GENERIC_VAR)  # i do not yet have a RPM
        # A flow sensor
        self.FlowSensor = Sensor.PropertySensor()
        self.AddUnitOperation(self.FlowSensor, 'FlowSensor')
        self.FlowSensor.SetParameterValue(SIGTYPE_PAR, VOLFLOW_VAR)
        # An equation unit to convert the head to delP
        # delP = MW * 0.00981 * head / molarVol
        # Note cannot back out mass density from head and delta pressure
        calcDP = self.CalcDelP = Pump.EquationUnit()
        self.AddUnitOperation(calcDP, 'CalcDelP')
        calcDP.SetParameterValue(NUMBSIG_PAR, 4)
        calcDP.SetParameterValue(TRANSFORM_EQT_PAR + str(0), 'x[1]*x[3]/0.00981/x[2]')
        calcDP.SetParameterValue(TRANSFORM_EQT_PAR + str(1), 'x[0]*x[2]*0.00981/x[3]')
        # x[0] = head
        # x[1] = delta pressure
        # x[2] = molecular weight
        # x[3] = molarVol
        # A molarVol sensor for calculating delP from head
        self.MolarVolSensor = Sensor.PropertySensor()
        self.AddUnitOperation(self.MolarVolSensor, 'MolarVolSensor')
        self.MolarVolSensor.SetParameterValue(SIGTYPE_PAR, molarV_VAR)
        # A MW sensor for calculating delP from head
        self.MWSensor = Sensor.PropertySensor()
        self.AddUnitOperation(self.MWSensor, 'MWSensor')
        self.MWSensor.SetParameterValue(SIGTYPE_PAR, MOLE_WT)
        # An energy sensor for setting the Q from the lookup table
        self.TotalQ = Sensor.EnergySensor()
        self.AddUnitOperation(self.TotalQ, 'TotalQ')
        # Connect them all
        self.ConnectPorts('FlowSensor', OUT_PORT, 'MolarVolSensor', IN_PORT)
        self.ConnectPorts('MolarVolSensor', OUT_PORT, 'MWSensor', IN_PORT)
        self.ConnectPorts('MWSensor', OUT_PORT, 'InPSensor', IN_PORT)
        self.ConnectPorts('InPSensor', OUT_PORT, 'IsenthalpicCompressor', IN_PORT)
        self.ConnectPorts('IsenthalpicCompressor', IN_PORT + 'Q', 'TotalQ', OUT_PORT)
        self.ConnectPorts('IsenthalpicCompressor', OUT_PORT, 'OutPSensor', IN_PORT)
        self.ConnectPorts('InPSensor', SIG_PORT, 'SetP', SIG_PORT + '0')
        self.ConnectPorts('OutPSensor', SIG_PORT, 'SetP', SIG_PORT + '1')
        self.ConnectPorts('FlowSensor', SIG_PORT, 'LookupTable', SIG_PORT + '0')
        self.ConnectPorts('TotalQ', SIG_PORT, 'LookupTable', SIG_PORT + '3')
        self.ConnectPorts('CalcDelP', SIG_PORT + '1', 'SetP', Set.ADD_PORT)
        self.ConnectPorts('CalcDelP', SIG_PORT + '2', 'MWSensor', SIG_PORT)
        self.ConnectPorts('CalcDelP', SIG_PORT + '3', 'MolarVolSensor', SIG_PORT)
        self.BorrowChildPort(self.TotalQ.GetPort(IN_PORT), IN_PORT + 'Q')
        self.BorrowChildPort(self.OutPSensor.GetPort(OUT_PORT), OUT_PORT)
        self.BorrowChildPort(self.FlowSensor.GetPort(IN_PORT), IN_PORT)
        self.BorrowChildPort(self.HCompressor.GetPort(DELTAP_PORT), DELTAP_PORT)
        self.BorrowChildPort(self.LookupTable.GetPort(SPEC_TAG_PORT), self.GetNameForSpeedPort())
        # Change the type of the energy port such that it is in Work units and scaling
        self.TotalQ.GetPort(IN_PORT).GetProperty().SetTypeByName(WORK_VAR)
        # Adiabatic efficiency signals
        # Do not connect the efficiency port to the look up table here. Let the parameter setting do it
        adEffSt = self.effStream = Stream.Stream_Signal()
        adEffSt.SetParameterValue(SIGTYPE_PAR, GENERIC_VAR)
        self.AddUnitOperation(adEffSt, 'EfficiencySig')
        adEffSt.AddObject(Stream.ClonePort(), 'effClone')
        adEffSt.GetPort(OUT_PORT).ConnectTo(compressor.GetPort(ADIABATIC_EFF_PORT))
        self.BorrowChildPort(adEffSt.GetPort('effClone'), ADIABATIC_EFF_PORT)
        # Polytropic efficiency signals
        # Do not connect the efficiency port to the look up table here. Let the parameter setting do it
        polEffSt = self.polEffStream = Stream.Stream_Signal()
        polEffSt.SetParameterValue(SIGTYPE_PAR, GENERIC_VAR)
        self.AddUnitOperation(polEffSt, 'PolytropicEffSig')
        polEffSt.AddObject(Stream.ClonePort(), 'effClone')
        polEffSt.GetPort(OUT_PORT).ConnectTo(compressor.GetPort(POLYTROPIC_EFF_PORT))
        self.BorrowChildPort(polEffSt.GetPort('effClone'), POLYTROPIC_EFF_PORT)
        # self.ConnectPorts('EfficiencySig', IN_PORT, 'LookupTable', SIG_PORT + '2')
        # self.ConnectPorts('EfficiencySig', OUT_PORT, 'IsenthalpicCompressor', ADIABATIC_EFF_PORT)
        # clone and borrow the lookuptable's head port
        headStream = self.headStream = Stream.Stream_Signal()
        headStream.SetParameterValue(SIGTYPE_PAR, LENGTH_VAR)
        self.AddUnitOperation(headStream, 'HeadSig')
        headStream.AddObject(Stream.ClonePort(), 'headClone')
        headStream.GetPort(IN_PORT).ConnectTo(lookupTable.GetPort(SIG_PORT + '1'))
        headStream.GetPort(OUT_PORT).ConnectTo(calcDP.GetPort(SIG_PORT + '0'))
        self.BorrowChildPort(headStream.GetPort('headClone'), HEAD_PORT)
        # self.ConnectPorts('HeadSig', IN_PORT, 'LookupTable', SIG_PORT + '1')
        # self.ConnectPorts('HeadSig', OUT_PORT, 'CalcDelP', SIG_PORT + '0')
        # parameters
        # default: no Compressor curve
        self.SetParameterValue(NUMBTABLE_PAR, 0)
        self.SetParameterValue(EFFCURVETYPE_PAR, ADIABATIC_TYPE)
def CleanUp(self):
    """Release references to all child operations before tearing down."""
    self.HCompressor = None
    self.InPSensor = None
    self.OutPSensor = None
    self.SetP = None
    self.LookupTable = None
    self.FlowSensor = None
    self.CalcDelP = None
    self.MolarVolSensor = None
    self.MWSensor = None
    self.TotalQ = None
    self.effStream = None
    self.headStream = None
    self.polEffStream = None
    super(CompressorWithCurve, self).CleanUp()
def GetNameForSpeedPort(self):
    """Return the name under which the lookup table's tag port is exported."""
    return COMPRESSORSPEED_PORT
def GetObject(self, name):
    """Resolve name with shortcuts into the compressor-curve tables.

    CompressorWithCurve.EfficiencyCurveX maps to
    CompressorWithCurve.LookupTable.TableX.Series2 (X = curve table index).
    """
    # Backward compatibility: old cases referred to EFFICIENCY_PORT
    if name == EFFICIENCY_PORT:
        name = ADIABATIC_EFF_PORT
    obj = UnitOperations.UnitOperation.GetObject(self, name)
    if obj:
        return obj
    # Table-driven replacement for five copy-pasted prefix branches:
    # name prefix -> object looked up on TableX (series index or the table tag)
    childByPrefix = ((FLOW_SERIES, SERIES_OBJ + str(0)),
                     (HEAD_SERIES, SERIES_OBJ + str(1)),
                     (EFFICIENCY_SERIES, SERIES_OBJ + str(2)),
                     (POWER_SERIES, SERIES_OBJ + str(3)),
                     (self.GetNameForSpeedPort(), TABLETAG_VAR))
    for prefix, childName in childByPrefix:
        if name[:len(prefix)] == prefix:
            idx = name[len(prefix):]
            tbl = self.LookupTable.GetObject(TABLE_OBJ + idx)
            if tbl:
                obj = tbl.GetObject(childName)
            break
    return obj
def GetContents(self):
    """List contents, exposing the four curve series and tag of each table."""
    result = super(CompressorWithCurve, self).GetContents()
    seriesNames = (FLOW_SERIES, HEAD_SERIES, EFFICIENCY_SERIES, POWER_SERIES)
    for i in range(self.LookupTable.GetTableCount()):
        tbl = self.LookupTable.GetObject(TABLE_OBJ + str(i))
        for j, seriesName in enumerate(seriesNames):
            result.append(('%s%d' % (seriesName, i), tbl.GetObject(SERIES_OBJ + str(j))))
        result.append(('%s%d' % (self.GetNameForSpeedPort(), i), tbl.GetObject(TABLETAG_VAR)))
    return result
def SetParameterValue(self, paramName, value):
    """Set the value of a parameter.

    NUMBTABLE_PAR is forwarded to the lookup table first and then stored;
    IGNORECURVE_PAR toggles the lookup table and clears the head spec;
    EFFCURVETYPE_PAR rewires which efficiency signal the curves feed.
    """
    if paramName == NUMBTABLE_PAR:
        self.LookupTable.SetParameterValue(paramName, value)
        super(CompressorWithCurve, self).SetParameterValue(paramName, value)
    elif paramName == IGNORECURVE_PAR:
        # ...ignore the lookuptable and remove any specifications
        if value == 'None':
            value = None
        self.LookupTable.SetParameterValue(IGNORED_PAR, value)
        if value:
            # Curves ignored -> clear any fixed head specification
            port = self.GetPort(HEAD_PORT)
            port.SetValue(None, FIXED_V)
    elif paramName == EFFCURVETYPE_PAR:
        if value != ADIABATIC_TYPE and value != POLYTROPIC_TYPE:
            value = ADIABATIC_TYPE
        if EFFCURVETYPE_PAR in self.parameters:
            if self.parameters[EFFCURVETYPE_PAR] == value:
                # No change -> avoid rewiring the signal connections
                return
        # Put the parameter in
        super(CompressorWithCurve, self).SetParameterValue(paramName, value)
        # Update the connection of the efficiency curve
        self.UpdateEffCurveConnection()
    else:
        super(CompressorWithCurve, self).SetParameterValue(paramName, value)
def DeleteObject(self, obj):
    """Refuse to delete the efficiency-curve-type parameter; delegate the rest."""
    isParam = isinstance(obj, UnitOperations.OpParameter)
    if isParam and obj.GetName() == EFFCURVETYPE_PAR:
        # This parameter must always exist
        self.InfoMessage('CantDeleteObject', (obj.GetPath(),), MessageHandler.errorMessage)
        return
    super(CompressorWithCurve, self).DeleteObject(obj)
def UpdateEffCurveConnection(self):
    """Wire the lookup table's efficiency series (Sig2) to the adiabatic or
    polytropic efficiency signal, according to EFFCURVETYPE_PAR."""
    curveType = self.GetParameterValue(EFFCURVETYPE_PAR)
    if curveType not in (ADIABATIC_TYPE, POLYTROPIC_TYPE):
        # Fall back to adiabatic and keep the stored parameter consistent
        self.parameters[EFFCURVETYPE_PAR] = ADIABATIC_TYPE
        curveType = ADIABATIC_TYPE
    if curveType == ADIABATIC_TYPE:
        stream = self.effStream
    else:
        stream = self.polEffStream
    stream.GetPort(IN_PORT).ConnectTo(self.LookupTable.GetPort(SIG_PORT + '2'))
def Minus(self, varName):
    """Remove a compressor curve (varName = COMPRESSORCURVE_OBJ + table index).

    NOTE(review): int() raises ValueError on a malformed index; the disabled
    try/except below suggests that used to be swallowed -- confirm intent.
    """
    # remove a Compressor curve
    if varName[:len(COMPRESSORCURVE_OBJ)] == COMPRESSORCURVE_OBJ:
        # try:
        idx = int(varName[len(COMPRESSORCURVE_OBJ):])
        if self.LookupTable.Minus(idx):
            # Re-set the table count so dependent parameters stay in sync
            n = self.LookupTable.GetParameterValue(NUMBTABLE_PAR)
            self.SetParameterValue(NUMBTABLE_PAR, n)
        # except:
        # pass
class ExpanderWithCurve(UnitOperations.UnitOperation):
    """Expander driven by performance curves (flow / head / efficiency / power)."""

    def __init__(self, initScript=None):
        """Build the flowsheet: an Expander plus sensors, a Set for the
        pressure relation, a lookup table for the curves and an equation
        unit converting head to delta P (negated relative to the compressor).
        """
        # A Expander
        # with Expander curves, do not preserve entropy
        super(ExpanderWithCurve, self).__init__(initScript)
        expander = self.HExpander = Expander()
        self.AddUnitOperation(self.HExpander, 'IsenthalpicExpander')
        expander.SetParameterValue(ISENTROPIC_PAR, 0)
        # Inlet P sensor
        self.InPSensor = Sensor.PropertySensor()
        self.AddUnitOperation(self.InPSensor, 'InPSensor')
        self.InPSensor.SetParameterValue(SIGTYPE_PAR, P_VAR)
        # Outlet P sensor
        self.OutPSensor = Sensor.PropertySensor()
        self.AddUnitOperation(self.OutPSensor, 'OutPSensor')
        self.OutPSensor.SetParameterValue(SIGTYPE_PAR, P_VAR)
        # A set to set the delP between the inlet and outlet P
        self.SetP = Set.Set()
        self.AddUnitOperation(self.SetP, 'SetP')
        self.SetP.SetParameterValue(SIGTYPE_PAR, P_VAR)
        self.SetP.GetPort(Set.MULT_PORT).SetValue(1.0, FIXED_V)
        # A table lookup unit
        lookupTable = self.LookupTable = Pump.LookupTable()
        self.AddUnitOperation(lookupTable, 'LookupTable')
        lookupTable.SetParameterValue(NUMBSERIES_PAR, 4)  # 4 series: vol flow, head, efficiency and power
        lookupTable.SetParameterValue(EXTRAPOLATE_PAR + str(2), 0)  # do not extrapolate efficiency
        lookupTable.SetParameterValue(SERIESTYPE_PAR + str(0), VOLFLOW_VAR)
        lookupTable.SetParameterValue(SERIESTYPE_PAR + str(1), LENGTH_VAR)
        lookupTable.SetParameterValue(SERIESTYPE_PAR + str(3), ENERGY_VAR)  # actually power
        lookupTable.SetParameterValue(TABLETAGTYPE_PAR, GENERIC_VAR)  # i do not yet have a RPM
        # A flow sensor
        self.FlowSensor = Sensor.PropertySensor()
        self.AddUnitOperation(self.FlowSensor, 'FlowSensor')
        self.FlowSensor.SetParameterValue(SIGTYPE_PAR, VOLFLOW_VAR)
        # An equation unit to convert the head to delP
        # delP = MW * 0.00981 * head / molarVol
        # Note cannot back out mass density from head and delta pressure
        calcDP = self.CalcDelP = Pump.EquationUnit()
        self.AddUnitOperation(calcDP, 'CalcDelP')
        calcDP.SetParameterValue(NUMBSIG_PAR, 4)
        calcDP.SetParameterValue(TRANSFORM_EQT_PAR + str(0), '-x[1]*x[3]/0.00981/x[2]')
        calcDP.SetParameterValue(TRANSFORM_EQT_PAR + str(1), '-x[0]*x[2]*0.00981/x[3]')
        # x[0] = head
        # x[1] = delta pressure
        # x[2] = molecular weight
        # x[3] = molarVol
        # A molarVol sensor for calculating delP from head
        self.MolarVolSensor = Sensor.PropertySensor()
        self.AddUnitOperation(self.MolarVolSensor, 'MolarVolSensor')
        self.MolarVolSensor.SetParameterValue(SIGTYPE_PAR, molarV_VAR)
        # A MW sensor for calculating delP from head
        self.MWSensor = Sensor.PropertySensor()
        self.AddUnitOperation(self.MWSensor, 'MWSensor')
        self.MWSensor.SetParameterValue(SIGTYPE_PAR, MOLE_WT)
        # An energy sensor for setting the Q from the lookup table
        self.TotalQ = Sensor.EnergySensor()
        self.AddUnitOperation(self.TotalQ, 'TotalQ')
        # Connect them all
        self.ConnectPorts('FlowSensor', OUT_PORT, 'MolarVolSensor', IN_PORT)
        self.ConnectPorts('MolarVolSensor', OUT_PORT, 'MWSensor', IN_PORT)
        self.ConnectPorts('MWSensor', OUT_PORT, 'InPSensor', IN_PORT)
        self.ConnectPorts('InPSensor', OUT_PORT, 'IsenthalpicExpander', IN_PORT)
        # self.ConnectPorts('IsenthalpicExpander', ADIABATIC_EFF_PORT, 'LookupTable', SIG_PORT + '2')
        self.ConnectPorts('IsenthalpicExpander', OUT_PORT + 'Q', 'TotalQ', IN_PORT)
        self.ConnectPorts('IsenthalpicExpander', OUT_PORT, 'OutPSensor', IN_PORT)
        self.ConnectPorts('InPSensor', SIG_PORT, 'SetP', SIG_PORT + '0')
        self.ConnectPorts('OutPSensor', SIG_PORT, 'SetP', SIG_PORT + '1')
        self.ConnectPorts('FlowSensor', SIG_PORT, 'LookupTable', SIG_PORT + '0')
        self.ConnectPorts('TotalQ', SIG_PORT, 'LookupTable', SIG_PORT + '3')
        # self.ConnectPorts('CalcDelP', SIG_PORT + '0', 'LookupTable', SIG_PORT + '1')
        self.ConnectPorts('CalcDelP', SIG_PORT + '1', 'SetP', Set.ADD_PORT)
        self.ConnectPorts('CalcDelP', SIG_PORT + '2', 'MWSensor', SIG_PORT)
        self.ConnectPorts('CalcDelP', SIG_PORT + '3', 'MolarVolSensor', SIG_PORT)
        self.BorrowChildPort(self.TotalQ.GetPort(OUT_PORT), OUT_PORT + 'Q')
        self.BorrowChildPort(self.OutPSensor.GetPort(OUT_PORT), OUT_PORT)
        self.BorrowChildPort(self.FlowSensor.GetPort(IN_PORT), IN_PORT)
        self.BorrowChildPort(self.HExpander.GetPort(DELTAP_PORT), DELTAP_PORT)
        self.BorrowChildPort(self.LookupTable.GetPort(SPEC_TAG_PORT), EXPANDERSPEED_PORT)
        # Change the type of the energy port such that it is in Work units and scaling
        self.TotalQ.GetPort(OUT_PORT).GetProperty().SetTypeByName(WORK_VAR)
        # Adiabatic efficiency signals
        # Do not connect the efficiency port to the look up table here. Let the parameter setting do it
        adEffSt = self.effStream = Stream.Stream_Signal()
        adEffSt.SetParameterValue(SIGTYPE_PAR, GENERIC_VAR)
        self.AddUnitOperation(adEffSt, 'EfficiencySig')
        adEffSt.AddObject(Stream.ClonePort(), 'effClone')
        adEffSt.GetPort(OUT_PORT).ConnectTo(expander.GetPort(ADIABATIC_EFF_PORT))
        self.BorrowChildPort(adEffSt.GetPort('effClone'), ADIABATIC_EFF_PORT)
        # Polytropic efficiency signals
        # Do not connect the efficiency port to the look up table here. Let the parameter setting do it
        # Connect the Out port of the signal stream to the expander. An older
        # version connected the cloned port instead, but there is nothing
        # wrong with this
        polEffSt = self.polEffStream = Stream.Stream_Signal()
        polEffSt.SetParameterValue(SIGTYPE_PAR, GENERIC_VAR)
        self.AddUnitOperation(polEffSt, 'PolytropicEffSig')
        polEffSt.AddObject(Stream.ClonePort(), 'effClone')
        polEffSt.GetPort(OUT_PORT).ConnectTo(expander.GetPort(POLYTROPIC_EFF_PORT))
        self.BorrowChildPort(polEffSt.GetPort('effClone'), POLYTROPIC_EFF_PORT)
        # self.effStream = Stream.Stream_Signal()
        # self.effStream.SetParameterValue(SIGTYPE_PAR, GENERIC_VAR)
        # self.AddUnitOperation(self.effStream, 'EfficiencySig')
        # effClone = Stream.ClonePort()
        # self.effStream.AddObject(effClone, 'effClone')
        # self.ConnectPorts('EfficiencySig', IN_PORT, 'LookupTable', SIG_PORT + '2')
        # self.ConnectPorts('EfficiencySig', OUT_PORT, 'IsenthalpicExpander', ADIABATIC_EFF_PORT)
        # self.BorrowChildPort(self.effStream.GetPort('effClone'), ADIABATIC_EFF_PORT)
        # self.BorrowChildPort(self.HExpander.GetPort(POLYTROPIC_EFF_PORT), POLYTROPIC_EFF_PORT)
        # clone and borrow the lookuptable's head port
        headStream = self.headStream = Stream.Stream_Signal()
        headStream.SetParameterValue(SIGTYPE_PAR, LENGTH_VAR)
        self.AddUnitOperation(headStream, 'HeadSig')
        headStream.AddObject(Stream.ClonePort(), 'headClone')
        headStream.GetPort(IN_PORT).ConnectTo(lookupTable.GetPort(SIG_PORT + '1'))
        headStream.GetPort(OUT_PORT).ConnectTo(calcDP.GetPort(SIG_PORT + '0'))
        self.BorrowChildPort(headStream.GetPort('headClone'), HEAD_PORT)
        # parameters
        # default: no Expander curve
        self.SetParameterValue(NUMBTABLE_PAR, 0)
        self.SetParameterValue(EFFCURVETYPE_PAR, ADIABATIC_TYPE)
def CleanUp(self):
    """Release references to all child operations before tearing down."""
    self.HExpander = self.InPSensor = self.OutPSensor = None
    self.SetP = self.LookupTable = self.FlowSensor = self.CalcDelP = None
    self.MolarVolSensor = self.MWSensor = self.TotalQ = None
    # Fixed: the old chained assignment also bound a stray local 'effClone'
    self.effStream = self.headStream = None
    self.polEffStream = None
    super(ExpanderWithCurve, self).CleanUp()
def GetObject(self, name):
    """Resolve name with shortcuts into the expander-curve tables.

    ExpanderWithCurve.EfficiencyCurveX maps to
    ExpanderWithCurve.LookupTable.TableX.Series2 (X = curve table index).
    """
    # Backward compatibility: old cases referred to EFFICIENCY_PORT
    if name == EFFICIENCY_PORT:
        name = ADIABATIC_EFF_PORT
    obj = UnitOperations.UnitOperation.GetObject(self, name)
    if obj:
        return obj
    # Table-driven replacement for five copy-pasted prefix branches:
    # name prefix -> object looked up on TableX (series index or the table tag)
    childByPrefix = ((FLOW_SERIES, SERIES_OBJ + str(0)),
                     (HEAD_SERIES, SERIES_OBJ + str(1)),
                     (EFFICIENCY_SERIES, SERIES_OBJ + str(2)),
                     (POWER_SERIES, SERIES_OBJ + str(3)),
                     (EXPANDERSPEED_PORT, TABLETAG_VAR))
    for prefix, childName in childByPrefix:
        if name[:len(prefix)] == prefix:
            idx = name[len(prefix):]
            tbl = self.LookupTable.GetObject(TABLE_OBJ + idx)
            if tbl:
                obj = tbl.GetObject(childName)
            break
    return obj
def GetContents(self):
    """List contents, exposing the four curve series and tag of each table."""
    result = super(ExpanderWithCurve, self).GetContents()
    seriesNames = (FLOW_SERIES, HEAD_SERIES, EFFICIENCY_SERIES, POWER_SERIES)
    for i in range(self.LookupTable.GetTableCount()):
        tbl = self.LookupTable.GetObject(TABLE_OBJ + str(i))
        for j, seriesName in enumerate(seriesNames):
            result.append(('%s%d' % (seriesName, i), tbl.GetObject(SERIES_OBJ + str(j))))
        result.append(('%s%d' % (EXPANDERSPEED_PORT, i), tbl.GetObject(TABLETAG_VAR)))
    return result
def SetParameterValue(self, paramName, value):
    """Set the value of a parameter.

    NUMBTABLE_PAR is propagated to the lookup table; IGNORECURVE_PAR toggles
    the lookup table on/off and clears the head spec; EFFCURVETYPE_PAR is
    validated and reconnects the efficiency curve; anything else defers to
    the base class.

    BUG FIX: the original used `if` instead of `elif` for the second chain,
    so NUMBTABLE_PAR fell through to the final `else` and the parameter was
    stored via the base class a second time.
    """
    if paramName == NUMBTABLE_PAR:
        # Keep the unit op and the lookup table in sync on the table count.
        UnitOperations.UnitOperation.SetParameterValue(self, paramName, value)
        self.LookupTable.SetParameterValue(paramName, value)
        self.ForgetAllCalculations()
    elif paramName == IGNORECURVE_PAR:
        # ...ignore the lookuptable and remove any specifications
        if value == 'None':
            value = None
        self.LookupTable.SetParameterValue(IGNORED_PAR, value)
        if value:
            port = self.GetPort(HEAD_PORT)
            port.SetValue(None, FIXED_V)
    elif paramName == EFFCURVETYPE_PAR:
        # Fall back to adiabatic when an unknown curve type is requested.
        if value != ADIABATIC_TYPE and value != POLYTROPIC_TYPE:
            value = ADIABATIC_TYPE
        if EFFCURVETYPE_PAR in self.parameters:
            if self.parameters[EFFCURVETYPE_PAR] == value:
                return
        # Put the parameter in
        super(ExpanderWithCurve, self).SetParameterValue(paramName, value)
        # Update the connection of the efficiency curve
        self.UpdateEffCurveConnection()
    else:
        super(ExpanderWithCurve, self).SetParameterValue(paramName, value)
def DeleteObject(self, obj):
    """Delete obj, refusing to remove the efficiency-curve-type parameter."""
    isParameter = isinstance(obj, UnitOperations.OpParameter)
    if isParameter and obj.GetName() == EFFCURVETYPE_PAR:
        # This parameter is required for the curve wiring; report and bail out.
        self.InfoMessage('CantDeleteObject', (obj.GetPath(),), MessageHandler.errorMessage)
        return
    super(ExpanderWithCurve, self).DeleteObject(obj)
def UpdateEffCurveConnection(self):
    """Connect the lookup table's efficiency signal to the stream that matches
    the configured curve type (adiabatic or polytropic)."""
    type_of = self.GetParameterValue(EFFCURVETYPE_PAR)
    if type_of not in (ADIABATIC_TYPE, POLYTROPIC_TYPE):
        # Unknown or missing type: default to adiabatic and record it.
        self.parameters[EFFCURVETYPE_PAR] = ADIABATIC_TYPE
        type_of = ADIABATIC_TYPE
    stream = self.effStream if type_of == ADIABATIC_TYPE else self.polEffStream
    stream.GetPort(IN_PORT).ConnectTo(self.LookupTable.GetPort(SIG_PORT + '2'))
def Minus(self, varName):
    """Remove the expander curve addressed as '<EXPANDERCURVE_OBJ><idx>'.

    Best effort: a bad index or a removal failure is silently ignored, as in
    the original code, but the bare `except:` is narrowed so SystemExit and
    KeyboardInterrupt are no longer swallowed.
    """
    if varName[:len(EXPANDERCURVE_OBJ)] != EXPANDERCURVE_OBJ:
        return
    try:
        idx = int(varName[len(EXPANDERCURVE_OBJ):])
        if self.LookupTable.Minus(idx):
            # Keep the unit-op-level table count in sync after removal.
            n = self.LookupTable.GetParameterValue(NUMBTABLE_PAR)
            self.SetParameterValue(NUMBTABLE_PAR, n)
    except Exception:
        pass
| 41.974768
| 119
| 0.616841
| 6,991
| 63,214
| 5.460878
| 0.071664
| 0.010451
| 0.020117
| 0.008487
| 0.8656
| 0.857977
| 0.843021
| 0.835136
| 0.826335
| 0.822197
| 0
| 0.013072
| 0.283561
| 63,214
| 1,505
| 120
| 42.002658
| 0.829892
| 0.1257
| 0
| 0.809254
| 0
| 0
| 0.035744
| 0.004083
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044381
| false
| 0.002833
| 0.004721
| 0
| 0.107649
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3d0e693fe4919ef15511a56808f6101c231b1c92
| 4,129
|
py
|
Python
|
main/configuration/local_config.py
|
anderswodenker/sams-app
|
d9ab3474a5b1f009afe803d14484c000059b61c3
|
[
"MIT"
] | null | null | null |
main/configuration/local_config.py
|
anderswodenker/sams-app
|
d9ab3474a5b1f009afe803d14484c000059b61c3
|
[
"MIT"
] | 1
|
2020-12-10T14:40:57.000Z
|
2020-12-10T14:40:57.000Z
|
main/configuration/local_config.py
|
anderswodenker/sams-app
|
d9ab3474a5b1f009afe803d14484c000059b61c3
|
[
"MIT"
] | null | null | null |
import configparser
import mapping
class LocalConfig:
    """Typed wrapper around the application's INI configuration file.

    The file at ``mapping.app_config`` is parsed once on construction; call
    :meth:`get_config_data` to re-read it from disk.
    """

    def __init__(self):
        self.path = mapping.app_config
        self.config = configparser.ConfigParser()
        self.config.read(self.path)
        self.config.sections()
        self._load()

    def _load(self):
        """Populate the typed attributes from the already-parsed config.

        Previously this logic was duplicated between __init__ and
        get_config_data and had drifted ('start'/'update' were not refreshed
        on reload); a single helper keeps both paths identical.
        """
        default = self.config['DEFAULT']
        # DEFAULT
        self.start = default.getboolean('start')
        self.ignore_error = default.getboolean('ignore_error')
        self.is_update = default.getboolean('update')
        self.group = default['group']
        self.is_dht22 = default.getboolean('is_dht22')
        self.is_scale = default.getboolean('is_scale')
        self.is_microphone = default.getboolean('is_microphone')
        self.is_ds18b20 = default.getboolean('is_ds18b20')
        self.auto_update = default.getboolean('auto_update')
        self.auto_shutdown = default.getboolean('auto_shutdown')
        self.debug = default.getboolean('debug')
        self.is_online = default.getboolean('is_online')
        self.timezone = default['timezone']
        self.version = default['version']
        # SCALE
        scale = self.config['SCALE']
        self.scale_ratio = scale['ratio']
        self.scale_offset = scale['offset']
        self.scale_calibrated = scale.getboolean('calibrated')
        # INTERVAL
        interval = self.config['INTERVAL']
        self.interval_median = interval['median']
        self.interval_app_wait_seconds = interval['app_wait_seconds']
        self.interval_attempts_before_restart = interval['attempts_before_restart']
        # DHT 22
        self.dht22_pin = self.config['DHT22']['dht22_pin']
        # AUDIO
        self.audio_duration = self.config['AUDIO']['duration']
        self.audio_fs = self.config['AUDIO']['fs']

    def get_config_data(self):
        """Re-read the config file; return True on success, False on IOError.

        NOTE(review): missing sections/keys still raise (e.g. KeyError) —
        only IOError was trapped before, and that behavior is preserved.
        """
        try:
            self.config.read(self.path)
            self._load()
            return True
        except IOError:
            return False

    def set_config_data(self, section, key, value):
        """Set one option (stringified) and persist the file."""
        self.config.set(section, key, str(value))
        self.write_config()

    def set_update(self):
        """Flag that an update is pending and persist the file."""
        self.config.set("DEFAULT", "update", "1")
        self.write_config()

    def write_config(self):
        """Best-effort write of the in-memory config back to disk."""
        try:
            with open(self.path, 'w') as configfile:
                self.config.write(configfile)
        except IOError:
            pass
| 42.56701
| 102
| 0.624849
| 460
| 4,129
| 5.430435
| 0.147826
| 0.204163
| 0.176942
| 0.216173
| 0.806645
| 0.775821
| 0.775821
| 0.751001
| 0.751001
| 0.751001
| 0
| 0.012901
| 0.230322
| 4,129
| 96
| 103
| 43.010417
| 0.773128
| 0.017195
| 0
| 0.714286
| 0
| 0
| 0.173956
| 0.011366
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0.014286
| 0.028571
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3d1b7e34fed30d5695a548e3857c3cee807348ee
| 693
|
py
|
Python
|
templates/includes/css.py
|
angeal185/python-website-created-with-flask-and-jinja
|
5a7c62941d5456e5c87c137fb58d06fc1ff10bcc
|
[
"MIT"
] | null | null | null |
templates/includes/css.py
|
angeal185/python-website-created-with-flask-and-jinja
|
5a7c62941d5456e5c87c137fb58d06fc1ff10bcc
|
[
"MIT"
] | null | null | null |
templates/includes/css.py
|
angeal185/python-website-created-with-flask-and-jinja
|
5a7c62941d5456e5c87c137fb58d06fc1ff10bcc
|
[
"MIT"
] | null | null | null |
<!-- Stylesheet includes: third-party CSS loaded from CDNs (Font Awesome 4.7,
     fancybox 2.1.5, Bootstrap 3.3.7, Flickity 1.0.0, Animate.css 3.5.2),
     followed last by the site's own stylesheet so it can override them. -->
<link href="https://maxcdn.bootstrapcdn.com/font-awesome/4.7.0/css/font-awesome.min.css" rel="stylesheet" type="text/css">
<link href="https://cdnjs.cloudflare.com/ajax/libs/fancybox/2.1.5/jquery.fancybox.css" rel="stylesheet" type="text/css" media="screen">
<link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css" rel="stylesheet" type="text/css">
<link href="https://cdnjs.cloudflare.com/ajax/libs/flickity/1.0.0/flickity.css" rel="stylesheet" type="text/css">
<link href="https://cdnjs.cloudflare.com/ajax/libs/animate.css/3.5.2/animate.min.css" rel="stylesheet" type="text/css">
<link href="{{ meta.staticCss }}style.css" rel="stylesheet" type="text/css">
| 99
| 136
| 0.734488
| 113
| 693
| 4.504425
| 0.309735
| 0.094303
| 0.188605
| 0.235756
| 0.715128
| 0.715128
| 0.475442
| 0.475442
| 0.475442
| 0.400786
| 0
| 0.022727
| 0.047619
| 693
| 6
| 137
| 115.5
| 0.748485
| 0
| 0
| 0
| 0
| 0.833333
| 0.724891
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1856fc0bc3cfa683b67a70c98d3bc8888c2385e3
| 439
|
py
|
Python
|
tracking/__init__.py
|
sherna90/object-tracking
|
f60703dfa674601c575c26fc23c278dfc4f855f1
|
[
"Apache-2.0"
] | 2
|
2021-05-07T01:00:59.000Z
|
2021-05-10T19:53:02.000Z
|
tracking/__init__.py
|
sherna90/object-tracking
|
f60703dfa674601c575c26fc23c278dfc4f855f1
|
[
"Apache-2.0"
] | null | null | null |
tracking/__init__.py
|
sherna90/object-tracking
|
f60703dfa674601c575c26fc23c278dfc4f855f1
|
[
"Apache-2.0"
] | null | null | null |
import os
import sys
# Make the package's sub-directories importable as top-level modules by
# prepending each of them to sys.path. The inserts happen in the same order
# as the original copy-pasted lines, so the resulting sys.path order is
# unchanged (the last entry, 'vot', ends up first).
_PKG_DIR = os.path.dirname(__file__)
for _sub in ('utils', 'models', 'features', 'detectors', 'libs', 'vot'):
    sys.path.insert(0, os.path.join(_PKG_DIR, _sub))
| 54.875
| 72
| 0.733485
| 76
| 439
| 3.921053
| 0.197368
| 0.241611
| 0.261745
| 0.281879
| 0.825503
| 0.825503
| 0.825503
| 0.825503
| 0.825503
| 0.825503
| 0
| 0.014354
| 0.047836
| 439
| 8
| 73
| 54.875
| 0.698565
| 0
| 0
| 0
| 0
| 0
| 0.079545
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
186775bda205c8f0c2c56daee2df8278616a4bde
| 85
|
py
|
Python
|
locomotion_analysis/src/workbench.py
|
sciple/neurobau
|
b54e24ad0f717b7506cc948876cba47f6768318c
|
[
"MIT"
] | 2
|
2020-05-29T02:14:40.000Z
|
2020-11-20T18:06:42.000Z
|
locomotion_analysis/src/workbench.py
|
sciple/neurobau
|
b54e24ad0f717b7506cc948876cba47f6768318c
|
[
"MIT"
] | null | null | null |
locomotion_analysis/src/workbench.py
|
sciple/neurobau
|
b54e24ad0f717b7506cc948876cba47f6768318c
|
[
"MIT"
] | null | null | null |
import numpy as np
import pandas as pd
def mister():
    """Return the fixed pair (12, 3)."""
    return (12, 3)
| 9.444444
| 19
| 0.576471
| 13
| 85
| 3.769231
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056604
| 0.376471
| 85
| 8
| 20
| 10.625
| 0.867925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
43e523dd06316c3fd9845cd29054d7bf317df47d
| 12,328
|
py
|
Python
|
tests/kbcr/models/test_models.py
|
moonbzyx/ctp
|
e5f663352124ac8033912c5867cc5ac2cecbb662
|
[
"MIT"
] | 1
|
2020-09-23T11:53:50.000Z
|
2020-09-23T11:53:50.000Z
|
tests/kbcr/models/test_models.py
|
moonbzyx/ctp
|
e5f663352124ac8033912c5867cc5ac2cecbb662
|
[
"MIT"
] | null | null | null |
tests/kbcr/models/test_models.py
|
moonbzyx/ctp
|
e5f663352124ac8033912c5867cc5ac2cecbb662
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import numpy as np
import torch
from torch import nn
from kbcr.kernels import GaussianKernel
from kbcr.models import DistMult, ComplEx, NeuralKB
from kbcr.models.reasoning import SimpleHoppy, RecursiveHoppy, Hoppy
from kbcr.reformulators import LinearReformulator, AttentiveReformulator
import pytest
@pytest.mark.light
def test_distmult_v1():
    """DistMult: score() must agree with the forward() score matrices."""
    nb_entities = 10
    nb_predicates = 5
    embedding_size = 10
    init_size = 1.0
    rs = np.random.RandomState(0)
    for _ in range(128):
        with torch.no_grad():
            entity_embeddings = nn.Embedding(nb_entities, embedding_size, sparse=True)
            predicate_embeddings = nn.Embedding(nb_predicates, embedding_size, sparse=True)
            entity_embeddings.weight.data *= init_size
            predicate_embeddings.weight.data *= init_size
            model = DistMult(entity_embeddings)
            # Random (subject, predicate, object) index batches of size 32.
            xs = torch.from_numpy(rs.randint(nb_entities, size=32))
            xp = torch.from_numpy(rs.randint(nb_predicates, size=32))
            xo = torch.from_numpy(rs.randint(nb_entities, size=32))
            xs_emb = entity_embeddings(xs)
            xp_emb = predicate_embeddings(xp)
            xo_emb = entity_embeddings(xo)
            scores = model.forward(xp_emb, xs_emb, xo_emb)
            inf = model.score(xp_emb, xs_emb, xo_emb)
            scores_sp, scores_po = scores
            inf = inf.cpu().numpy()
            scores_sp = scores_sp.cpu().numpy()
            scores_po = scores_po.cpu().numpy()
            # Each triple score must match the corresponding entry of the
            # all-objects (sp) and all-subjects (po) score matrices.
            for i in range(xs.shape[0]):
                np.testing.assert_allclose(inf[i], scores_sp[i, xo[i]], rtol=1e-5, atol=1e-5)
                np.testing.assert_allclose(inf[i], scores_po[i, xs[i]], rtol=1e-5, atol=1e-5)
@pytest.mark.light
def test_complex_v1():
    """ComplEx: score() must agree with the forward() score matrices."""
    nb_entities = 10
    nb_predicates = 5
    embedding_size = 10
    init_size = 1.0
    rs = np.random.RandomState(0)
    for _ in range(128):
        with torch.no_grad():
            # ComplEx needs double-width embeddings (real + imaginary halves).
            entity_embeddings = nn.Embedding(nb_entities, embedding_size * 2, sparse=True)
            predicate_embeddings = nn.Embedding(nb_predicates, embedding_size * 2, sparse=True)
            entity_embeddings.weight.data *= init_size
            predicate_embeddings.weight.data *= init_size
            model = ComplEx(entity_embeddings)
            xs = torch.from_numpy(rs.randint(nb_entities, size=32))
            xp = torch.from_numpy(rs.randint(nb_predicates, size=32))
            xo = torch.from_numpy(rs.randint(nb_entities, size=32))
            xs_emb = entity_embeddings(xs)
            xp_emb = predicate_embeddings(xp)
            xo_emb = entity_embeddings(xo)
            scores = model.forward(xp_emb, xs_emb, xo_emb)
            inf = model.score(xp_emb, xs_emb, xo_emb)
            scores_sp, scores_po = scores
            inf = inf.cpu().numpy()
            scores_sp = scores_sp.cpu().numpy()
            scores_po = scores_po.cpu().numpy()
            # Triple scores must equal the matching entries of the sp/po matrices.
            for i in range(xs.shape[0]):
                np.testing.assert_allclose(inf[i], scores_sp[i, xo[i]], rtol=1e-5, atol=1e-5)
                np.testing.assert_allclose(inf[i], scores_po[i, xs[i]], rtol=1e-5, atol=1e-5)
@pytest.mark.light
def test_neuralkb_v1():
    """NeuralKB: stored facts score high, and score() matches forward()."""
    nb_entities = 10
    nb_predicates = 5
    embedding_size = 10
    rs = np.random.RandomState(0)
    for _ in range(32):
        for st in ['min', 'concat']:
            with torch.no_grad():
                triples = [
                    ('a', 'p', 'b'),
                    ('c', 'q', 'd')
                ]
                entity_to_index = {'a': 0, 'b': 1, 'c': 2, 'd': 3}
                predicate_to_index = {'p': 0, 'q': 1}
                kernel = GaussianKernel()
                entity_embeddings = nn.Embedding(nb_entities, embedding_size * 2, sparse=True)
                predicate_embeddings = nn.Embedding(nb_predicates, embedding_size * 2, sparse=True)
                # Encode the facts as parallel index tensors (rel, arg1, arg2).
                fact_rel = torch.from_numpy(np.array([predicate_to_index[p] for (_, p, _) in triples]))
                fact_arg1 = torch.from_numpy(np.array([entity_to_index[s] for (s, _, _) in triples]))
                fact_arg2 = torch.from_numpy(np.array([entity_to_index[o] for (_, _, o) in triples]))
                facts = [fact_rel, fact_arg1, fact_arg2]
                model = NeuralKB(entity_embeddings=entity_embeddings, predicate_embeddings=predicate_embeddings,
                                 kernel=kernel, facts=facts, scoring_type=st)
                xs_np = rs.randint(nb_entities, size=32)
                xp_np = rs.randint(nb_predicates, size=32)
                xo_np = rs.randint(nb_entities, size=32)
                # Force the first two queries to be the two known facts.
                xs_np[0] = 0
                xp_np[0] = 0
                xo_np[0] = 1
                xs_np[1] = 2
                xp_np[1] = 1
                xo_np[1] = 3
                xs = torch.from_numpy(xs_np)
                xp = torch.from_numpy(xp_np)
                xo = torch.from_numpy(xo_np)
                xs_emb = entity_embeddings(xs)
                xp_emb = predicate_embeddings(xp)
                xo_emb = entity_embeddings(xo)
                scores = model.forward(xp_emb, xs_emb, xo_emb)
                inf = model.score(xp_emb, xs_emb, xo_emb)
                # Known facts should score close to 1.
                assert inf[0] > 0.9
                assert inf[1] > 0.9
                scores_sp, scores_po = scores
                inf = inf.cpu().numpy()
                scores_sp = scores_sp.cpu().numpy()
                scores_po = scores_po.cpu().numpy()
                for i in range(xs.shape[0]):
                    np.testing.assert_allclose(inf[i], scores_sp[i, xo[i]], rtol=1e-5, atol=1e-5)
                    np.testing.assert_allclose(inf[i], scores_po[i, xs[i]], rtol=1e-5, atol=1e-5)
@pytest.mark.light
def test_hoppy_v1():
    """SimpleHoppy over ComplEx: score() matches forward() for 0-5 hops,
    with both attentive and linear reformulators."""
    nb_entities = 10
    nb_predicates = 5
    embedding_size = 10
    rs = np.random.RandomState(0)
    for _ in range(16):
        for nb_hops in range(6):
            for use_attention in [True, False]:
                with torch.no_grad():
                    entity_embeddings = nn.Embedding(nb_entities, embedding_size * 2, sparse=True)
                    predicate_embeddings = nn.Embedding(nb_predicates, embedding_size * 2, sparse=True)
                    base = ComplEx(entity_embeddings)
                    # Reformulator turns a query predicate into nb_hops hop predicates.
                    if use_attention:
                        reformulator = AttentiveReformulator(nb_hops, predicate_embeddings)
                    else:
                        reformulator = LinearReformulator(nb_hops, embedding_size * 2)
                    model = SimpleHoppy(base, entity_embeddings, hops=reformulator)
                    xs = torch.from_numpy(rs.randint(nb_entities, size=32))
                    xp = torch.from_numpy(rs.randint(nb_predicates, size=32))
                    xo = torch.from_numpy(rs.randint(nb_entities, size=32))
                    xs_emb = entity_embeddings(xs)
                    xp_emb = predicate_embeddings(xp)
                    xo_emb = entity_embeddings(xo)
                    scores = model.forward(xp_emb, xs_emb, xo_emb)
                    inf = model.score(xp_emb, xs_emb, xo_emb)
                    scores_sp, scores_po = scores
                    inf = inf.cpu().numpy()
                    scores_sp = scores_sp.cpu().numpy()
                    scores_po = scores_po.cpu().numpy()
                    for i in range(xs.shape[0]):
                        np.testing.assert_allclose(inf[i], scores_sp[i, xo[i]], rtol=1e-5, atol=1e-5)
                        np.testing.assert_allclose(inf[i], scores_po[i, xs[i]], rtol=1e-5, atol=1e-5)
@pytest.mark.light
def test_rhoppy_v1():
    """RecursiveHoppy: score() matches forward() across hop counts and
    recursion depths, with both reformulator types."""
    nb_entities = 10
    nb_predicates = 5
    embedding_size = 10
    rs = np.random.RandomState(0)
    for _ in range(8):
        for nb_hops in range(3):
            for depth in range(3):
                for use_attention in [True, False]:
                    with torch.no_grad():
                        entity_embeddings = nn.Embedding(nb_entities, embedding_size * 2, sparse=True)
                        predicate_embeddings = nn.Embedding(nb_predicates, embedding_size * 2, sparse=True)
                        base = ComplEx(entity_embeddings)
                        if use_attention:
                            reformulator = AttentiveReformulator(nb_hops, predicate_embeddings)
                        else:
                            reformulator = LinearReformulator(nb_hops, embedding_size * 2)
                        model = RecursiveHoppy(model=base,
                                               entity_embeddings=entity_embeddings,
                                               hops=reformulator,
                                               depth=depth)
                        xs = torch.from_numpy(rs.randint(nb_entities, size=32))
                        xp = torch.from_numpy(rs.randint(nb_predicates, size=32))
                        xo = torch.from_numpy(rs.randint(nb_entities, size=32))
                        xs_emb = entity_embeddings(xs)
                        xp_emb = predicate_embeddings(xp)
                        xo_emb = entity_embeddings(xo)
                        scores = model.forward(xp_emb, xs_emb, xo_emb)
                        inf = model.score(xp_emb, xs_emb, xo_emb)
                        scores_sp, scores_po = scores
                        inf = inf.cpu().numpy()
                        scores_sp = scores_sp.cpu().numpy()
                        scores_po = scores_po.cpu().numpy()
                        for i in range(xs.shape[0]):
                            np.testing.assert_allclose(inf[i], scores_sp[i, xo[i]], rtol=1e-5, atol=1e-5)
                            np.testing.assert_allclose(inf[i], scores_po[i, xs[i]], rtol=1e-5, atol=1e-5)
@pytest.mark.light
def test_multirhoppy_v1():
    """Hoppy with multiple hop configurations: score() matches forward()."""
    nb_entities = 10
    nb_predicates = 5
    embedding_size = 10
    init_size = 1.0
    rs = np.random.RandomState(0)
    for _ in range(8):
        for nb_hops_lst in [[1], [2], [3], [1, 2], [2, 2], [3, 2], [1, 2, 2], [2, 2, 2], [3, 2, 2]]:
            for depth in range(3):
                for use_attention in [True, False]:
                    with torch.no_grad():
                        entity_embeddings = nn.Embedding(nb_entities, embedding_size * 2, sparse=True)
                        predicate_embeddings = nn.Embedding(nb_predicates, embedding_size * 2, sparse=True)
                        entity_embeddings.weight.data *= init_size
                        predicate_embeddings.weight.data *= init_size
                        base = ComplEx(entity_embeddings)
                        # One reformulator per hop count in the configuration.
                        hops_lst = []
                        for i in nb_hops_lst:
                            if use_attention:
                                reformulator = AttentiveReformulator(i, predicate_embeddings)
                            else:
                                reformulator = LinearReformulator(i, embedding_size * 2)
                            hops_lst += [(reformulator, False)]
                        model = Hoppy(model=base,
                                      entity_embeddings=entity_embeddings,
                                      hops_lst=hops_lst,
                                      depth=depth)
                        xs = torch.from_numpy(rs.randint(nb_entities, size=32))
                        xp = torch.from_numpy(rs.randint(nb_predicates, size=32))
                        xo = torch.from_numpy(rs.randint(nb_entities, size=32))
                        xs_emb = entity_embeddings(xs)
                        xp_emb = predicate_embeddings(xp)
                        xo_emb = entity_embeddings(xo)
                        scores = model.forward(xp_emb, xs_emb, xo_emb)
                        inf = model.score(xp_emb, xs_emb, xo_emb)
                        scores_sp, scores_po = scores
                        inf = inf.cpu().numpy()
                        scores_sp = scores_sp.cpu().numpy()
                        scores_po = scores_po.cpu().numpy()
                        for i in range(xs.shape[0]):
                            np.testing.assert_allclose(inf[i], scores_sp[i, xo[i]], rtol=1e-5, atol=1e-5)
                            np.testing.assert_allclose(inf[i], scores_po[i, xs[i]], rtol=1e-5, atol=1e-5)
if __name__ == '__main__':
    # Allow running this test module directly via `python test_models.py`.
    pytest.main([__file__])
| 38.167183
| 112
| 0.535529
| 1,473
| 12,328
| 4.24372
| 0.082824
| 0.084466
| 0.047032
| 0.038394
| 0.838906
| 0.81075
| 0.81075
| 0.782435
| 0.771557
| 0.771557
| 0
| 0.027226
| 0.362427
| 12,328
| 322
| 113
| 38.285714
| 0.768066
| 0.001703
| 0
| 0.727273
| 0
| 0
| 0.002357
| 0
| 0
| 0
| 0
| 0
| 0.060606
| 1
| 0.025974
| false
| 0
| 0.034632
| 0
| 0.060606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a1260ce55b0c0ce222a19265e28784a27620f498
| 168,230
|
py
|
Python
|
gitea_client/api/organization_api.py
|
kedros-as/gitea_client
|
1d2979bed356f172a843c05b305423cf084f7f56
|
[
"BSD-3-Clause"
] | null | null | null |
gitea_client/api/organization_api.py
|
kedros-as/gitea_client
|
1d2979bed356f172a843c05b305423cf084f7f56
|
[
"BSD-3-Clause"
] | null | null | null |
gitea_client/api/organization_api.py
|
kedros-as/gitea_client
|
1d2979bed356f172a843c05b305423cf084f7f56
|
[
"BSD-3-Clause"
] | null | null | null |
# coding: utf-8
"""
Gitea API.
This documentation describes the Gitea API. # noqa: E501
OpenAPI spec version: 1.1.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from gitea_client.api_client import ApiClient
class OrganizationApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Store the client used for every request, defaulting to a new ApiClient."""
    self.api_client = ApiClient() if api_client is None else api_client
def create_org_repo(self, org, **kwargs):  # noqa: E501
    """Create a repository in an organization.  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous request:
    >>> thread = api.create_org_repo(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of organization (required)
    :param CreateRepoOption body:
    :return: Repository
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.create_org_repo_with_http_info(org, **kwargs)  # noqa: E501
    (data) = self.create_org_repo_with_http_info(org, **kwargs)  # noqa: E501
    return data
def create_org_repo_with_http_info(self, org, **kwargs):  # noqa: E501
    """Create a repository in an organization  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_org_repo_with_http_info(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of organization (required)
    :param CreateRepoOption body:
    :return: Repository
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['org', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Merge explicit kwargs into the locals() dict, rejecting unknown names.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_org_repo" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'org' is set
    if ('org' not in params or
            params['org'] is None):
        raise ValueError("Missing the required parameter `org` when calling `create_org_repo`")  # noqa: E501

    collection_formats = {}

    # 'org' is interpolated into the path template below.
    path_params = {}
    if 'org' in params:
        path_params['org'] = params['org']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The optional CreateRepoOption goes in the request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/repos', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Repository',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def create_org_repo_deprecated(self, org, **kwargs):  # noqa: E501
    """Create a repository in an organization (deprecated endpoint).  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous request:
    >>> thread = api.create_org_repo_deprecated(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of organization (required)
    :param CreateRepoOption body:
    :return: Repository
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.create_org_repo_deprecated_with_http_info(org, **kwargs)  # noqa: E501
    (data) = self.create_org_repo_deprecated_with_http_info(org, **kwargs)  # noqa: E501
    return data
def create_org_repo_deprecated_with_http_info(self, org, **kwargs):  # noqa: E501
    """Create a repository in an organization  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_org_repo_deprecated_with_http_info(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of organization (required)
    :param CreateRepoOption body:
    :return: Repository
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['org', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Merge explicit kwargs into the locals() dict, rejecting unknown names.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_org_repo_deprecated" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'org' is set
    if ('org' not in params or
            params['org'] is None):
        raise ValueError("Missing the required parameter `org` when calling `create_org_repo_deprecated`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'org' in params:
        path_params['org'] = params['org']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    # NOTE: deprecated singular path ('/org/...' rather than '/orgs/...').
    return self.api_client.call_api(
        '/org/{org}/repos', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Repository',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_add_team_member(self, id, username, **kwargs):  # noqa: E501
    """Add a team member.  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous request:
    >>> thread = api.org_add_team_member(id, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of the team (required)
    :param str username: username of the user to add (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.org_add_team_member_with_http_info(id, username, **kwargs)  # noqa: E501
    (data) = self.org_add_team_member_with_http_info(id, username, **kwargs)  # noqa: E501
    return data
def org_add_team_member_with_http_info(self, id, username, **kwargs):  # noqa: E501
    """Add a team member  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_add_team_member_with_http_info(id, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of the team (required)
    :param str username: username of the user to add (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'username']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Merge explicit kwargs into the locals() dict, rejecting unknown names.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_add_team_member" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `org_add_team_member`")  # noqa: E501
    # verify the required parameter 'username' is set
    if ('username' not in params or
            params['username'] is None):
        raise ValueError("Missing the required parameter `username` when calling `org_add_team_member`")  # noqa: E501

    collection_formats = {}

    # Both values are interpolated into the path template below.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501
    if 'username' in params:
        path_params['username'] = params['username']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # This endpoint takes no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/teams/{id}/members/{username}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_add_team_repository(self, id, org, repo, **kwargs):  # noqa: E501
    """Add a repository to a team.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_add_team_repository(id, org, repo, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of the team (required)
    :param str org: organization that owns the repo to add (required)
    :param str repo: name of the repo to add (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers always get the payload alone, never
    # the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous call: hand the request thread straight back.
        return self.org_add_team_repository_with_http_info(id, org, repo, **kwargs)  # noqa: E501
    # Synchronous call: return the unwrapped response data.
    return self.org_add_team_repository_with_http_info(id, org, repo, **kwargs)  # noqa: E501
def org_add_team_repository_with_http_info(self, id, org, repo, **kwargs):  # noqa: E501
    """Add a repository to a team.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_add_team_repository_with_http_info(id, org, repo, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of the team (required)
    :param str org: organization that owns the repo to add (required)
    :param str repo: name of the repo to add (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Positional parameters plus the client-control keywords accepted here.
    all_params = ['id', 'org', 'repo']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Build the parameter map explicitly instead of via locals(): locals()
    # silently absorbs any new local variable added later, and dict.items()
    # replaces six.iteritems with identical iteration behavior.
    params = {'id': id, 'org': org, 'repo': repo}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_add_team_repository" % key
            )
        params[key] = val

    # Verify the required parameters are set (positional args can be None).
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `org_add_team_repository`")  # noqa: E501
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_add_team_repository`")  # noqa: E501
    if params.get('repo') is None:
        raise ValueError("Missing the required parameter `repo` when calling `org_add_team_repository`")  # noqa: E501

    collection_formats = {}

    # All three parameters are substituted into the URL path.
    path_params = {'id': params['id'],
                   'org': params['org'],
                   'repo': params['repo']}

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/teams/{id}/repos/{org}/{repo}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_conceal_member(self, org, username, **kwargs):  # noqa: E501
    """Conceal a user's membership.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_conceal_member(org, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param str username: username of the user (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper always want the payload only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous call: return the request thread directly.
        return self.org_conceal_member_with_http_info(org, username, **kwargs)  # noqa: E501
    # Synchronous call: return the unwrapped response data.
    return self.org_conceal_member_with_http_info(org, username, **kwargs)  # noqa: E501
def org_conceal_member_with_http_info(self, org, username, **kwargs):  # noqa: E501
    """Conceal a user's membership.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_conceal_member_with_http_info(org, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param str username: username of the user (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Positional parameters plus the client-control keywords accepted here.
    all_params = ['org', 'username']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Build the parameter map explicitly instead of via locals(): locals()
    # silently absorbs any new local variable added later, and dict.items()
    # replaces six.iteritems with identical iteration behavior.
    params = {'org': org, 'username': username}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_conceal_member" % key
            )
        params[key] = val

    # Verify the required parameters are set (positional args can be None).
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_conceal_member`")  # noqa: E501
    if params.get('username') is None:
        raise ValueError("Missing the required parameter `username` when calling `org_conceal_member`")  # noqa: E501

    collection_formats = {}

    # Both parameters are substituted into the URL path.
    path_params = {'org': params['org'],
                   'username': params['username']}

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/public_members/{username}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_create(self, organization, **kwargs):  # noqa: E501
    """Create an organization.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_create(organization, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CreateOrgOption organization: (required)
    :return: Organization
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper always want the payload only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous call: return the request thread directly.
        return self.org_create_with_http_info(organization, **kwargs)  # noqa: E501
    # Synchronous call: return the unwrapped response data.
    return self.org_create_with_http_info(organization, **kwargs)  # noqa: E501
def org_create_with_http_info(self, organization, **kwargs):  # noqa: E501
    """Create an organization.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_create_with_http_info(organization, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param CreateOrgOption organization: (required)
    :return: Organization
             If the method is called asynchronously,
             returns the request thread.
    """
    # Positional parameters plus the client-control keywords accepted here.
    all_params = ['organization']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Build the parameter map explicitly instead of via locals(): locals()
    # silently absorbs any new local variable added later, and dict.items()
    # replaces six.iteritems with identical iteration behavior.
    params = {'organization': organization}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_create" % key
            )
        params[key] = val

    # Verify the required parameter is set (the positional arg can be None).
    if params.get('organization') is None:
        raise ValueError("Missing the required parameter `organization` when calling `org_create`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    # The organization options object is sent as the JSON request body.
    body_params = params['organization']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Organization',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_create_hook(self, org, body, **kwargs):  # noqa: E501
    """Create a hook.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_create_hook(org, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param CreateHookOption body: (required)
    :return: Hook
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper always want the payload only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous call: return the request thread directly.
        return self.org_create_hook_with_http_info(org, body, **kwargs)  # noqa: E501
    # Synchronous call: return the unwrapped response data.
    return self.org_create_hook_with_http_info(org, body, **kwargs)  # noqa: E501
def org_create_hook_with_http_info(self, org, body, **kwargs):  # noqa: E501
    """Create a hook.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_create_hook_with_http_info(org, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param CreateHookOption body: (required)
    :return: Hook
             If the method is called asynchronously,
             returns the request thread.
    """
    # Positional parameters plus the client-control keywords accepted here.
    all_params = ['org', 'body']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Build the parameter map explicitly instead of via locals(): locals()
    # silently absorbs any new local variable added later, and dict.items()
    # replaces six.iteritems with identical iteration behavior.
    params = {'org': org, 'body': body}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_create_hook" % key
            )
        params[key] = val

    # Verify the required parameters are set (positional args can be None).
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_create_hook`")  # noqa: E501
    if params.get('body') is None:
        raise ValueError("Missing the required parameter `body` when calling `org_create_hook`")  # noqa: E501

    collection_formats = {}

    # The organization name is substituted into the URL path.
    path_params = {'org': params['org']}

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    # The hook options object is sent as the JSON request body.
    body_params = params['body']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/hooks/', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Hook',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_create_label(self, org, **kwargs):  # noqa: E501
    """Create a label for an organization.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_create_label(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param CreateLabelOption body:
    :return: Label
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper always want the payload only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous call: return the request thread directly.
        return self.org_create_label_with_http_info(org, **kwargs)  # noqa: E501
    # Synchronous call: return the unwrapped response data.
    return self.org_create_label_with_http_info(org, **kwargs)  # noqa: E501
def org_create_label_with_http_info(self, org, **kwargs):  # noqa: E501
    """Create a label for an organization.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_create_label_with_http_info(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param CreateLabelOption body:
    :return: Label
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted parameters: the required path param, the optional request
    # body, and the client-control keywords.
    all_params = ['org', 'body']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Build the parameter map explicitly instead of via locals(): locals()
    # silently absorbs any new local variable added later, and dict.items()
    # replaces six.iteritems with identical iteration behavior.
    params = {'org': org}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_create_label" % key
            )
        params[key] = val

    # Verify the required parameter is set (the positional arg can be None).
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_create_label`")  # noqa: E501

    collection_formats = {}

    # The organization name is substituted into the URL path.
    path_params = {'org': params['org']}

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    # Optional label options object; None means no request body is sent.
    body_params = params.get('body')

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/labels', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Label',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_create_team(self, org, **kwargs):  # noqa: E501
    """Create a team.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_create_team(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param CreateTeamOption body:
    :return: Team
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper always want the payload only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous call: return the request thread directly.
        return self.org_create_team_with_http_info(org, **kwargs)  # noqa: E501
    # Synchronous call: return the unwrapped response data.
    return self.org_create_team_with_http_info(org, **kwargs)  # noqa: E501
def org_create_team_with_http_info(self, org, **kwargs):  # noqa: E501
    """Create a team.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_create_team_with_http_info(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param CreateTeamOption body:
    :return: Team
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted parameters: the required path param, the optional request
    # body, and the client-control keywords.
    all_params = ['org', 'body']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Build the parameter map explicitly instead of via locals(): locals()
    # silently absorbs any new local variable added later, and dict.items()
    # replaces six.iteritems with identical iteration behavior.
    params = {'org': org}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_create_team" % key
            )
        params[key] = val

    # Verify the required parameter is set (the positional arg can be None).
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_create_team`")  # noqa: E501

    collection_formats = {}

    # The organization name is substituted into the URL path.
    path_params = {'org': params['org']}

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    # Optional team options object; None means no request body is sent.
    body_params = params.get('body')

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/teams', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Team',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_delete(self, org, **kwargs):  # noqa: E501
    """Delete an organization.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_delete(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: organization that is to be deleted (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper always want the payload only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous call: return the request thread directly.
        return self.org_delete_with_http_info(org, **kwargs)  # noqa: E501
    # Synchronous call: return the unwrapped response data.
    return self.org_delete_with_http_info(org, **kwargs)  # noqa: E501
def org_delete_with_http_info(self, org, **kwargs):  # noqa: E501
    """Delete an organization.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_delete_with_http_info(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: organization that is to be deleted (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Positional parameter plus the client-control keywords accepted here.
    all_params = ['org']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Build the parameter map explicitly instead of via locals(): locals()
    # silently absorbs any new local variable added later, and dict.items()
    # replaces six.iteritems with identical iteration behavior.
    params = {'org': org}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_delete" % key
            )
        params[key] = val

    # Verify the required parameter is set (the positional arg can be None).
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_delete`")  # noqa: E501

    collection_formats = {}

    # The organization name is substituted into the URL path.
    path_params = {'org': params['org']}

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_delete_hook(self, org, id, **kwargs):  # noqa: E501
    """Delete a hook.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_delete_hook(org, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int id: id of the hook to delete (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper always want the payload only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous call: return the request thread directly.
        return self.org_delete_hook_with_http_info(org, id, **kwargs)  # noqa: E501
    # Synchronous call: return the unwrapped response data.
    return self.org_delete_hook_with_http_info(org, id, **kwargs)  # noqa: E501
def org_delete_hook_with_http_info(self, org, id, **kwargs):  # noqa: E501
    """Delete a hook.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_delete_hook_with_http_info(org, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int id: id of the hook to delete (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Positional parameters plus the client-control keywords accepted here.
    all_params = ['org', 'id']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Build the parameter map explicitly instead of via locals(): locals()
    # silently absorbs any new local variable added later, and dict.items()
    # replaces six.iteritems with identical iteration behavior.
    params = {'org': org, 'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_delete_hook" % key
            )
        params[key] = val

    # Verify the required parameters are set (positional args can be None).
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_delete_hook`")  # noqa: E501
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `org_delete_hook`")  # noqa: E501

    collection_formats = {}

    # Both parameters are substituted into the URL path.
    path_params = {'org': params['org'],
                   'id': params['id']}

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/hooks/{id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_delete_label(self, org, id, **kwargs):  # noqa: E501
    """Delete a label.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_delete_label(org, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int id: id of the label to delete (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper always want the payload only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous call: return the request thread directly.
        return self.org_delete_label_with_http_info(org, id, **kwargs)  # noqa: E501
    # Synchronous call: return the unwrapped response data.
    return self.org_delete_label_with_http_info(org, id, **kwargs)  # noqa: E501
def org_delete_label_with_http_info(self, org, id, **kwargs):  # noqa: E501
    """Delete a label.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_delete_label_with_http_info(org, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int id: id of the label to delete (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Positional parameters plus the client-control keywords accepted here.
    all_params = ['org', 'id']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Build the parameter map explicitly instead of via locals(): locals()
    # silently absorbs any new local variable added later, and dict.items()
    # replaces six.iteritems with identical iteration behavior.
    params = {'org': org, 'id': id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_delete_label" % key
            )
        params[key] = val

    # Verify the required parameters are set (positional args can be None).
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_delete_label`")  # noqa: E501
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `org_delete_label`")  # noqa: E501

    collection_formats = {}

    # Both parameters are substituted into the URL path.
    path_params = {'org': params['org'],
                   'id': params['id']}

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept` — this endpoint also advertises text/html,
    # unlike its sibling delete endpoints (kept as generated).
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/html'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/labels/{id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_delete_member(self, org, username, **kwargs):  # noqa: E501
    """Remove a member from an organization.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_delete_member(org, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param str username: username of the user (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper always want the payload only.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous call: return the request thread directly.
        return self.org_delete_member_with_http_info(org, username, **kwargs)  # noqa: E501
    # Synchronous call: return the unwrapped response data.
    return self.org_delete_member_with_http_info(org, username, **kwargs)  # noqa: E501
def org_delete_member_with_http_info(self, org, username, **kwargs):  # noqa: E501
    """Remove a member from an organization.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.org_delete_member_with_http_info(org, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param str username: username of the user (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Positional parameters plus the client-control keywords accepted here.
    all_params = ['org', 'username']  # noqa: E501
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Build the parameter map explicitly instead of via locals(): locals()
    # silently absorbs any new local variable added later, and dict.items()
    # replaces six.iteritems with identical iteration behavior.
    params = {'org': org, 'username': username}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_delete_member" % key
            )
        params[key] = val

    # Verify the required parameters are set (positional args can be None).
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_delete_member`")  # noqa: E501
    if params.get('username') is None:
        raise ValueError("Missing the required parameter `username` when calling `org_delete_member`")  # noqa: E501

    collection_formats = {}

    # Both parameters are substituted into the URL path.
    path_params = {'org': params['org'],
                   'username': params['username']}

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/members/{username}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_delete_team(self, id, **kwargs):  # noqa: E501
    """Delete a team  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded result.

    >>> thread = api.org_delete_team(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of the team to delete (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards the (status, headers)
    # portion; callers wanting them use org_delete_team_with_http_info.
    kwargs['_return_http_data_only'] = True
    # Sync result and async request thread are both returned unchanged.
    response = self.org_delete_team_with_http_info(id, **kwargs)  # noqa: E501
    return response
def org_delete_team_with_http_info(self, id, **kwargs):  # noqa: E501
    """Delete a team  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.org_delete_team_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of the team to delete (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of argument names this endpoint accepts; anything else
    # arriving in **kwargs raises TypeError below.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace (self, id, kwargs, all_params), then
    # fold validated keyword arguments into it.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_delete_team" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `org_delete_team`")  # noqa: E501

    collection_formats = {}

    # 'id' is interpolated into the '/teams/{id}' path template below.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # DELETE carries no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'text/html'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/teams/{id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_edit(self, org, body, **kwargs):  # noqa: E501
    """Edit an organization  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded result.

    >>> thread = api.org_edit(org, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization to edit (required)
    :param EditOrgOption body: (required)
    :return: Organization
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards the (status, headers)
    # portion; callers wanting them use org_edit_with_http_info.
    kwargs['_return_http_data_only'] = True
    # Sync result and async request thread are both returned unchanged.
    response = self.org_edit_with_http_info(org, body, **kwargs)  # noqa: E501
    return response
def org_edit_with_http_info(self, org, body, **kwargs):  # noqa: E501
    """Edit an organization  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.org_edit_with_http_info(org, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization to edit (required)
    :param EditOrgOption body: (required)
    :return: Organization
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of argument names this endpoint accepts; anything else
    # arriving in **kwargs raises TypeError below.
    all_params = ['org', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace, then fold validated keyword
    # arguments into it.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_edit" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'org' is set
    if ('org' not in params or
            params['org'] is None):
        raise ValueError("Missing the required parameter `org` when calling `org_edit`")  # noqa: E501
    # verify the required parameter 'body' is set
    if ('body' not in params or
            params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `org_edit`")  # noqa: E501

    collection_formats = {}

    # 'org' is interpolated into the '/orgs/{org}' path template below.
    path_params = {}
    if 'org' in params:
        path_params['org'] = params['org']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The EditOrgOption payload is serialized as the PATCH body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Organization',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_edit_hook(self, org, id, **kwargs):  # noqa: E501
    """Update a hook  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded result.

    >>> thread = api.org_edit_hook(org, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int id: id of the hook to update (required)
    :param EditHookOption body:
    :return: Hook
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards the (status, headers)
    # portion; callers wanting them use org_edit_hook_with_http_info.
    kwargs['_return_http_data_only'] = True
    # Sync result and async request thread are both returned unchanged.
    response = self.org_edit_hook_with_http_info(org, id, **kwargs)  # noqa: E501
    return response
def org_edit_hook_with_http_info(self, org, id, **kwargs):  # noqa: E501
    """Update a hook  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.org_edit_hook_with_http_info(org, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int id: id of the hook to update (required)
    :param EditHookOption body:
    :return: Hook
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of argument names this endpoint accepts ('body' is
    # optional); anything else in **kwargs raises TypeError below.
    all_params = ['org', 'id', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace, then fold validated keyword
    # arguments into it.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_edit_hook" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'org' is set
    if ('org' not in params or
            params['org'] is None):
        raise ValueError("Missing the required parameter `org` when calling `org_edit_hook`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `org_edit_hook`")  # noqa: E501

    collection_formats = {}

    # 'org' and 'id' fill the '/orgs/{org}/hooks/{id}' path template.
    path_params = {}
    if 'org' in params:
        path_params['org'] = params['org']  # noqa: E501
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # Optional EditHookOption payload becomes the PATCH body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/hooks/{id}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Hook',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_edit_label(self, org, id, **kwargs):  # noqa: E501
    """Update a label  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded result.

    >>> thread = api.org_edit_label(org, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int id: id of the label to edit (required)
    :param EditLabelOption body:
    :return: Label
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards the (status, headers)
    # portion; callers wanting them use org_edit_label_with_http_info.
    kwargs['_return_http_data_only'] = True
    # Sync result and async request thread are both returned unchanged.
    response = self.org_edit_label_with_http_info(org, id, **kwargs)  # noqa: E501
    return response
def org_edit_label_with_http_info(self, org, id, **kwargs):  # noqa: E501
    """Update a label  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.org_edit_label_with_http_info(org, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int id: id of the label to edit (required)
    :param EditLabelOption body:
    :return: Label
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of argument names this endpoint accepts ('body' is
    # optional); anything else in **kwargs raises TypeError below.
    all_params = ['org', 'id', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace, then fold validated keyword
    # arguments into it.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_edit_label" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'org' is set
    if ('org' not in params or
            params['org'] is None):
        raise ValueError("Missing the required parameter `org` when calling `org_edit_label`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `org_edit_label`")  # noqa: E501

    collection_formats = {}

    # 'org' and 'id' fill the '/orgs/{org}/labels/{id}' path template.
    path_params = {}
    if 'org' in params:
        path_params['org'] = params['org']  # noqa: E501
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # Optional EditLabelOption payload becomes the PATCH body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/labels/{id}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Label',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_edit_team(self, id, **kwargs):  # noqa: E501
    """Edit a team  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded result.

    >>> thread = api.org_edit_team(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of the team to edit (required)
    :param EditTeamOption body:
    :return: Team
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards the (status, headers)
    # portion; callers wanting them use org_edit_team_with_http_info.
    kwargs['_return_http_data_only'] = True
    # Sync result and async request thread are both returned unchanged.
    response = self.org_edit_team_with_http_info(id, **kwargs)  # noqa: E501
    return response
def org_edit_team_with_http_info(self, id, **kwargs):  # noqa: E501
    """Edit a team  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.org_edit_team_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of the team to edit (required)
    :param EditTeamOption body:
    :return: Team
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of argument names this endpoint accepts ('body' is
    # optional); anything else in **kwargs raises TypeError below.
    all_params = ['id', 'body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace, then fold validated keyword
    # arguments into it.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_edit_team" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `org_edit_team`")  # noqa: E501

    collection_formats = {}

    # 'id' is interpolated into the '/teams/{id}' path template below.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # Optional EditTeamOption payload becomes the PATCH body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/teams/{id}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Team',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_get(self, org, **kwargs):  # noqa: E501
    """Get an organization  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded result.

    >>> thread = api.org_get(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization to get (required)
    :return: Organization
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards the (status, headers)
    # portion; callers wanting them use org_get_with_http_info.
    kwargs['_return_http_data_only'] = True
    # Sync result and async request thread are both returned unchanged.
    response = self.org_get_with_http_info(org, **kwargs)  # noqa: E501
    return response
def org_get_with_http_info(self, org, **kwargs):  # noqa: E501
    """Get an organization  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.org_get_with_http_info(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization to get (required)
    :return: Organization
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of argument names this endpoint accepts; anything else
    # arriving in **kwargs raises TypeError below.
    all_params = ['org']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace, then fold validated keyword
    # arguments into it.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'org' is set
    if ('org' not in params or
            params['org'] is None):
        raise ValueError("Missing the required parameter `org` when calling `org_get`")  # noqa: E501

    collection_formats = {}

    # 'org' is interpolated into the '/orgs/{org}' path template below.
    path_params = {}
    if 'org' in params:
        path_params['org'] = params['org']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET carries no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Organization',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_get_all(self, **kwargs):  # noqa: E501
    """Get list of organizations  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded result.

    >>> thread = api.org_get_all(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Organization]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards the (status, headers)
    # portion; callers wanting them use org_get_all_with_http_info.
    kwargs['_return_http_data_only'] = True
    # Sync result and async request thread are both returned unchanged.
    response = self.org_get_all_with_http_info(**kwargs)  # noqa: E501
    return response
def org_get_all_with_http_info(self, **kwargs):  # noqa: E501
    """Get list of organizations  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.org_get_all_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Organization]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of argument names this endpoint accepts (both paging
    # options are optional); anything else raises TypeError below.
    all_params = ['page', 'limit']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace, then fold validated keyword
    # arguments into it. No required parameters for this endpoint.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_get_all" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    # Paging options are sent as query-string parameters.
    query_params = []
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'limit' in params:
        query_params.append(('limit', params['limit']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET carries no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Organization]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_get_hook(self, org, id, **kwargs):  # noqa: E501
    """Get a hook  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded result.

    >>> thread = api.org_get_hook(org, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int id: id of the hook to get (required)
    :return: Hook
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards the (status, headers)
    # portion; callers wanting them use org_get_hook_with_http_info.
    kwargs['_return_http_data_only'] = True
    # Sync result and async request thread are both returned unchanged.
    response = self.org_get_hook_with_http_info(org, id, **kwargs)  # noqa: E501
    return response
def org_get_hook_with_http_info(self, org, id, **kwargs):  # noqa: E501
    """Get a hook  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.org_get_hook_with_http_info(org, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int id: id of the hook to get (required)
    :return: Hook
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of argument names this endpoint accepts; anything else
    # arriving in **kwargs raises TypeError below.
    all_params = ['org', 'id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace, then fold validated keyword
    # arguments into it.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_get_hook" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'org' is set
    if ('org' not in params or
            params['org'] is None):
        raise ValueError("Missing the required parameter `org` when calling `org_get_hook`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `org_get_hook`")  # noqa: E501

    collection_formats = {}

    # 'org' and 'id' fill the '/orgs/{org}/hooks/{id}' path template.
    path_params = {}
    if 'org' in params:
        path_params['org'] = params['org']  # noqa: E501
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET carries no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/hooks/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Hook',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_get_label(self, org, id, **kwargs):  # noqa: E501
    """Get a single label  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded result.

    >>> thread = api.org_get_label(org, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int id: id of the label to get (required)
    :return: Label
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards the (status, headers)
    # portion; callers wanting them use org_get_label_with_http_info.
    kwargs['_return_http_data_only'] = True
    # Sync result and async request thread are both returned unchanged.
    response = self.org_get_label_with_http_info(org, id, **kwargs)  # noqa: E501
    return response
def org_get_label_with_http_info(self, org, id, **kwargs):  # noqa: E501
    """Get a single label  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.org_get_label_with_http_info(org, id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int id: id of the label to get (required)
    :return: Label
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of argument names this endpoint accepts; anything else
    # arriving in **kwargs raises TypeError below.
    all_params = ['org', 'id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace, then fold validated keyword
    # arguments into it.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_get_label" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'org' is set
    if ('org' not in params or
            params['org'] is None):
        raise ValueError("Missing the required parameter `org` when calling `org_get_label`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `org_get_label`")  # noqa: E501

    collection_formats = {}

    # 'org' and 'id' fill the '/orgs/{org}/labels/{id}' path template.
    path_params = {}
    if 'org' in params:
        path_params['org'] = params['org']  # noqa: E501
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET carries no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/labels/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Label',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_get_team(self, id, **kwargs):  # noqa: E501
    """Get a team  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded result.

    >>> thread = api.org_get_team(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of the team to get (required)
    :return: Team
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards the (status, headers)
    # portion; callers wanting them use org_get_team_with_http_info.
    kwargs['_return_http_data_only'] = True
    # Sync result and async request thread are both returned unchanged.
    response = self.org_get_team_with_http_info(id, **kwargs)  # noqa: E501
    return response
def org_get_team_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get a team  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.org_get_team_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of the team to get (required)
    :return: Team
             If the method is called asynchronously,
             returns the request thread.
    """
    # Whitelist of argument names this endpoint accepts; anything else
    # arriving in **kwargs raises TypeError below.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the local namespace, then fold validated keyword
    # arguments into it.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_get_team" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `org_get_team`")  # noqa: E501

    collection_formats = {}

    # 'id' is interpolated into the '/teams/{id}' path template below.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # GET carries no request body.
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json', 'text/plain'])  # noqa: E501

    # Authentication setting
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/teams/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Team',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def org_is_member(self, org, username, **kwargs):  # noqa: E501
    """Check if a user is a member of an organization  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead of the decoded result.

    >>> thread = api.org_is_member(org, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param str username: username of the user (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards the (status, headers)
    # portion; callers wanting them use org_is_member_with_http_info.
    kwargs['_return_http_data_only'] = True
    # Sync result and async request thread are both returned unchanged.
    response = self.org_is_member_with_http_info(org, username, **kwargs)  # noqa: E501
    return response
def org_is_member_with_http_info(self, org, username, **kwargs):  # noqa: E501
    """Check if a user is a member of an organization  # noqa: E501

    Performs ``GET /orgs/{org}/members/{username}``. Synchronous by
    default; pass ``async_req=True`` to receive the request thread.

    >>> thread = api.org_is_member_with_http_info(org, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param str username: username of the user (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'org', 'username',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Collect the explicit arguments, then fold in keyword arguments,
    # rejecting anything this endpoint does not understand.
    params = {'org': org, 'username': username}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_is_member" % key
            )
        params[key] = val

    # Required path parameters must be present and non-None.
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_is_member`")  # noqa: E501
    if params.get('username') is None:
        raise ValueError("Missing the required parameter `username` when calling `org_is_member`")  # noqa: E501

    path_params = {
        'org': params['org'],  # noqa: E501
        'username': params['username'],  # noqa: E501
    }

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/html']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    # Every token/basic/sudo authentication scheme applies to this call.
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/members/{username}', 'GET',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def org_is_public_member(self, org, username, **kwargs):  # noqa: E501
    """Check if a user is a public member of an organization  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the decoded response.

    >>> thread = api.org_is_public_member(org, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param str username: username of the user (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the sync and async paths delegate to the same helper; the
    # helper itself returns either the data or the request thread.
    kwargs['_return_http_data_only'] = True
    return self.org_is_public_member_with_http_info(org, username, **kwargs)  # noqa: E501
def org_is_public_member_with_http_info(self, org, username, **kwargs):  # noqa: E501
    """Check if a user is a public member of an organization  # noqa: E501

    Performs ``GET /orgs/{org}/public_members/{username}``. Synchronous
    by default; pass ``async_req=True`` to receive the request thread.

    >>> thread = api.org_is_public_member_with_http_info(org, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param str username: username of the user (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'org', 'username',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Collect the explicit arguments, then fold in keyword arguments,
    # rejecting anything this endpoint does not understand.
    params = {'org': org, 'username': username}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_is_public_member" % key
            )
        params[key] = val

    # Required path parameters must be present and non-None.
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_is_public_member`")  # noqa: E501
    if params.get('username') is None:
        raise ValueError("Missing the required parameter `username` when calling `org_is_public_member`")  # noqa: E501

    path_params = {
        'org': params['org'],  # noqa: E501
        'username': params['username'],  # noqa: E501
    }

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json', 'text/html']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    # Every token/basic/sudo authentication scheme applies to this call.
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/public_members/{username}', 'GET',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def org_list_current_user_orgs(self, **kwargs):  # noqa: E501
    """List the current user's organizations  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the decoded response.

    >>> thread = api.org_list_current_user_orgs(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Organization]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the sync and async paths delegate to the same helper; the
    # helper itself returns either the data or the request thread.
    kwargs['_return_http_data_only'] = True
    return self.org_list_current_user_orgs_with_http_info(**kwargs)  # noqa: E501
def org_list_current_user_orgs_with_http_info(self, **kwargs):  # noqa: E501
    """List the current user's organizations  # noqa: E501

    Performs ``GET /user/orgs``. Synchronous by default; pass
    ``async_req=True`` to receive the request thread.

    >>> thread = api.org_list_current_user_orgs_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Organization]
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'page', 'limit',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Fold in keyword arguments, rejecting anything this endpoint does
    # not understand.
    params = {}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_list_current_user_orgs" % key
            )
        params[key] = val

    # Pagination parameters are optional; forward only those supplied.
    query_params = []
    for name in ('page', 'limit'):
        if name in params:
            query_params.append((name, params[name]))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    # Every token/basic/sudo authentication scheme applies to this call.
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/user/orgs', 'GET',
        {},  # no path parameters
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[Organization]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def org_list_hooks(self, org, **kwargs):  # noqa: E501
    """List an organization's webhooks  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the decoded response.

    >>> thread = api.org_list_hooks(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Hook]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the sync and async paths delegate to the same helper; the
    # helper itself returns either the data or the request thread.
    kwargs['_return_http_data_only'] = True
    return self.org_list_hooks_with_http_info(org, **kwargs)  # noqa: E501
def org_list_hooks_with_http_info(self, org, **kwargs):  # noqa: E501
    """List an organization's webhooks  # noqa: E501

    Performs ``GET /orgs/{org}/hooks``. Synchronous by default; pass
    ``async_req=True`` to receive the request thread.

    >>> thread = api.org_list_hooks_with_http_info(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Hook]
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'org', 'page', 'limit',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Collect the explicit arguments, then fold in keyword arguments,
    # rejecting anything this endpoint does not understand.
    params = {'org': org}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_list_hooks" % key
            )
        params[key] = val

    # Required path parameter must be present and non-None.
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_list_hooks`")  # noqa: E501

    path_params = {'org': params['org']}  # noqa: E501

    # Pagination parameters are optional; forward only those supplied.
    query_params = []
    for name in ('page', 'limit'):
        if name in params:
            query_params.append((name, params[name]))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    # Every token/basic/sudo authentication scheme applies to this call.
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/hooks', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[Hook]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def org_list_labels(self, org, **kwargs):  # noqa: E501
    """List an organization's labels  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the decoded response.

    >>> thread = api.org_list_labels(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Label]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the sync and async paths delegate to the same helper; the
    # helper itself returns either the data or the request thread.
    kwargs['_return_http_data_only'] = True
    return self.org_list_labels_with_http_info(org, **kwargs)  # noqa: E501
def org_list_labels_with_http_info(self, org, **kwargs):  # noqa: E501
    """List an organization's labels  # noqa: E501

    Performs ``GET /orgs/{org}/labels``. Synchronous by default; pass
    ``async_req=True`` to receive the request thread.

    >>> thread = api.org_list_labels_with_http_info(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Label]
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'org', 'page', 'limit',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Collect the explicit arguments, then fold in keyword arguments,
    # rejecting anything this endpoint does not understand.
    params = {'org': org}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_list_labels" % key
            )
        params[key] = val

    # Required path parameter must be present and non-None.
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_list_labels`")  # noqa: E501

    path_params = {'org': params['org']}  # noqa: E501

    # Pagination parameters are optional; forward only those supplied.
    query_params = []
    for name in ('page', 'limit'):
        if name in params:
            query_params.append((name, params[name]))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    # Every token/basic/sudo authentication scheme applies to this call.
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/labels', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[Label]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def org_list_members(self, org, **kwargs):  # noqa: E501
    """List an organization's members  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the decoded response.

    >>> thread = api.org_list_members(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[User]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the sync and async paths delegate to the same helper; the
    # helper itself returns either the data or the request thread.
    kwargs['_return_http_data_only'] = True
    return self.org_list_members_with_http_info(org, **kwargs)  # noqa: E501
def org_list_members_with_http_info(self, org, **kwargs):  # noqa: E501
    """List an organization's members  # noqa: E501

    Performs ``GET /orgs/{org}/members``. Synchronous by default; pass
    ``async_req=True`` to receive the request thread.

    >>> thread = api.org_list_members_with_http_info(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[User]
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'org', 'page', 'limit',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Collect the explicit arguments, then fold in keyword arguments,
    # rejecting anything this endpoint does not understand.
    params = {'org': org}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_list_members" % key
            )
        params[key] = val

    # Required path parameter must be present and non-None.
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_list_members`")  # noqa: E501

    path_params = {'org': params['org']}  # noqa: E501

    # Pagination parameters are optional; forward only those supplied.
    query_params = []
    for name in ('page', 'limit'):
        if name in params:
            query_params.append((name, params[name]))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    # Every token/basic/sudo authentication scheme applies to this call.
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/members', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[User]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def org_list_public_members(self, org, **kwargs):  # noqa: E501
    """List an organization's public members  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the decoded response.

    >>> thread = api.org_list_public_members(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[User]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the sync and async paths delegate to the same helper; the
    # helper itself returns either the data or the request thread.
    kwargs['_return_http_data_only'] = True
    return self.org_list_public_members_with_http_info(org, **kwargs)  # noqa: E501
def org_list_public_members_with_http_info(self, org, **kwargs):  # noqa: E501
    """List an organization's public members  # noqa: E501

    Performs ``GET /orgs/{org}/public_members``. Synchronous by default;
    pass ``async_req=True`` to receive the request thread.

    >>> thread = api.org_list_public_members_with_http_info(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[User]
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'org', 'page', 'limit',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Collect the explicit arguments, then fold in keyword arguments,
    # rejecting anything this endpoint does not understand.
    params = {'org': org}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_list_public_members" % key
            )
        params[key] = val

    # Required path parameter must be present and non-None.
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_list_public_members`")  # noqa: E501

    path_params = {'org': params['org']}  # noqa: E501

    # Pagination parameters are optional; forward only those supplied.
    query_params = []
    for name in ('page', 'limit'):
        if name in params:
            query_params.append((name, params[name]))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    # Every token/basic/sudo authentication scheme applies to this call.
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/public_members', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[User]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def org_list_repos(self, org, **kwargs):  # noqa: E501
    """List an organization's repos  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the decoded response.

    >>> thread = api.org_list_repos(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Repository]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Both the sync and async paths delegate to the same helper; the
    # helper itself returns either the data or the request thread.
    kwargs['_return_http_data_only'] = True
    return self.org_list_repos_with_http_info(org, **kwargs)  # noqa: E501
def org_list_repos_with_http_info(self, org, **kwargs):  # noqa: E501
    """List an organization's repos  # noqa: E501

    Performs ``GET /orgs/{org}/repos``. Synchronous by default; pass
    ``async_req=True`` to receive the request thread.

    >>> thread = api.org_list_repos_with_http_info(org, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str org: name of the organization (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[Repository]
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'org', 'page', 'limit',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Collect the explicit arguments, then fold in keyword arguments,
    # rejecting anything this endpoint does not understand.
    params = {'org': org}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_list_repos" % key
            )
        params[key] = val

    # Required path parameter must be present and non-None.
    if params.get('org') is None:
        raise ValueError("Missing the required parameter `org` when calling `org_list_repos`")  # noqa: E501

    path_params = {'org': params['org']}  # noqa: E501

    # Pagination parameters are optional; forward only those supplied.
    query_params = []
    for name in ('page', 'limit'):
        if name in params:
            query_params.append((name, params[name]))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    # Every token/basic/sudo authentication scheme applies to this call.
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/orgs/{org}/repos', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[Repository]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def org_list_team_member(self, id, username, **kwargs):  # noqa: E501
    """List a particular member of team  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the decoded response.

    >>> thread = api.org_list_team_member(id, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of the team (required)
    :param str username: username of the member to list (required)
    :return: User
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: `id` shadows the builtin; the name is fixed by the generated
    # public interface. Both sync and async paths use the same helper.
    kwargs['_return_http_data_only'] = True
    return self.org_list_team_member_with_http_info(id, username, **kwargs)  # noqa: E501
def org_list_team_member_with_http_info(self, id, username, **kwargs):  # noqa: E501
    """List a particular member of team  # noqa: E501

    Performs ``GET /teams/{id}/members/{username}``. Synchronous by
    default; pass ``async_req=True`` to receive the request thread.

    >>> thread = api.org_list_team_member_with_http_info(id, username, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of the team (required)
    :param str username: username of the member to list (required)
    :return: User
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'id', 'username',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Collect the explicit arguments, then fold in keyword arguments,
    # rejecting anything this endpoint does not understand.
    params = {'id': id, 'username': username}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_list_team_member" % key
            )
        params[key] = val

    # Required path parameters must be present and non-None.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `org_list_team_member`")  # noqa: E501
    if params.get('username') is None:
        raise ValueError("Missing the required parameter `username` when calling `org_list_team_member`")  # noqa: E501

    path_params = {
        'id': params['id'],  # noqa: E501
        'username': params['username'],  # noqa: E501
    }

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    # Every token/basic/sudo authentication scheme applies to this call.
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/teams/{id}/members/{username}', 'GET',
        path_params,
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='User',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def org_list_team_members(self, id, **kwargs):  # noqa: E501
    """List a team's members  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the decoded response.

    >>> thread = api.org_list_team_members(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of the team (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[User]
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: `id` shadows the builtin; the name is fixed by the generated
    # public interface. Both sync and async paths use the same helper.
    kwargs['_return_http_data_only'] = True
    return self.org_list_team_members_with_http_info(id, **kwargs)  # noqa: E501
def org_list_team_members_with_http_info(self, id, **kwargs):  # noqa: E501
    """List a team's members  # noqa: E501

    Performs ``GET /teams/{id}/members``. Synchronous by default; pass
    ``async_req=True`` to receive the request thread.

    >>> thread = api.org_list_team_members_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: id of the team (required)
    :param int page: page number of results to return (1-based)
    :param int limit: page size of results, maximum page size is 50
    :return: list[User]
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = [
        'id', 'page', 'limit',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Collect the explicit arguments, then fold in keyword arguments,
    # rejecting anything this endpoint does not understand.
    params = {'id': id}
    for key, val in six.iteritems(kwargs):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method org_list_team_members" % key
            )
        params[key] = val

    # Required path parameter must be present and non-None.
    if params.get('id') is None:
        raise ValueError("Missing the required parameter `id` when calling `org_list_team_members`")  # noqa: E501

    path_params = {'id': params['id']}  # noqa: E501

    # Pagination parameters are optional; forward only those supplied.
    query_params = []
    for name in ('page', 'limit'):
        if name in params:
            query_params.append((name, params[name]))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json', 'text/plain']),  # noqa: E501
    }

    # Every token/basic/sudo authentication scheme applies to this call.
    auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501

    return self.api_client.call_api(
        '/teams/{id}/members', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='list[User]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def org_list_team_repos(self, id, **kwargs): # noqa: E501
"""List a team's repos # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.org_list_team_repos(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: id of the team (required)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[Repository]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.org_list_team_repos_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.org_list_team_repos_with_http_info(id, **kwargs) # noqa: E501
return data
    def org_list_team_repos_with_http_info(self, id, **kwargs):  # noqa: E501
        """List a team's repos  # noqa: E501

        Performs GET /teams/{id}/repos and returns the deserialized response
        (plus HTTP info, unless _return_http_data_only is set by the caller).
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.org_list_team_repos_with_http_info(id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: id of the team (required)
        :param int page: page number of results to return (1-based)
        :param int limit: page size of results, maximum page size is 50
        :return: list[Repository]
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is supplied.
        :raises ValueError: if the required `id` parameter is missing/None.
        """
        all_params = ['id', 'page', 'limit']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names (self, id, kwargs, all_params); the kwargs
        # entries are merged in below and the raw 'kwargs' key then removed.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method org_list_team_repos" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `org_list_team_repos`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        # Optional query parameters are only sent when the caller passed them.
        query_params = []
        if 'page' in params:
            query_params.append(('page', params['page']))  # noqa: E501
        if 'limit' in params:
            query_params.append(('limit', params['limit']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'text/plain'])  # noqa: E501
        # Authentication setting
        auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
        return self.api_client.call_api(
            '/teams/{id}/repos', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[Repository]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def org_list_teams(self, org, **kwargs): # noqa: E501
"""List an organization's teams # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.org_list_teams(org, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str org: name of the organization (required)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[Team]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.org_list_teams_with_http_info(org, **kwargs) # noqa: E501
else:
(data) = self.org_list_teams_with_http_info(org, **kwargs) # noqa: E501
return data
def org_list_teams_with_http_info(self, org, **kwargs): # noqa: E501
"""List an organization's teams # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.org_list_teams_with_http_info(org, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str org: name of the organization (required)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[Team]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['org', 'page', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method org_list_teams" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'org' is set
if ('org' not in params or
params['org'] is None):
raise ValueError("Missing the required parameter `org` when calling `org_list_teams`") # noqa: E501
collection_formats = {}
path_params = {}
if 'org' in params:
path_params['org'] = params['org'] # noqa: E501
query_params = []
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'text/plain']) # noqa: E501
# Authentication setting
auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'] # noqa: E501
return self.api_client.call_api(
'/orgs/{org}/teams', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[Team]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def org_list_user_orgs(self, username, **kwargs): # noqa: E501
"""List a user's organizations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.org_list_user_orgs(username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: username of user (required)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: list[Organization]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.org_list_user_orgs_with_http_info(username, **kwargs) # noqa: E501
else:
(data) = self.org_list_user_orgs_with_http_info(username, **kwargs) # noqa: E501
return data
    def org_list_user_orgs_with_http_info(self, username, **kwargs):  # noqa: E501
        """List a user's organizations  # noqa: E501

        Performs GET /users/{username}/orgs and returns the deserialized
        response (plus HTTP info, unless _return_http_data_only is set).
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.org_list_user_orgs_with_http_info(username, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str username: username of user (required)
        :param int page: page number of results to return (1-based)
        :param int limit: page size of results, maximum page size is 50
        :return: list[Organization]
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is supplied.
        :raises ValueError: if `username` is missing/None.
        """
        all_params = ['username', 'page', 'limit']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names; kwargs entries are merged in below and the
        # raw 'kwargs' key then removed.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method org_list_user_orgs" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'username' is set
        if ('username' not in params or
                params['username'] is None):
            raise ValueError("Missing the required parameter `username` when calling `org_list_user_orgs`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'username' in params:
            path_params['username'] = params['username']  # noqa: E501
        # Optional query parameters are only sent when the caller passed them.
        query_params = []
        if 'page' in params:
            query_params.append(('page', params['page']))  # noqa: E501
        if 'limit' in params:
            query_params.append(('limit', params['limit']))  # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'text/plain'])  # noqa: E501
        # Authentication setting
        auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
        return self.api_client.call_api(
            '/users/{username}/orgs', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[Organization]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def org_publicize_member(self, org, username, **kwargs): # noqa: E501
"""Publicize a user's membership # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.org_publicize_member(org, username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str org: name of the organization (required)
:param str username: username of the user (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.org_publicize_member_with_http_info(org, username, **kwargs) # noqa: E501
else:
(data) = self.org_publicize_member_with_http_info(org, username, **kwargs) # noqa: E501
return data
    def org_publicize_member_with_http_info(self, org, username, **kwargs):  # noqa: E501
        """Publicize a user's membership  # noqa: E501

        Performs PUT /orgs/{org}/public_members/{username}; the endpoint
        returns no body (response_type None).
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.org_publicize_member_with_http_info(org, username, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str org: name of the organization (required)
        :param str username: username of the user (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is supplied.
        :raises ValueError: if `org` or `username` is missing/None.
        """
        all_params = ['org', 'username']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names; kwargs entries are merged in below and the
        # raw 'kwargs' key then removed.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method org_publicize_member" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'org' is set
        if ('org' not in params or
                params['org'] is None):
            raise ValueError("Missing the required parameter `org` when calling `org_publicize_member`")  # noqa: E501
        # verify the required parameter 'username' is set
        if ('username' not in params or
                params['username'] is None):
            raise ValueError("Missing the required parameter `username` when calling `org_publicize_member`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'org' in params:
            path_params['org'] = params['org']  # noqa: E501
        if 'username' in params:
            path_params['username'] = params['username']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'text/plain'])  # noqa: E501
        # Authentication setting
        auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
        return self.api_client.call_api(
            '/orgs/{org}/public_members/{username}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def org_remove_team_member(self, id, username, **kwargs): # noqa: E501
"""Remove a team member # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.org_remove_team_member(id, username, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: id of the team (required)
:param str username: username of the user to remove (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.org_remove_team_member_with_http_info(id, username, **kwargs) # noqa: E501
else:
(data) = self.org_remove_team_member_with_http_info(id, username, **kwargs) # noqa: E501
return data
    def org_remove_team_member_with_http_info(self, id, username, **kwargs):  # noqa: E501
        """Remove a team member  # noqa: E501

        Performs DELETE /teams/{id}/members/{username}; the endpoint returns
        no body (response_type None).
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.org_remove_team_member_with_http_info(id, username, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: id of the team (required)
        :param str username: username of the user to remove (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is supplied.
        :raises ValueError: if `id` or `username` is missing/None.
        """
        all_params = ['id', 'username']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names; kwargs entries are merged in below and the
        # raw 'kwargs' key then removed.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method org_remove_team_member" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `org_remove_team_member`")  # noqa: E501
        # verify the required parameter 'username' is set
        if ('username' not in params or
                params['username'] is None):
            raise ValueError("Missing the required parameter `username` when calling `org_remove_team_member`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        if 'username' in params:
            path_params['username'] = params['username']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'text/plain'])  # noqa: E501
        # Authentication setting
        auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
        return self.api_client.call_api(
            '/teams/{id}/members/{username}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def org_remove_team_repository(self, id, org, repo, **kwargs): # noqa: E501
"""Remove a repository from a team # noqa: E501
This does not delete the repository, it only removes the repository from the team. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.org_remove_team_repository(id, org, repo, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int id: id of the team (required)
:param str org: organization that owns the repo to remove (required)
:param str repo: name of the repo to remove (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.org_remove_team_repository_with_http_info(id, org, repo, **kwargs) # noqa: E501
else:
(data) = self.org_remove_team_repository_with_http_info(id, org, repo, **kwargs) # noqa: E501
return data
    def org_remove_team_repository_with_http_info(self, id, org, repo, **kwargs):  # noqa: E501
        """Remove a repository from a team  # noqa: E501

        This does not delete the repository, it only removes the repository from the team.  # noqa: E501
        Performs DELETE /teams/{id}/repos/{org}/{repo}; the endpoint returns
        no body (response_type None).
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.org_remove_team_repository_with_http_info(id, org, repo, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param int id: id of the team (required)
        :param str org: organization that owns the repo to remove (required)
        :param str repo: name of the repo to remove (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is supplied.
        :raises ValueError: if `id`, `org` or `repo` is missing/None.
        """
        all_params = ['id', 'org', 'repo']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot of local names; kwargs entries are merged in below and the
        # raw 'kwargs' key then removed.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method org_remove_team_repository" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params or
                params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `org_remove_team_repository`")  # noqa: E501
        # verify the required parameter 'org' is set
        if ('org' not in params or
                params['org'] is None):
            raise ValueError("Missing the required parameter `org` when calling `org_remove_team_repository`")  # noqa: E501
        # verify the required parameter 'repo' is set
        if ('repo' not in params or
                params['repo'] is None):
            raise ValueError("Missing the required parameter `repo` when calling `org_remove_team_repository`")  # noqa: E501
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']  # noqa: E501
        if 'org' in params:
            path_params['org'] = params['org']  # noqa: E501
        if 'repo' in params:
            path_params['repo'] = params['repo']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json', 'text/plain'])  # noqa: E501
        # Authentication setting
        auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token']  # noqa: E501
        return self.api_client.call_api(
            '/teams/{id}/repos/{org}/{repo}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def team_search(self, org, **kwargs): # noqa: E501
"""Search for teams within an organization # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.team_search(org, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str org: name of the organization (required)
:param str q: keywords to search
:param bool include_desc: include search within team description (defaults to true)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: InlineResponse200
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.team_search_with_http_info(org, **kwargs) # noqa: E501
else:
(data) = self.team_search_with_http_info(org, **kwargs) # noqa: E501
return data
def team_search_with_http_info(self, org, **kwargs): # noqa: E501
"""Search for teams within an organization # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.team_search_with_http_info(org, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str org: name of the organization (required)
:param str q: keywords to search
:param bool include_desc: include search within team description (defaults to true)
:param int page: page number of results to return (1-based)
:param int limit: page size of results, maximum page size is 50
:return: InlineResponse200
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['org', 'q', 'include_desc', 'page', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method team_search" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'org' is set
if ('org' not in params or
params['org'] is None):
raise ValueError("Missing the required parameter `org` when calling `team_search`") # noqa: E501
collection_formats = {}
path_params = {}
if 'org' in params:
path_params['org'] = params['org'] # noqa: E501
query_params = []
if 'q' in params:
query_params.append(('q', params['q'])) # noqa: E501
if 'include_desc' in params:
query_params.append(('include_desc', params['include_desc'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json', 'text/plain']) # noqa: E501
# Authentication setting
auth_settings = ['AccessToken', 'AuthorizationHeaderToken', 'BasicAuth', 'SudoHeader', 'SudoParam', 'Token'] # noqa: E501
return self.api_client.call_api(
'/orgs/{org}/teams/search', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InlineResponse200', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 40.121631
| 130
| 0.597914
| 19,620
| 168,230
| 4.901376
| 0.011315
| 0.051744
| 0.023293
| 0.029949
| 0.991057
| 0.989216
| 0.987854
| 0.985047
| 0.982405
| 0.980627
| 0
| 0.016709
| 0.304161
| 168,230
| 4,192
| 131
| 40.131202
| 0.804786
| 0.309172
| 0
| 0.843441
| 1
| 0
| 0.200193
| 0.044301
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034839
| false
| 0
| 0.00172
| 0
| 0.088602
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a18e005bab43dc37cab0238f8769f69d1bec51e1
| 152
|
py
|
Python
|
pycor/std/korstandard.py
|
ontolog-so/pycor
|
25820c1e48c3cad279baea87bc0c21bf508f99b5
|
[
"BSD-3-Clause"
] | 2
|
2018-08-25T16:18:52.000Z
|
2022-01-09T12:40:13.000Z
|
pycor/std/korstandard.py
|
ontolog-so/pycor
|
25820c1e48c3cad279baea87bc0c21bf508f99b5
|
[
"BSD-3-Clause"
] | null | null | null |
pycor/std/korstandard.py
|
ontolog-so/pycor
|
25820c1e48c3cad279baea87bc0c21bf508f99b5
|
[
"BSD-3-Clause"
] | null | null | null |
import pycor.std.aux
import pycor.std.suffix
import pycor.std.stem
import pycor.std.single
import pycor.std.irregular
import pycor.std.collocation
| 25.333333
| 29
| 0.809211
| 24
| 152
| 5.125
| 0.375
| 0.536585
| 0.682927
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.111842
| 152
| 6
| 29
| 25.333333
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a1f87f39c5940b66c5e23929d802860e8b14c592
| 35,400
|
py
|
Python
|
test_autoarray/unit/operators/inversion/test_integration.py
|
Sketos/PyAutoArray
|
72dc7e8d1c38786915f82a7e7284239e5ce87624
|
[
"MIT"
] | null | null | null |
test_autoarray/unit/operators/inversion/test_integration.py
|
Sketos/PyAutoArray
|
72dc7e8d1c38786915f82a7e7284239e5ce87624
|
[
"MIT"
] | null | null | null |
test_autoarray/unit/operators/inversion/test_integration.py
|
Sketos/PyAutoArray
|
72dc7e8d1c38786915f82a7e7284239e5ce87624
|
[
"MIT"
] | null | null | null |
import autoarray as aa
from autoarray.structures import grids
from autoarray.operators.inversion import mappers
import numpy as np
import pytest
class TestRectangular:
    def test__5_simple_grid__no_sub_grid(self):
        """Rectangular pixelization, 5 unmasked pixels, sub_size=1.

        Builds a 7x7 mask with a 5-pixel cross of unmasked entries, maps the
        5 image-plane points onto a 3x3 rectangular pixelization, and checks
        the mapping matrix, regularization matrix and the inversion's
        reconstructed image against hand-computed values.
        """
        # 7x7 boolean mask; False marks the 5 unmasked pixels (a cross shape).
        mask = np.array(
            [
                [True, True, True, True, True, True, True],
                [True, True, True, True, True, True, True],
                [True, True, True, False, True, True, True],
                [True, True, False, False, False, True, True],
                [True, True, True, False, True, True, True],
                [True, True, True, True, True, True, True],
                [True, True, True, True, True, True, True],
            ]
        )
        mask = aa.mask.manual(mask_2d=mask, pixel_scales=1.0, sub_size=1)
        # Source-plane comprises 5 grid, so 5 masked_image pixels traced to the pix-plane.
        grid = aa.masked.grid.manual_1d(
            grid=np.array(
                [[1.0, -1.0], [1.0, 1.0], [0.0, 0.0], [-1.0, -1.0], [-1.0, 1.0]]
            ),
            mask=mask,
        )
        # There is no sub-grid, so our grid are just the masked_image grid (note the NumPy weighted_data structure
        # ensures this has no sub-gridding)
        pix = aa.pix.Rectangular(shape=(3, 3))
        mapper = pix.mapper_from_grid_and_sparse_grid(
            grid=grid,
            sparse_grid=None,
            inversion_uses_border=False,
            hyper_image=np.ones((2, 2)),
        )
        assert mapper.is_image_plane_pixelization == False
        assert mapper.grid.shape_2d_scaled == pytest.approx((2.0, 2.0), 1.0e-4)
        assert mapper.grid.origin == pytest.approx((0.0, 0.0), 1.0e-4)
        # Each of the 5 image points lands in exactly one of the 9 (3x3)
        # pixelization pixels: corners and centre of the grid.
        assert (
            mapper.mapping_matrix
            == np.array(
                [
                    [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
                ]
            )
        ).all()
        assert mapper.shape_2d == (3, 3)
        assert (mapper.hyper_image == np.ones((2, 2))).all()
        reg = aa.reg.Constant(coefficient=1.0)
        regularization_matrix = reg.regularization_matrix_from_mapper(mapper=mapper)
        # Expected constant-regularization matrix for a 3x3 neighbour graph;
        # diagonals carry a +1e-8 numerical stabilisation term.
        assert (
            regularization_matrix
            == np.array(
                [
                    [2.00000001, -1.0, 0.0, -1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    [-1.0, 3.00000001, -1.0, 0.0, -1.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, -1.0, 2.00000001, 0.0, 0.0, -1.0, 0.0, 0.0, 0.0],
                    [-1.0, 0.0, 0.0, 3.00000001, -1.0, 0.0, -1.0, 0.0, 0.0],
                    [0.0, -1.0, 0.0, -1.0, 4.00000001, -1.0, 0.0, -1.0, 0.0],
                    [0.0, 0.0, -1.0, 0.0, -1.0, 3.00000001, 0.0, 0.0, -1.0],
                    [0.0, 0.0, 0.0, -1.0, 0.0, 0.0, 2.00000001, -1.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, -1.0, 0.0, -1.0, 3.00000001, -1.0],
                    [0.0, 0.0, 0.0, 0.0, 0.0, -1.0, 0.0, -1.0, 2.00000001],
                ]
            )
        ).all()
        # End-to-end: a unit image with a no-blur PSF should be reconstructed
        # exactly (to float tolerance) through the inversion.
        image = aa.array.ones(shape_2d=(7, 7))
        noise_map = aa.array.ones(shape_2d=(7, 7))
        psf = aa.kernel.no_blur()
        imaging = aa.imaging(image=image, noise_map=noise_map, psf=psf)
        masked_data = aa.masked.imaging(imaging=imaging, mask=mask)
        inversion = aa.inversion(
            masked_dataset=masked_data, mapper=mapper, regularization=reg
        )
        assert (inversion.blurred_mapping_matrix == mapper.mapping_matrix).all()
        assert (inversion.regularization_matrix == regularization_matrix).all()
        assert inversion.mapped_reconstructed_image == pytest.approx(np.ones(5), 1.0e-4)
    def test__15_grid__no_sub_grid(self):
        """Rectangular pixelization, 15 image points in 5 clusters, sub_size=1.

        The 15 points are grouped in triplets around 5 locations, so every
        triplet should map to the same pixel of the 3x3 pixelization; the
        regularization matrix is identical to the 5-point case (it depends
        only on the pixelization geometry, not the image grid).
        """
        # 7x7 mask with a 5x3 block of unmasked (False) pixels.
        mask = np.array(
            [
                [True, True, True, True, True, True, True],
                [True, True, True, True, True, True, True],
                [True, False, False, False, False, False, True],
                [True, False, False, False, False, False, True],
                [True, False, False, False, False, False, True],
                [True, True, True, True, True, True, True],
                [True, True, True, True, True, True, True],
            ]
        )
        mask = aa.mask.manual(mask_2d=mask, pixel_scales=1.0, sub_size=1)
        # There is no sub-grid, so our grid are just the masked_image grid (note the NumPy weighted_data structure
        # ensures this has no sub-gridding)
        grid = aa.masked.grid.manual_1d(
            grid=np.array(
                [
                    [0.9, -0.9],
                    [1.0, -1.0],
                    [1.1, -1.1],
                    [0.9, 0.9],
                    [1.0, 1.0],
                    [1.1, 1.1],
                    [-0.01, 0.01],
                    [0.0, 0.0],
                    [0.01, 0.01],
                    [-0.9, -0.9],
                    [-1.0, -1.0],
                    [-1.1, -1.1],
                    [-0.9, 0.9],
                    [-1.0, 1.0],
                    [-1.1, 1.1],
                ]
            ),
            mask=mask,
        )
        pix = aa.pix.Rectangular(shape=(3, 3))
        mapper = pix.mapper_from_grid_and_sparse_grid(
            grid=grid, sparse_grid=None, inversion_uses_border=False
        )
        assert mapper.is_image_plane_pixelization == False
        # Extreme coordinates are +/-1.1, hence the 2.2 x 2.2 scaled shape.
        assert mapper.pixelization_grid.shape_2d_scaled == pytest.approx(
            (2.2, 2.2), 1.0e-4
        )
        assert mapper.pixelization_grid.origin == pytest.approx((0.0, 0.0), 1.0e-4)
        # Each triplet of nearby points maps to one of the 5 occupied pixels.
        assert (
            mapper.mapping_matrix
            == np.array(
                [
                    [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
                    [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
                    [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
                ]
            )
        ).all()
        assert mapper.shape_2d == (3, 3)
        reg = aa.reg.Constant(coefficient=1.0)
        regularization_matrix = reg.regularization_matrix_from_mapper(mapper=mapper)
        # Same 3x3 neighbour-graph regularization matrix as the 5-point test;
        # diagonals carry a +1e-8 numerical stabilisation term.
        assert (
            regularization_matrix
            == np.array(
                [
                    [2.00000001, -1.0, 0.0, -1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    [-1.0, 3.00000001, -1.0, 0.0, -1.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, -1.0, 2.00000001, 0.0, 0.0, -1.0, 0.0, 0.0, 0.0],
                    [-1.0, 0.0, 0.0, 3.00000001, -1.0, 0.0, -1.0, 0.0, 0.0],
                    [0.0, -1.0, 0.0, -1.0, 4.00000001, -1.0, 0.0, -1.0, 0.0],
                    [0.0, 0.0, -1.0, 0.0, -1.0, 3.00000001, 0.0, 0.0, -1.0],
                    [0.0, 0.0, 0.0, -1.0, 0.0, 0.0, 2.00000001, -1.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, -1.0, 0.0, -1.0, 3.00000001, -1.0],
                    [0.0, 0.0, 0.0, 0.0, 0.0, -1.0, 0.0, -1.0, 2.00000001],
                ]
            )
        ).all()
        # End-to-end: a unit image with a no-blur PSF should be reconstructed
        # exactly (to float tolerance) through the inversion.
        image = aa.array.ones(shape_2d=(7, 7))
        noise_map = aa.array.ones(shape_2d=(7, 7))
        psf = aa.kernel.no_blur()
        imaging = aa.imaging(image=image, noise_map=noise_map, psf=psf)
        masked_data = aa.masked.imaging(imaging=imaging, mask=mask)
        inversion = aa.inversion(
            masked_dataset=masked_data, mapper=mapper, regularization=reg
        )
        assert (inversion.blurred_mapping_matrix == mapper.mapping_matrix).all()
        assert (inversion.regularization_matrix == regularization_matrix).all()
        assert inversion.mapped_reconstructed_image == pytest.approx(
            np.ones(15), 1.0e-4
        )
def test__5_simple_grid__include_sub_grid(self):
    """Rectangular (3,3) mapper on a 5-pixel cross mask with a 2x2 sub-grid.

    Checks the fractional (sub-gridded) mapping matrix, the constant
    regularization matrix, and that a no-blur inversion of an all-ones
    image reconstructs all-ones.
    """
    mask = np.array(
        [
            [True, True, True, True, True, True, True],
            [True, True, True, True, True, True, True],
            [True, True, True, False, True, True, True],
            [True, True, False, False, False, True, True],
            [True, True, True, False, True, True, True],
            [True, True, True, True, True, True, True],
            [True, True, True, True, True, True, True],
        ]
    )
    mask = aa.mask.manual(mask_2d=mask, pixel_scales=2.0, sub_size=2)
    # Assume a 2x2 sub-grid, so each of our 5 masked_image-pixels are split into 4.
    # The grid below is unphysical in that the (0.0, 0.0) terms on the end of each sub-grid probably couldn't
    # happen for a real lens calculation. This is to make a mapping_matrix matrix which explicitly tests the
    # sub-grid.
    grid = aa.masked.grid.manual_1d(
        grid=np.array(
            [
                [1.0, -1.0],
                [1.0, -1.0],
                [1.0, -1.0],
                [1.0, 1.0],
                [1.0, 1.0],
                [1.0, 1.0],
                [-1.0, -1.0],
                [-1.0, -1.0],
                [-1.0, -1.0],
                [-1.0, 1.0],
                [-1.0, 1.0],
                [-1.0, 1.0],
                [0.0, 0.0],
                [0.0, 0.0],
                [0.0, 0.0],
                [0.0, 0.0],
                [0.0, 0.0],
                [0.0, 0.0],
                [0.0, 0.0],
                [0.0, 0.0],
            ]
        ),
        mask=mask,
    )
    pix = aa.pix.Rectangular(shape=(3, 3))
    mapper = pix.mapper_from_grid_and_sparse_grid(
        grid=grid, sparse_grid=None, inversion_uses_border=False
    )
    assert mapper.is_image_plane_pixelization == False
    assert mapper.pixelization_grid.shape_2d_scaled == pytest.approx(
        (2.0, 2.0), 1.0e-4
    )
    assert mapper.pixelization_grid.origin == pytest.approx((0.0, 0.0), 1.0e-4)
    # Fractional entries (0.75 / 0.5 / 0.25) come from sub-pixels of one
    # image pixel landing in different source pixels.
    assert (
        mapper.mapping_matrix
        == np.array(
            [
                [0.75, 0.0, 0.25, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.5, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.25, 0.0, 0.75],
                [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
            ]
        )
    ).all()
    assert mapper.shape_2d == (3, 3)
    reg = aa.reg.Constant(coefficient=1.0)
    regularization_matrix = reg.regularization_matrix_from_mapper(mapper=mapper)
    # Standard constant-regularization Laplacian on a 3x3 rectangular grid
    # (diagonal = neighbour count + small numerical offset).
    assert (
        regularization_matrix
        == np.array(
            [
                [2.00000001, -1.0, 0.0, -1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                [-1.0, 3.00000001, -1.0, 0.0, -1.0, 0.0, 0.0, 0.0, 0.0],
                [0.0, -1.0, 2.00000001, 0.0, 0.0, -1.0, 0.0, 0.0, 0.0],
                [-1.0, 0.0, 0.0, 3.00000001, -1.0, 0.0, -1.0, 0.0, 0.0],
                [0.0, -1.0, 0.0, -1.0, 4.00000001, -1.0, 0.0, -1.0, 0.0],
                [0.0, 0.0, -1.0, 0.0, -1.0, 3.00000001, 0.0, 0.0, -1.0],
                [0.0, 0.0, 0.0, -1.0, 0.0, 0.0, 2.00000001, -1.0, 0.0],
                [0.0, 0.0, 0.0, 0.0, -1.0, 0.0, -1.0, 3.00000001, -1.0],
                [0.0, 0.0, 0.0, 0.0, 0.0, -1.0, 0.0, -1.0, 2.00000001],
            ]
        )
    ).all()
    image = aa.array.ones(shape_2d=(7, 7))
    noise_map = aa.array.ones(shape_2d=(7, 7))
    psf = aa.kernel.no_blur()
    imaging = aa.imaging(image=image, noise_map=noise_map, psf=psf)
    masked_data = aa.masked.imaging(imaging=imaging, mask=mask)
    inversion = aa.inversion(
        masked_dataset=masked_data, mapper=mapper, regularization=reg
    )
    # With a no-blur PSF, the blurred mapping matrix equals the raw one.
    assert (inversion.blurred_mapping_matrix == mapper.mapping_matrix).all()
    assert (inversion.regularization_matrix == regularization_matrix).all()
    assert inversion.mapped_reconstructed_image == pytest.approx(np.ones(5), 1.0e-4)
def test__grid__requires_border_relocation(self):
    """Rectangular mapper where four image pixels pile up on one corner.

    NOTE(review): the name mentions border relocation but
    ``inversion_uses_border=False`` is passed — presumably the expected
    matrices encode the *unrelocated* behavior; confirm against the
    sibling test that enables the border.
    """
    mask = np.array(
        [
            [True, True, True, True, True, True, True],
            [True, True, True, True, True, True, True],
            [True, True, True, False, True, True, True],
            [True, True, False, False, False, True, True],
            [True, True, True, False, True, True, True],
            [True, True, True, True, True, True, True],
            [True, True, True, True, True, True, True],
        ]
    )
    mask = aa.mask.manual(mask_2d=mask, pixel_scales=1.0, sub_size=1)
    # Four pixels at (1, 1) and one at (-1, -1): clustered, not spread.
    grid = aa.masked.grid.manual_1d(
        grid=np.array(
            [[1.0, 1.0], [1.0, 1.0], [1.0, 1.0], [1.0, 1.0], [-1.0, -1.0]]
        ),
        mask=mask,
    )
    pix = aa.pix.Rectangular(shape=(3, 3))
    mapper = pix.mapper_from_grid_and_sparse_grid(
        grid=grid, sparse_grid=None, inversion_uses_border=False
    )
    assert mapper.is_image_plane_pixelization == False
    assert mapper.pixelization_grid.shape_2d_scaled == pytest.approx(
        (2.0, 2.0), 1.0e-4
    )
    assert mapper.pixelization_grid.origin == pytest.approx((0.0, 0.0), 1.0e-4)
    assert (
        mapper.mapping_matrix
        == np.array(
            [
                [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
            ]
        )
    ).all()
    assert mapper.shape_2d == (3, 3)
    reg = aa.reg.Constant(coefficient=1.0)
    regularization_matrix = reg.regularization_matrix_from_mapper(mapper=mapper)
    assert (
        regularization_matrix
        == np.array(
            [
                [2.00000001, -1.0, 0.0, -1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                [-1.0, 3.00000001, -1.0, 0.0, -1.0, 0.0, 0.0, 0.0, 0.0],
                [0.0, -1.0, 2.00000001, 0.0, 0.0, -1.0, 0.0, 0.0, 0.0],
                [-1.0, 0.0, 0.0, 3.00000001, -1.0, 0.0, -1.0, 0.0, 0.0],
                [0.0, -1.0, 0.0, -1.0, 4.00000001, -1.0, 0.0, -1.0, 0.0],
                [0.0, 0.0, -1.0, 0.0, -1.0, 3.00000001, 0.0, 0.0, -1.0],
                [0.0, 0.0, 0.0, -1.0, 0.0, 0.0, 2.00000001, -1.0, 0.0],
                [0.0, 0.0, 0.0, 0.0, -1.0, 0.0, -1.0, 3.00000001, -1.0],
                [0.0, 0.0, 0.0, 0.0, 0.0, -1.0, 0.0, -1.0, 2.00000001],
            ]
        )
    ).all()
    image = aa.array.ones(shape_2d=(7, 7))
    noise_map = aa.array.ones(shape_2d=(7, 7))
    psf = aa.kernel.no_blur()
    imaging = aa.imaging(image=image, noise_map=noise_map, psf=psf)
    masked_data = aa.masked.imaging(imaging=imaging, mask=mask)
    inversion = aa.inversion(
        masked_dataset=masked_data, mapper=mapper, regularization=reg
    )
    assert (inversion.blurred_mapping_matrix == mapper.mapping_matrix).all()
    assert (inversion.regularization_matrix == regularization_matrix).all()
    assert inversion.mapped_reconstructed_image == pytest.approx(np.ones(5), 1.0e-4)
def test__interferometer(self):
    """Rectangular-mapper inversion of unit visibilities with zero regularization.

    The reconstruction should map back to visibilities with real part 1
    and imaginary part 0 for all 7 baselines.
    """
    visibilities_mask = np.full(fill_value=False, shape=(7, 2))
    real_space_mask = np.array(
        [
            [False, False, False, False, False, False, False],
            [False, False, False, False, False, False, False],
            [False, False, False, False, False, False, False],
            [False, False, False, False, False, False, False],
            [False, False, False, False, False, False, False],
            [False, False, False, False, False, False, False],
            [False, False, False, False, False, False, False],
        ]
    )
    real_space_mask = aa.mask.manual(
        mask_2d=real_space_mask, pixel_scales=0.1, sub_size=1
    )
    grid = aa.masked.grid.from_mask(mask=real_space_mask)
    pix = aa.pix.Rectangular(shape=(7, 7))
    mapper = pix.mapper_from_grid_and_sparse_grid(
        grid=grid, sparse_grid=None, inversion_uses_border=False
    )
    # Zero coefficient: the inversion is unregularized, so the fit is exact.
    reg = aa.reg.Constant(coefficient=0.0)
    visibilities = aa.visibilities.manual_1d(
        visibilities=[
            [1.0, 0.0],
            [1.0, 0.0],
            [1.0, 0.0],
            [1.0, 0.0],
            [1.0, 0.0],
            [1.0, 0.0],
            [1.0, 0.0],
        ]
    )
    noise_map = aa.visibilities.ones(shape_1d=(7,))
    uv_wavelengths = np.ones(shape=(7, 2))
    interferometer = aa.interferometer(
        visibilities=visibilities,
        noise_map=noise_map,
        uv_wavelengths=uv_wavelengths,
    )
    masked_data = aa.masked.interferometer(
        interferometer=interferometer,
        visibilities_mask=visibilities_mask,
        real_space_mask=real_space_mask,
    )
    inversion = aa.inversion(
        masked_dataset=masked_data, mapper=mapper, regularization=reg
    )
    # Column 0 is the real part, column 1 the imaginary part.
    assert inversion.mapped_reconstructed_visibilities[:, 0] == pytest.approx(
        np.ones(shape=(7,)), 1.0e-4
    )
    assert inversion.mapped_reconstructed_visibilities[:, 1] == pytest.approx(
        np.zeros(shape=(7,)), 1.0e-4
    )
class TestVoronoiMagnification:
    """End-to-end tests of the VoronoiMagnification pixelization.

    Each test builds a mask/grid, derives a sparse (Voronoi) pixelization
    grid, checks the mapper's mapping and regularization matrices against
    hand-computed literals, and verifies a no-blur inversion of an all-ones
    image reconstructs all-ones.
    """

    def test__3x3_simple_grid(self):
        """3x3 unmasked grid: each image pixel maps 1:1 to its own Voronoi pixel."""
        mask = aa.mask.manual(
            mask_2d=np.array(
                [
                    [True, True, True, True, True],
                    [True, False, False, False, True],
                    [True, False, False, False, True],
                    [True, False, False, False, True],
                    [True, True, True, True, True],
                ]
            ),
            pixel_scales=1.0,
            sub_size=1,
        )
        grid = np.array(
            [
                [1.0, -1.0],
                [1.0, 0.0],
                [1.0, 1.0],
                [0.0, -1.0],
                [0.0, 0.0],
                [0.0, 1.0],
                [-1.0, -1.0],
                [-1.0, 0.0],
                [-1.0, 1.0],
            ]
        )
        grid = aa.masked.grid.manual_1d(grid=grid, mask=mask)
        pix = aa.pix.VoronoiMagnification(shape=(3, 3))
        sparse_grid = grids.SparseGrid.from_grid_and_unmasked_2d_grid_shape(
            grid=grid, unmasked_sparse_shape=pix.shape
        )
        pixelization_grid = aa.grid_voronoi(
            grid_1d=sparse_grid.sparse,
            nearest_pixelization_1d_index_for_mask_1d_index=sparse_grid.sparse_1d_index_for_mask_1d_index,
        )
        mapper = pix.mapper_from_grid_and_sparse_grid(
            grid=grid,
            sparse_grid=pixelization_grid,
            inversion_uses_border=False,
            hyper_image=np.ones((2, 2)),
        )
        assert mapper.is_image_plane_pixelization == True
        assert mapper.pixelization_grid.shape_2d_scaled == pytest.approx(
            (2.0, 2.0), 1.0e-4
        )
        assert (mapper.pixelization_grid == sparse_grid.sparse).all()
        # assert mapper.pixelization_grid.origin == pytest.approx((0.0, 0.0), 1.0e-4)
        # hyper_image is passed through the mapper unchanged.
        assert (mapper.hyper_image == np.ones((2, 2))).all()
        assert isinstance(mapper, mappers.MapperVoronoi)
        # Identity mapping: image pixel i lands in Voronoi pixel i.
        assert (
            mapper.mapping_matrix
            == np.array(
                [
                    [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
                ]
            )
        ).all()
        reg = aa.reg.Constant(coefficient=1.0)
        regularization_matrix = reg.regularization_matrix_from_mapper(mapper=mapper)
        assert (
            regularization_matrix
            == np.array(
                [
                    [2.00000001, -1.0, 0.0, -1.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    [-1.0, 3.00000001, -1.0, 0.0, -1.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, -1.0, 2.00000001, 0.0, 0.0, -1.0, 0.0, 0.0, 0.0],
                    [-1.0, 0.0, 0.0, 3.00000001, -1.0, 0.0, -1.0, 0.0, 0.0],
                    [0.0, -1.0, 0.0, -1.0, 4.00000001, -1.0, 0.0, -1.0, 0.0],
                    [0.0, 0.0, -1.0, 0.0, -1.0, 3.00000001, 0.0, 0.0, -1.0],
                    [0.0, 0.0, 0.0, -1.0, 0.0, 0.0, 2.00000001, -1.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, -1.0, 0.0, -1.0, 3.00000001, -1.0],
                    [0.0, 0.0, 0.0, 0.0, 0.0, -1.0, 0.0, -1.0, 2.00000001],
                ]
            )
        ).all()
        image = aa.array.ones(shape_2d=(5, 5))
        noise_map = aa.array.ones(shape_2d=(5, 5))
        psf = aa.kernel.no_blur()
        imaging = aa.imaging(image=image, noise_map=noise_map, psf=psf)
        masked_data = aa.masked.imaging(imaging=imaging, mask=mask)
        inversion = aa.inversion(
            masked_dataset=masked_data, mapper=mapper, regularization=reg
        )
        assert (inversion.blurred_mapping_matrix == mapper.mapping_matrix).all()
        assert (inversion.regularization_matrix == regularization_matrix).all()
        assert inversion.mapped_reconstructed_image == pytest.approx(np.ones(9), 1.0e-4)

    def test__3x3_simple_grid__include_mask(self):
        """5-pixel cross mask: 5 image pixels, 5 Voronoi pixels, 1:1 mapping."""
        mask = aa.mask.manual(
            mask_2d=np.array(
                [
                    [True, True, True, True, True],
                    [True, True, False, True, True],
                    [True, False, False, False, True],
                    [True, True, False, True, True],
                    [True, True, True, True, True],
                ]
            ),
            pixel_scales=1.0,
            sub_size=1,
        )
        grid = np.array([[1.0, 0.0], [0.0, -1.0], [0.0, 0.0], [0.0, 1.0], [-1.0, 0.0]])
        grid = aa.masked.grid.manual_1d(grid=grid, mask=mask)
        pix = aa.pix.VoronoiMagnification(shape=(3, 3))
        sparse_grid = grids.SparseGrid.from_grid_and_unmasked_2d_grid_shape(
            grid=grid, unmasked_sparse_shape=pix.shape
        )
        pixelization_grid = aa.grid_voronoi(
            grid_1d=sparse_grid.sparse,
            nearest_pixelization_1d_index_for_mask_1d_index=sparse_grid.sparse_1d_index_for_mask_1d_index,
        )
        mapper = pix.mapper_from_grid_and_sparse_grid(
            grid=grid, sparse_grid=pixelization_grid, inversion_uses_border=False
        )
        assert mapper.is_image_plane_pixelization == True
        assert mapper.pixelization_grid.shape_2d_scaled == pytest.approx(
            (2.0, 2.0), 1.0e-4
        )
        assert (mapper.pixelization_grid == sparse_grid.sparse).all()
        # assert mapper.pixelization_grid.origin == pytest.approx((0.0, 0.0), 1.0e-4)
        assert isinstance(mapper, mappers.MapperVoronoi)
        assert (
            mapper.mapping_matrix
            == np.array(
                [
                    [1.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 1.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 1.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 1.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 1.0],
                ]
            )
        ).all()
        reg = aa.reg.Constant(coefficient=1.0)
        regularization_matrix = reg.regularization_matrix_from_mapper(mapper=mapper)
        assert (
            regularization_matrix
            == np.array(
                [
                    [3.00000001, -1.0, -1.0, -1.0, 0.0],
                    [-1.0, 3.00000001, -1.0, 0.0, -1.0],
                    [-1.0, -1.0, 4.00000001, -1.0, -1.0],
                    [-1.0, 0.0, -1.0, 3.00000001, -1.0],
                    [0.0, -1.0, -1.0, -1.0, 3.00000001],
                ]
            )
        ).all()
        image = aa.array.ones(shape_2d=(5, 5))
        noise_map = aa.array.ones(shape_2d=(5, 5))
        psf = aa.kernel.no_blur()
        imaging = aa.imaging(image=image, noise_map=noise_map, psf=psf)
        masked_data = aa.masked.imaging(imaging=imaging, mask=mask)
        inversion = aa.inversion(
            masked_dataset=masked_data, mapper=mapper, regularization=reg
        )
        assert (inversion.blurred_mapping_matrix == mapper.mapping_matrix).all()
        assert (inversion.regularization_matrix == regularization_matrix).all()
        assert inversion.mapped_reconstructed_image == pytest.approx(np.ones(5), 1.0e-4)

    def test__3x3_simple_grid__include_mask_and_sub_grid(self):
        """Cross mask with a 2x2 sub-grid: fractional mapping-matrix entries."""
        mask = aa.mask.manual(
            mask_2d=np.array(
                [
                    [True, True, True, True, True],
                    [True, True, False, True, True],
                    [True, False, False, False, True],
                    [True, True, False, True, True],
                    [True, True, True, True, True],
                ]
            ),
            pixel_scales=1.0,
            sub_size=2,
        )
        # Slightly offset coordinates (1.01, 0.01, ...) break Voronoi ties
        # so each sub-pixel has an unambiguous nearest pixelization pixel.
        grid = np.array(
            [
                [1.01, 0.0],
                [1.01, 0.0],
                [1.01, 0.0],
                [0.01, 0.0],
                [0.0, -1.0],
                [0.0, -1.0],
                [0.0, -1.0],
                [0.01, 0.0],
                [0.01, 0.0],
                [0.01, 0.0],
                [0.01, 0.0],
                [0.01, 0.0],
                [0.0, 1.01],
                [0.0, 1.01],
                [0.0, 1.01],
                [0.01, 0.0],
                [-1.01, 0.0],
                [-1.01, 0.0],
                [-1.01, 0.0],
                [0.01, 0.0],
            ]
        )
        grid = aa.masked.grid.manual_1d(grid=grid, mask=mask)
        pix = aa.pix.VoronoiMagnification(shape=(3, 3))
        sparse_grid = grids.SparseGrid.from_grid_and_unmasked_2d_grid_shape(
            grid=grid, unmasked_sparse_shape=pix.shape
        )
        pixelization_grid = aa.grid_voronoi(
            grid_1d=sparse_grid.sparse,
            nearest_pixelization_1d_index_for_mask_1d_index=sparse_grid.sparse_1d_index_for_mask_1d_index,
        )
        mapper = pix.mapper_from_grid_and_sparse_grid(
            grid=grid, sparse_grid=pixelization_grid, inversion_uses_border=False
        )
        assert mapper.is_image_plane_pixelization == True
        assert mapper.grid.shape_2d_scaled == pytest.approx((2.02, 2.01), 1.0e-4)
        assert (mapper.pixelization_grid == sparse_grid.sparse).all()
        # assert mapper.pixelization_grid.origin == pytest.approx((0.0, 0.005), 1.0e-4)
        assert isinstance(mapper, mappers.MapperVoronoi)
        assert (
            mapper.mapping_matrix
            == np.array(
                [
                    [0.75, 0.0, 0.25, 0.0, 0.0],
                    [0.0, 0.75, 0.25, 0.0, 0.0],
                    [0.0, 0.0, 1.0, 0.0, 0.0],
                    [0.0, 0.0, 0.25, 0.75, 0.0],
                    [0.0, 0.0, 0.25, 0.0, 0.75],
                ]
            )
        ).all()
        reg = aa.reg.Constant(coefficient=1.0)
        regularization_matrix = reg.regularization_matrix_from_mapper(mapper=mapper)
        assert (
            regularization_matrix
            == np.array(
                [
                    [3.00000001, -1.0, -1.0, -1.0, 0.0],
                    [-1.0, 3.00000001, -1.0, 0.0, -1.0],
                    [-1.0, -1.0, 4.00000001, -1.0, -1.0],
                    [-1.0, 0.0, -1.0, 3.00000001, -1.0],
                    [0.0, -1.0, -1.0, -1.0, 3.00000001],
                ]
            )
        ).all()
        image = aa.array.ones(shape_2d=(5, 5))
        noise_map = aa.array.ones(shape_2d=(5, 5))
        psf = aa.kernel.no_blur()
        imaging = aa.imaging(image=image, noise_map=noise_map, psf=psf)
        masked_data = aa.masked.imaging(imaging=imaging, mask=mask)
        inversion = aa.inversion(
            masked_dataset=masked_data, mapper=mapper, regularization=reg
        )
        assert (inversion.blurred_mapping_matrix == mapper.mapping_matrix).all()
        assert (inversion.regularization_matrix == regularization_matrix).all()
        assert inversion.mapped_reconstructed_image == pytest.approx(np.ones(5), 1.0e-4)

    def test__3x3_simple_grid__include_mask_with_offset_centre(self):
        """Cross mask shifted off-centre: mapping stays 1:1 per pixel."""
        mask = aa.mask.manual(
            mask_2d=np.array(
                [
                    [True, True, True, True, True, True, True],
                    [True, True, True, True, False, True, True],
                    [True, True, True, False, False, False, True],
                    [True, True, True, True, False, True, True],
                    [True, True, True, True, True, True, True],
                    [True, True, True, True, True, True, True],
                    [True, True, True, True, True, True, True],
                ]
            ),
            pixel_scales=1.0,
            sub_size=1,
        )
        grid = np.array([[2.0, 1.0], [1.0, 0.0], [1.0, 1.0], [1.0, 2.0], [0.0, 1.0]])
        grid = aa.masked.grid.manual_1d(grid=grid, mask=mask)
        pix = aa.pix.VoronoiMagnification(shape=(3, 3))
        sparse_grid = grids.SparseGrid.from_grid_and_unmasked_2d_grid_shape(
            grid=grid, unmasked_sparse_shape=pix.shape
        )
        pixelization_grid = aa.grid_voronoi(
            grid_1d=sparse_grid.sparse,
            nearest_pixelization_1d_index_for_mask_1d_index=sparse_grid.sparse_1d_index_for_mask_1d_index,
        )
        mapper = pix.mapper_from_grid_and_sparse_grid(
            grid=grid, sparse_grid=pixelization_grid, inversion_uses_border=False
        )
        assert mapper.is_image_plane_pixelization == True
        assert mapper.pixelization_grid.shape_2d_scaled == pytest.approx(
            (2.0, 2.0), 1.0e-4
        )
        assert (mapper.pixelization_grid == sparse_grid.sparse).all()
        # assert mapper.pixelization_grid.origin == pytest.approx((1.0, 1.0), 1.0e-4)
        assert isinstance(mapper, mappers.MapperVoronoi)
        assert (
            mapper.mapping_matrix
            == np.array(
                [
                    [1.0, 0.0, 0.0, 0.0, 0.0],
                    [0.0, 1.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 1.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 1.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0, 1.0],
                ]
            )
        ).all()
        reg = aa.reg.Constant(coefficient=1.0)
        regularization_matrix = reg.regularization_matrix_from_mapper(mapper=mapper)
        assert (
            regularization_matrix
            == np.array(
                [
                    [3.00000001, -1.0, -1.0, -1.0, 0.0],
                    [-1.0, 3.00000001, -1.0, 0.0, -1.0],
                    [-1.0, -1.0, 4.00000001, -1.0, -1.0],
                    [-1.0, 0.0, -1.0, 3.00000001, -1.0],
                    [0.0, -1.0, -1.0, -1.0, 3.00000001],
                ]
            )
        ).all()
        image = aa.array.ones(shape_2d=(7, 7))
        noise_map = aa.array.ones(shape_2d=(7, 7))
        psf = aa.kernel.no_blur()
        imaging = aa.imaging(image=image, noise_map=noise_map, psf=psf)
        masked_data = aa.masked.imaging(imaging=imaging, mask=mask)
        inversion = aa.inversion(
            masked_dataset=masked_data, mapper=mapper, regularization=reg
        )
        assert (inversion.blurred_mapping_matrix == mapper.mapping_matrix).all()
        assert (inversion.regularization_matrix == regularization_matrix).all()
        assert inversion.mapped_reconstructed_image == pytest.approx(np.ones(5), 1.0e-4)

    def test__interferometer(self):
        """Voronoi-mapper inversion of unit visibilities with zero regularization."""
        visibilities_mask = np.full(fill_value=False, shape=(7, 2))
        real_space_mask = np.array(
            [
                [False, False, False, False, False, False, False],
                [False, False, False, False, False, False, False],
                [False, False, False, False, False, False, False],
                [False, False, False, False, False, False, False],
                [False, False, False, False, False, False, False],
                [False, False, False, False, False, False, False],
                [False, False, False, False, False, False, False],
            ]
        )
        real_space_mask = aa.mask.manual(
            mask_2d=real_space_mask, pixel_scales=0.1, sub_size=1
        )
        grid = aa.masked.grid.from_mask(mask=real_space_mask)
        pix = aa.pix.VoronoiMagnification(shape=(7, 7))
        sparse_grid = pix.sparse_grid_from_grid(grid=grid)
        mapper = pix.mapper_from_grid_and_sparse_grid(
            grid=grid, sparse_grid=sparse_grid, inversion_uses_border=False
        )
        # Zero coefficient: unregularized, so the fit is exact.
        reg = aa.reg.Constant(coefficient=0.0)
        visibilities = aa.visibilities.manual_1d(
            visibilities=[
                [1.0, 0.0],
                [1.0, 0.0],
                [1.0, 0.0],
                [1.0, 0.0],
                [1.0, 0.0],
                [1.0, 0.0],
                [1.0, 0.0],
            ]
        )
        noise_map = aa.visibilities.ones(shape_1d=(7,))
        uv_wavelengths = np.ones(shape=(7, 2))
        interferometer = aa.interferometer(
            visibilities=visibilities,
            noise_map=noise_map,
            uv_wavelengths=uv_wavelengths,
        )
        masked_data = aa.masked.interferometer(
            interferometer=interferometer,
            visibilities_mask=visibilities_mask,
            real_space_mask=real_space_mask,
        )
        inversion = aa.inversion(
            masked_dataset=masked_data, mapper=mapper, regularization=reg
        )
        # Column 0 is the real part, column 1 the imaginary part.
        assert inversion.mapped_reconstructed_visibilities[:, 0] == pytest.approx(
            np.ones(shape=(7,)), 1.0e-4
        )
        assert inversion.mapped_reconstructed_visibilities[:, 1] == pytest.approx(
            np.zeros(shape=(7,)), 1.0e-4
        )
| 37.381204
| 114
| 0.476977
| 5,080
| 35,400
| 3.202756
| 0.030512
| 0.1689
| 0.2126
| 0.231838
| 0.962446
| 0.958082
| 0.955132
| 0.955132
| 0.955132
| 0.946835
| 0
| 0.135924
| 0.36613
| 35,400
| 946
| 115
| 37.420719
| 0.589153
| 0.027401
| 0
| 0.742894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.100775
| 1
| 0.01292
| false
| 0
| 0.00646
| 0
| 0.021964
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
62a2d17d8ebbbc0719b7150cacb04e45e8a4f712
| 26,047
|
py
|
Python
|
sfx.py
|
pandabottle/DiscordMusicBotLearn
|
efdced3b51eda7aec109f2cd99ae81a7fd987cca
|
[
"MIT"
] | null | null | null |
sfx.py
|
pandabottle/DiscordMusicBotLearn
|
efdced3b51eda7aec109f2cd99ae81a7fd987cca
|
[
"MIT"
] | null | null | null |
sfx.py
|
pandabottle/DiscordMusicBotLearn
|
efdced3b51eda7aec109f2cd99ae81a7fd987cca
|
[
"MIT"
] | null | null | null |
import asyncio

import discord
import youtube_dl
from discord.ext import commands
class Sfx(commands.Cog):
def __init__(self,client):
    """Keep a reference to the Discord bot client for this cog."""
    # NOTE(review): self.client is not read by any command visible in this
    # file — confirm whether it is needed at all.
    self.client = client
@commands.command(name='boom',help='vine boom sfx')
async def boom(self, ctx):
    """Join the caller's voice channel and play the vine boom sound effect.

    Streams the clip from YouTube via youtube_dl + FFmpeg, waits for
    playback to finish without blocking the event loop, then disconnects.
    """
    # Check ctx.author.voice BEFORE dereferencing .channel: the original
    # read ctx.author.voice.channel first, which raises AttributeError for
    # callers not in a voice channel, and it never returned after the
    # error message.
    if ctx.author.voice is None:
        await ctx.send("You're not in a VC")
        return
    voice_channel = ctx.author.voice.channel
    voice = ctx.voice_client
    if voice is None:
        # Bot not connected anywhere yet: join the caller's channel.
        await voice_channel.connect()
    elif voice.channel is not voice_channel:
        # Bot connected elsewhere: move to the caller's channel.
        print("aint there. Coming right now")
        await voice.move_to(voice_channel)
    else:
        # Already in the right channel: stop anything currently playing.
        voice.stop()
    FFMPEG_OPTIONS = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn',
    }
    YDL_OPTIONS = {'format': 'bestaudio'}
    vc = ctx.voice_client
    with youtube_dl.YoutubeDL(YDL_OPTIONS) as ydl:
        url = 'https://www.youtube.com/watch?v=829pvBHyG6I'
        info = ydl.extract_info(url, download=False)
        stream_url = info['formats'][0]['url']
        source = await discord.FFmpegOpusAudio.from_probe(stream_url, **FFMPEG_OPTIONS)
    vc.play(source)
    # The original busy-waited (`while vc.is_playing(): temp = 1`), which
    # starves the asyncio event loop; yield with a real sleep instead.
    while vc.is_playing():
        await asyncio.sleep(0.5)
    await ctx.voice_client.disconnect()
@commands.command(name='chilling',help='Zhong Xina')
async def chilling(self, ctx):
    """Join the caller's voice channel and play the Zhong Xina clip.

    Streams from YouTube via youtube_dl + FFmpeg, waits for playback to
    finish without blocking the event loop, then disconnects.
    """
    # Guard first: ctx.author.voice is None for callers not in a voice
    # channel; the original dereferenced .channel before checking and
    # never returned after sending the error message.
    if ctx.author.voice is None:
        await ctx.send("You're not in a VC")
        return
    voice_channel = ctx.author.voice.channel
    voice = ctx.voice_client
    if voice is None:
        # Bot not connected yet: join the caller's channel.
        await voice_channel.connect()
    elif voice.channel is not voice_channel:
        # Bot connected elsewhere: move over.
        print("aint there. Coming right now")
        await voice.move_to(voice_channel)
    else:
        # Already in place: stop any current playback.
        voice.stop()
    FFMPEG_OPTIONS = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn',
    }
    YDL_OPTIONS = {'format': 'bestaudio'}
    vc = ctx.voice_client
    with youtube_dl.YoutubeDL(YDL_OPTIONS) as ydl:
        url = 'https://www.youtube.com/watch?v=KH_XIt-hm2Y'
        info = ydl.extract_info(url, download=False)
        stream_url = info['formats'][0]['url']
        source = await discord.FFmpegOpusAudio.from_probe(stream_url, **FFMPEG_OPTIONS)
    vc.play(source)
    # Replace the original event-loop-blocking busy-wait with a sleep.
    while vc.is_playing():
        await asyncio.sleep(0.5)
    await ctx.voice_client.disconnect()
@commands.command(name='dog',help='what is it doin?')
async def dog(self, ctx):
    """Join the caller's voice channel and play the 'what is it doin' clip.

    Streams from YouTube via youtube_dl + FFmpeg, waits for playback to
    finish without blocking the event loop, then disconnects.
    """
    # Guard first: ctx.author.voice is None for callers not in a voice
    # channel; the original dereferenced .channel before checking and
    # never returned after sending the error message.
    if ctx.author.voice is None:
        await ctx.send("You're not in a VC")
        return
    voice_channel = ctx.author.voice.channel
    voice = ctx.voice_client
    if voice is None:
        # Bot not connected yet: join the caller's channel.
        await voice_channel.connect()
    elif voice.channel is not voice_channel:
        # Bot connected elsewhere: move over.
        print("aint there. Coming right now")
        await voice.move_to(voice_channel)
    else:
        # Already in place: stop any current playback.
        voice.stop()
    FFMPEG_OPTIONS = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn',
    }
    YDL_OPTIONS = {'format': 'bestaudio'}
    vc = ctx.voice_client
    with youtube_dl.YoutubeDL(YDL_OPTIONS) as ydl:
        url = 'https://www.youtube.com/watch?v=SdmfidIYS84'
        info = ydl.extract_info(url, download=False)
        stream_url = info['formats'][0]['url']
        source = await discord.FFmpegOpusAudio.from_probe(stream_url, **FFMPEG_OPTIONS)
    vc.play(source)
    # Replace the original event-loop-blocking busy-wait with a sleep.
    while vc.is_playing():
        await asyncio.sleep(0.5)
    await ctx.voice_client.disconnect()
@commands.command(name='sus',help='when the imposter')
async def sus(self, ctx):
    """Join the caller's voice channel and play the Among Us 'sus' clip.

    Streams from YouTube via youtube_dl + FFmpeg, waits for playback to
    finish without blocking the event loop, then disconnects.
    """
    # Guard first: ctx.author.voice is None for callers not in a voice
    # channel; the original dereferenced .channel before checking and
    # never returned after sending the error message.
    if ctx.author.voice is None:
        await ctx.send("You're not in a VC")
        return
    voice_channel = ctx.author.voice.channel
    voice = ctx.voice_client
    if voice is None:
        # Bot not connected yet: join the caller's channel.
        await voice_channel.connect()
    elif voice.channel is not voice_channel:
        # Bot connected elsewhere: move over.
        print("aint there. Coming right now")
        await voice.move_to(voice_channel)
    else:
        # Already in place: stop any current playback.
        voice.stop()
    FFMPEG_OPTIONS = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn',
    }
    YDL_OPTIONS = {'format': 'bestaudio'}
    vc = ctx.voice_client
    with youtube_dl.YoutubeDL(YDL_OPTIONS) as ydl:
        url = 'https://www.youtube.com/watch?v=ekL881PJMjI'
        info = ydl.extract_info(url, download=False)
        stream_url = info['formats'][0]['url']
        source = await discord.FFmpegOpusAudio.from_probe(stream_url, **FFMPEG_OPTIONS)
    vc.play(source)
    # Replace the original event-loop-blocking busy-wait with a sleep.
    while vc.is_playing():
        await asyncio.sleep(0.5)
    await ctx.voice_client.disconnect()
@commands.command(name='fart',help='special fart')
async def fart(self, ctx):
    """Join the caller's voice channel and play the fart sound effect.

    Streams from YouTube via youtube_dl + FFmpeg, waits for playback to
    finish without blocking the event loop, then disconnects.
    """
    # Guard first: ctx.author.voice is None for callers not in a voice
    # channel; the original dereferenced .channel before checking and
    # never returned after sending the error message.
    if ctx.author.voice is None:
        await ctx.send("You're not in a VC")
        return
    voice_channel = ctx.author.voice.channel
    voice = ctx.voice_client
    if voice is None:
        # Bot not connected yet: join the caller's channel.
        await voice_channel.connect()
    elif voice.channel is not voice_channel:
        # Bot connected elsewhere: move over.
        print("aint there. Coming right now")
        await voice.move_to(voice_channel)
    else:
        # Already in place: stop any current playback.
        voice.stop()
    FFMPEG_OPTIONS = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn',
    }
    YDL_OPTIONS = {'format': 'bestaudio'}
    vc = ctx.voice_client
    with youtube_dl.YoutubeDL(YDL_OPTIONS) as ydl:
        url = 'https://www.youtube.com/watch?v=Qi1KebO4bzc'
        info = ydl.extract_info(url, download=False)
        stream_url = info['formats'][0]['url']
        source = await discord.FFmpegOpusAudio.from_probe(stream_url, **FFMPEG_OPTIONS)
    vc.play(source)
    # Replace the original event-loop-blocking busy-wait with a sleep.
    while vc.is_playing():
        await asyncio.sleep(0.5)
    await ctx.voice_client.disconnect()
@commands.command(name='mad',help='why u haf to be mad')
async def mad(self, ctx):
    """Join the caller's voice channel and play the 'why u haf to be mad' clip.

    Streams from YouTube via youtube_dl + FFmpeg, waits for playback to
    finish without blocking the event loop, then disconnects.
    """
    # Guard first: ctx.author.voice is None for callers not in a voice
    # channel; the original dereferenced .channel before checking and
    # never returned after sending the error message.
    if ctx.author.voice is None:
        await ctx.send("You're not in a VC")
        return
    voice_channel = ctx.author.voice.channel
    voice = ctx.voice_client
    if voice is None:
        # Bot not connected yet: join the caller's channel.
        await voice_channel.connect()
    elif voice.channel is not voice_channel:
        # Bot connected elsewhere: move over.
        print("aint there. Coming right now")
        await voice.move_to(voice_channel)
    else:
        # Already in place: stop any current playback.
        voice.stop()
    FFMPEG_OPTIONS = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn',
    }
    YDL_OPTIONS = {'format': 'bestaudio'}
    vc = ctx.voice_client
    with youtube_dl.YoutubeDL(YDL_OPTIONS) as ydl:
        url = 'https://www.youtube.com/watch?v=xzpndHtdl9A'
        info = ydl.extract_info(url, download=False)
        stream_url = info['formats'][0]['url']
        source = await discord.FFmpegOpusAudio.from_probe(stream_url, **FFMPEG_OPTIONS)
    vc.play(source)
    # Replace the original event-loop-blocking busy-wait with a sleep.
    while vc.is_playing():
        await asyncio.sleep(0.5)
    await ctx.voice_client.disconnect()
@commands.command(name='baby',help='Why are you baby?')
async def baby(self, ctx):
    """Join the caller's voice channel and play the 'Why are you baby?' clip.

    Streams from YouTube via youtube_dl + FFmpeg, waits for playback to
    finish without blocking the event loop, then disconnects.
    """
    # Guard first: ctx.author.voice is None for callers not in a voice
    # channel; the original dereferenced .channel before checking and
    # never returned after sending the error message.
    if ctx.author.voice is None:
        await ctx.send("You're not in a VC")
        return
    voice_channel = ctx.author.voice.channel
    voice = ctx.voice_client
    if voice is None:
        # Bot not connected yet: join the caller's channel.
        await voice_channel.connect()
    elif voice.channel is not voice_channel:
        # Bot connected elsewhere: move over.
        print("aint there. Coming right now")
        await voice.move_to(voice_channel)
    else:
        # Already in place: stop any current playback.
        voice.stop()
    FFMPEG_OPTIONS = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn',
    }
    YDL_OPTIONS = {'format': 'bestaudio'}
    vc = ctx.voice_client
    with youtube_dl.YoutubeDL(YDL_OPTIONS) as ydl:
        url = 'https://www.youtube.com/watch?v=j3glwtXrj0c'
        info = ydl.extract_info(url, download=False)
        stream_url = info['formats'][0]['url']
        source = await discord.FFmpegOpusAudio.from_probe(stream_url, **FFMPEG_OPTIONS)
    vc.play(source)
    # Replace the original event-loop-blocking busy-wait with a sleep.
    while vc.is_playing():
        await asyncio.sleep(0.5)
    await ctx.voice_client.disconnect()
@commands.command(name='lag',help='Your internet')
async def lag(self, ctx):
    """Join the caller's voice channel and play the lag sound effect.

    Streams from YouTube via youtube_dl + FFmpeg, waits for playback to
    finish without blocking the event loop, then disconnects.
    """
    # Guard first: ctx.author.voice is None for callers not in a voice
    # channel; the original dereferenced .channel before checking and
    # never returned after sending the error message.
    if ctx.author.voice is None:
        await ctx.send("You're not in a VC")
        return
    voice_channel = ctx.author.voice.channel
    voice = ctx.voice_client
    if voice is None:
        # Bot not connected yet: join the caller's channel.
        await voice_channel.connect()
    elif voice.channel is not voice_channel:
        # Bot connected elsewhere: move over.
        print("aint there. Coming right now")
        await voice.move_to(voice_channel)
    else:
        # Already in place: stop any current playback.
        voice.stop()
    FFMPEG_OPTIONS = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn',
    }
    YDL_OPTIONS = {'format': 'bestaudio'}
    vc = ctx.voice_client
    with youtube_dl.YoutubeDL(YDL_OPTIONS) as ydl:
        url = 'https://www.youtube.com/watch?v=E52eC_XoSqI'
        info = ydl.extract_info(url, download=False)
        stream_url = info['formats'][0]['url']
        source = await discord.FFmpegOpusAudio.from_probe(stream_url, **FFMPEG_OPTIONS)
    vc.play(source)
    # Replace the original event-loop-blocking busy-wait with a sleep.
    while vc.is_playing():
        await asyncio.sleep(0.5)
    await ctx.voice_client.disconnect()
@commands.command(name='stopit',help='Just stop')
async def stopit(self, ctx):
    """Join the caller's voice channel and play the 'stop it' clip.

    Streams from YouTube via youtube_dl + FFmpeg, waits for playback to
    finish without blocking the event loop, then disconnects.
    """
    # Guard first: ctx.author.voice is None for callers not in a voice
    # channel; the original dereferenced .channel before checking and
    # never returned after sending the error message.
    if ctx.author.voice is None:
        await ctx.send("You're not in a VC")
        return
    voice_channel = ctx.author.voice.channel
    voice = ctx.voice_client
    if voice is None:
        # Bot not connected yet: join the caller's channel.
        await voice_channel.connect()
    elif voice.channel is not voice_channel:
        # Bot connected elsewhere: move over.
        print("aint there. Coming right now")
        await voice.move_to(voice_channel)
    else:
        # Already in place: stop any current playback.
        voice.stop()
    FFMPEG_OPTIONS = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn',
    }
    YDL_OPTIONS = {'format': 'bestaudio'}
    vc = ctx.voice_client
    with youtube_dl.YoutubeDL(YDL_OPTIONS) as ydl:
        url = 'https://www.youtube.com/watch?v=GLRJT5IU88s'
        info = ydl.extract_info(url, download=False)
        stream_url = info['formats'][0]['url']
        source = await discord.FFmpegOpusAudio.from_probe(stream_url, **FFMPEG_OPTIONS)
    vc.play(source)
    # Replace the original event-loop-blocking busy-wait with a sleep.
    while vc.is_playing():
        await asyncio.sleep(0.5)
    await ctx.voice_client.disconnect()
@commands.command(name='bruh',help='bruh')
async def bruh(self, ctx):
    """Join the caller's voice channel and play the bruh sound effect.

    Streams from YouTube via youtube_dl + FFmpeg, waits for playback to
    finish without blocking the event loop, then disconnects.
    """
    # Guard first: ctx.author.voice is None for callers not in a voice
    # channel; the original dereferenced .channel before checking and
    # never returned after sending the error message.
    if ctx.author.voice is None:
        await ctx.send("You're not in a VC")
        return
    voice_channel = ctx.author.voice.channel
    voice = ctx.voice_client
    if voice is None:
        # Bot not connected yet: join the caller's channel.
        await voice_channel.connect()
    elif voice.channel is not voice_channel:
        # Bot connected elsewhere: move over.
        print("aint there. Coming right now")
        await voice.move_to(voice_channel)
    else:
        # Already in place: stop any current playback.
        voice.stop()
    FFMPEG_OPTIONS = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn',
    }
    YDL_OPTIONS = {'format': 'bestaudio'}
    vc = ctx.voice_client
    with youtube_dl.YoutubeDL(YDL_OPTIONS) as ydl:
        url = 'https://www.youtube.com/watch?v=2ZIpFytCSVc'
        info = ydl.extract_info(url, download=False)
        stream_url = info['formats'][0]['url']
        source = await discord.FFmpegOpusAudio.from_probe(stream_url, **FFMPEG_OPTIONS)
    vc.play(source)
    # Replace the original event-loop-blocking busy-wait with a sleep.
    while vc.is_playing():
        await asyncio.sleep(0.5)
    await ctx.voice_client.disconnect()
@commands.command(name='bs', help='What I am hearing from you')
async def bs(self, ctx):
    """Join the caller's voice channel, play the 'bs' clip, then leave.

    Fixes vs. original: guards against the caller not being in a voice
    channel (old code dereferenced ``.channel`` first and crashed), and
    sleeps asynchronously while playing instead of busy-waiting, which
    blocked the asyncio event loop.
    """
    import asyncio  # local import: the file's import block is outside this chunk

    if ctx.author.voice is None:  # caller is not in a voice channel
        await ctx.send("You're not in a VC")
        return
    voice_channel = ctx.author.voice.channel
    voice = ctx.voice_client
    if voice is None:
        # Bot not connected anywhere yet: join the caller's channel.
        await voice_channel.connect()
    elif voice.channel is not voice_channel:
        print("aint there. Coming right now")
        await voice.move_to(voice_channel)
    else:
        # Already in the right channel: cut off anything still playing.
        ctx.voice_client.stop()

    ffmpeg_options = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn',
    }
    with youtube_dl.YoutubeDL({'format': 'bestaudio'}) as ydl:
        info = ydl.extract_info('https://www.youtube.com/watch?v=-TzHx9ByBCs',
                                download=False)
        stream_url = info['formats'][0]['url']
        source = await discord.FFmpegOpusAudio.from_probe(stream_url, **ffmpeg_options)
    vc = ctx.voice_client
    vc.play(source)
    while vc.is_playing():
        await asyncio.sleep(0.5)  # yield to the event loop while the clip plays
    await ctx.voice_client.disconnect()
@commands.command(name='throw', help='Flashbang out')
async def throw(self, ctx):
    """Join the caller's voice channel, play the flashbang clip, then leave.

    Fixes vs. original: guards against the caller not being in a voice
    channel (old code dereferenced ``.channel`` first and crashed), and
    sleeps asynchronously while playing instead of busy-waiting, which
    blocked the asyncio event loop.
    """
    import asyncio  # local import: the file's import block is outside this chunk

    if ctx.author.voice is None:  # caller is not in a voice channel
        await ctx.send("You're not in a VC")
        return
    voice_channel = ctx.author.voice.channel
    voice = ctx.voice_client
    if voice is None:
        # Bot not connected anywhere yet: join the caller's channel.
        await voice_channel.connect()
    elif voice.channel is not voice_channel:
        print("aint there. Coming right now")
        await voice.move_to(voice_channel)
    else:
        # Already in the right channel: cut off anything still playing.
        ctx.voice_client.stop()

    ffmpeg_options = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn',
    }
    with youtube_dl.YoutubeDL({'format': 'bestaudio'}) as ydl:
        info = ydl.extract_info('https://www.youtube.com/watch?v=X5e2NCWN9ac',
                                download=False)
        stream_url = info['formats'][0]['url']
        source = await discord.FFmpegOpusAudio.from_probe(stream_url, **ffmpeg_options)
    vc = ctx.voice_client
    vc.play(source)
    while vc.is_playing():
        await asyncio.sleep(0.5)  # yield to the event loop while the clip plays
    await ctx.voice_client.disconnect()
@commands.command(name='ded', help='Dark Souls death')
async def ded(self, ctx):
    """Join the caller's voice channel, play the Dark Souls death clip, then leave.

    Fixes vs. original: guards against the caller not being in a voice
    channel (old code dereferenced ``.channel`` first and crashed), and
    sleeps asynchronously while playing instead of busy-waiting, which
    blocked the asyncio event loop.
    """
    import asyncio  # local import: the file's import block is outside this chunk

    if ctx.author.voice is None:  # caller is not in a voice channel
        await ctx.send("You're not in a VC")
        return
    voice_channel = ctx.author.voice.channel
    voice = ctx.voice_client
    if voice is None:
        # Bot not connected anywhere yet: join the caller's channel.
        await voice_channel.connect()
    elif voice.channel is not voice_channel:
        print("aint there. Coming right now")
        await voice.move_to(voice_channel)
    else:
        # Already in the right channel: cut off anything still playing.
        ctx.voice_client.stop()

    ffmpeg_options = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn',
    }
    with youtube_dl.YoutubeDL({'format': 'bestaudio'}) as ydl:
        info = ydl.extract_info('https://www.youtube.com/watch?v=j_nV2jcTFvA',
                                download=False)
        stream_url = info['formats'][0]['url']
        source = await discord.FFmpegOpusAudio.from_probe(stream_url, **ffmpeg_options)
    vc = ctx.voice_client
    vc.play(source)
    while vc.is_playing():
        await asyncio.sleep(0.5)  # yield to the event loop while the clip plays
    await ctx.voice_client.disconnect()
@commands.command(name='hades', help='Hadeez nuts')
async def hades(self, ctx):
    """Join the caller's voice channel, play the Hades clip (first 5 s), then leave.

    Fixes vs. original: guards against the caller not being in a voice
    channel (old code dereferenced ``.channel`` first and crashed), and
    sleeps asynchronously while playing instead of busy-waiting, which
    blocked the asyncio event loop.
    """
    import asyncio  # local import: the file's import block is outside this chunk

    if ctx.author.voice is None:  # caller is not in a voice channel
        await ctx.send("You're not in a VC")
        return
    voice_channel = ctx.author.voice.channel
    voice = ctx.voice_client
    if voice is None:
        # Bot not connected anywhere yet: join the caller's channel.
        await voice_channel.connect()
    elif voice.channel is not voice_channel:
        print("aint there. Coming right now")
        await voice.move_to(voice_channel)
    else:
        # Already in the right channel: cut off anything still playing.
        ctx.voice_client.stop()

    # -t 5 truncates playback to the first five seconds of the clip.
    ffmpeg_options = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn -t 5',
    }
    with youtube_dl.YoutubeDL({'format': 'worstaudio'}) as ydl:
        info = ydl.extract_info('https://www.youtube.com/watch?v=B5leUzYa_qo',
                                download=False)
        stream_url = info['formats'][0]['url']
        source = await discord.FFmpegOpusAudio.from_probe(stream_url, **ffmpeg_options)
    vc = ctx.voice_client
    vc.play(source)
    while vc.is_playing():
        await asyncio.sleep(0.5)  # yield to the event loop while the clip plays
    await ctx.voice_client.disconnect()
@commands.command(name='idol', help='Super idol')
async def idol(self, ctx):
    """Join the caller's voice channel, play the Super Idol clip, then leave.

    Fixes vs. original: guards against the caller not being in a voice
    channel (old code dereferenced ``.channel`` first and crashed), and
    sleeps asynchronously while playing instead of busy-waiting, which
    blocked the asyncio event loop.
    """
    import asyncio  # local import: the file's import block is outside this chunk

    if ctx.author.voice is None:  # caller is not in a voice channel
        await ctx.send("You're not in a VC")
        return
    voice_channel = ctx.author.voice.channel
    voice = ctx.voice_client
    if voice is None:
        # Bot not connected anywhere yet: join the caller's channel.
        await voice_channel.connect()
    elif voice.channel is not voice_channel:
        print("aint there. Coming right now")
        await voice.move_to(voice_channel)
    else:
        # Already in the right channel: cut off anything still playing.
        ctx.voice_client.stop()

    # -t 13.49 truncates playback to the clip's first 13.49 seconds.
    ffmpeg_options = {
        'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
        'options': '-vn -t 13.49',
    }
    with youtube_dl.YoutubeDL({'format': 'worstaudio'}) as ydl:
        info = ydl.extract_info('https://www.youtube.com/watch?v=HvUY80Dhgxo',
                                download=False)
        stream_url = info['formats'][0]['url']
        source = await discord.FFmpegOpusAudio.from_probe(stream_url, **ffmpeg_options)
    vc = ctx.voice_client
    vc.play(source)
    while vc.is_playing():
        await asyncio.sleep(0.5)  # yield to the event loop while the clip plays
    await ctx.voice_client.disconnect()
def setup(client):
    """Extension entry point: register the Sfx cog on the given bot."""
    cog = Sfx(client)
    client.add_cog(cog)
| 42.560458
| 132
| 0.57116
| 3,128
| 26,047
| 4.628197
| 0.054348
| 0.087035
| 0.072529
| 0.041445
| 0.944533
| 0.944533
| 0.944533
| 0.944533
| 0.944533
| 0.944533
| 0
| 0.009633
| 0.326487
| 26,047
| 612
| 133
| 42.560458
| 0.815596
| 0.083503
| 0
| 0.866828
| 0
| 0
| 0.156389
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.004843
| false
| 0
| 0.007264
| 0
| 0.014528
| 0.03632
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
62b331c18f3220e201897945d67aaa513bf4c549
| 5,632
|
py
|
Python
|
dataset_readers/lstm_cws.py
|
EuphoriaYan/ICCRE
|
0f5a969a0ab195161d1995540a7464d14836ed99
|
[
"MIT"
] | 2
|
2020-07-03T12:22:33.000Z
|
2020-08-03T12:37:35.000Z
|
dataset_readers/lstm_cws.py
|
EuphoriaYan/ICCRE
|
0f5a969a0ab195161d1995540a7464d14836ed99
|
[
"MIT"
] | null | null | null |
dataset_readers/lstm_cws.py
|
EuphoriaYan/ICCRE
|
0f5a969a0ab195161d1995540a7464d14836ed99
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
root_path = "/".join(os.path.realpath(__file__).split("/")[:-2])
if root_path not in sys.path:
sys.path.insert(0, root_path)
class SequentialSampler(object):
pass
from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler
import csv
import json
import logging
import random
import argparse
import numpy as np
from tqdm import tqdm
from dataset_readers.lstm_data_utils import *
class Ctb6CWSProcessor(LSTMDataProcessor):
    """Data processor for the CTB6 Chinese word segmentation dataset."""

    def get_train_examples(self, data_dir):
        """Build training examples from train.char.bmes. See base class."""
        rows = self._read_tsv(os.path.join(data_dir, "train.char.bmes"))
        return self._create_examples(rows, "train")

    def get_test_examples(self, data_dir):
        """Build test examples from test.char.bmes."""
        rows = self._read_tsv(os.path.join(data_dir, "test.char.bmes"))
        return self._create_examples(rows, "test")

    def get_dev_examples(self, data_dir):
        """Build dev examples from dev.char.bmes."""
        rows = self._read_tsv(os.path.join(data_dir, "dev.char.bmes"))
        return self._create_examples(rows, "dev")

    def get_labels(self):
        """Return the BMES segmentation tag set (order fixes the label ids)."""
        return ['E-SEG', 'S-SEG', 'B-SEG', 'M-SEG', ]

    def _create_examples(self, lines, set_type):
        """Turn raw rows into LSTMInputExample objects, skipping blank rows."""
        examples = []
        for idx, row in enumerate(lines):
            if row == "\n":
                continue
            examples.append(LSTMInputExample(
                guid="{}_{}".format("ctb6.cws", str(idx)),
                text_a=row[0],
                text_b=None,
                label=row[1].split(" ")))
        return examples
class PkuCWSProcessor(LSTMDataProcessor):
    """Data processor for the PKU Chinese word segmentation dataset."""

    def get_train_examples(self, data_dir):
        """Build training examples from train.char.bmes. See base class."""
        rows = self._read_tsv(os.path.join(data_dir, "train.char.bmes"))
        return self._create_examples(rows, "train")

    def get_test_examples(self, data_dir):
        """Build test examples from test.char.bmes."""
        rows = self._read_tsv(os.path.join(data_dir, "test.char.bmes"))
        return self._create_examples(rows, "test")

    def get_dev_examples(self, data_dir):
        """Build dev examples from dev.char.bmes."""
        rows = self._read_tsv(os.path.join(data_dir, "dev.char.bmes"))
        return self._create_examples(rows, "dev")

    def get_labels(self):
        """Return the BMES segmentation tag set (order fixes the label ids)."""
        return ['B-SEG', 'M-SEG', 'S-SEG', 'E-SEG',]

    def _create_examples(self, lines, set_type):
        """Turn raw rows into LSTMInputExample objects, skipping blank rows."""
        examples = []
        for idx, row in enumerate(lines):
            if row == "\n":
                continue
            examples.append(LSTMInputExample(
                guid="{}_{}".format("pku.cws", str(idx)),
                text_a=row[0],
                text_b=None,
                label=row[1].split(" ")))
        return examples
class MsrCWSProcessor(LSTMDataProcessor):
    """Data processor for the MSR Chinese word segmentation dataset."""

    def get_train_examples(self, data_dir):
        """Build training examples from train.char.bmes. See base class."""
        rows = self._read_tsv(os.path.join(data_dir, "train.char.bmes"))
        return self._create_examples(rows, "train")

    def get_test_examples(self, data_dir):
        """Build test examples from test.char.bmes."""
        rows = self._read_tsv(os.path.join(data_dir, "test.char.bmes"))
        return self._create_examples(rows, "test")

    def get_dev_examples(self, data_dir):
        """Build dev examples from dev.char.bmes."""
        rows = self._read_tsv(os.path.join(data_dir, "dev.char.bmes"))
        return self._create_examples(rows, "dev")

    def get_labels(self):
        """Return the BMES segmentation tag set (order fixes the label ids)."""
        return ['S-SEG', 'M-SEG', 'B-SEG', 'E-SEG',]

    def _create_examples(self, lines, set_type):
        """Turn raw rows into LSTMInputExample objects, skipping blank rows."""
        examples = []
        for idx, row in enumerate(lines):
            if row == "\n":
                continue
            examples.append(LSTMInputExample(
                # NOTE(review): "mrs.cws" looks like a typo for "msr.cws";
                # kept as-is because guids may be persisted/compared elsewhere.
                guid="{}_{}".format("mrs.cws", str(idx)),
                text_a=row[0],
                text_b=None,
                label=row[1].split(" ")))
        return examples
class ZuozhuanCWSLSTMProcessor(LSTMDataProcessor):
    """Processor for a personal classical-Chinese CWS dataset (Zuozhuan)."""

    @classmethod
    def _read_tsv(cls, input_file, quotechar=None):
        """Read a '|'-delimited file and return its rows as lists of fields."""
        with open(input_file, "r", encoding='utf-8') as fh:
            return list(csv.reader(fh, delimiter="|", quotechar=quotechar))

    def get_train_examples(self, data_dir):
        """Build training examples from tb.txt. See base class."""
        return self._create_examples(
            self._read_tsv(os.path.join(data_dir, "tb.txt")), "train")

    def get_test_examples(self, data_dir):
        """Build test examples from ts.txt."""
        return self._create_examples(
            self._read_tsv(os.path.join(data_dir, "ts.txt")), "test")

    def get_dev_examples(self, data_dir):
        """Build dev examples; reuses the ts.txt test split as dev."""
        return self._create_examples(
            self._read_tsv(os.path.join(data_dir, "ts.txt")), "dev")

    def get_labels(self):
        """Return the binary B/I segmentation tag set."""
        return ['B', 'I', ]

    def _create_examples(self, lines, set_type):
        """Turn raw rows into LSTMInputExample objects with B/I labels."""
        # Map the dataset's bracketed tag vocabulary onto B/I.
        tag_map = {"[BOS]": "B",
                   "[IOS]": "I"}
        examples = []
        for idx, row in enumerate(lines):
            if row == "\n":
                continue
            raw_tags = row[1].split(" ")
            examples.append(LSTMInputExample(
                guid="{}_{}".format("per.cws", str(idx)),
                text_a=row[0],
                text_b=None,
                label=[tag_map[t] for t in raw_tags]))
        return examples
| 32.744186
| 104
| 0.612571
| 749
| 5,632
| 4.400534
| 0.169559
| 0.101942
| 0.087379
| 0.069175
| 0.731796
| 0.731796
| 0.731796
| 0.723604
| 0.723604
| 0.723604
| 0
| 0.003837
| 0.259588
| 5,632
| 172
| 105
| 32.744186
| 0.786571
| 0.091619
| 0
| 0.621622
| 0
| 0
| 0.065908
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.189189
| false
| 0.009009
| 0.099099
| 0.144144
| 0.522523
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
62c7b7508988541963434cee549c267c2dd4c512
| 569
|
py
|
Python
|
autoPyTorch/utils/metalearning/pipeline/__init__.py
|
urbanmatthias/Auto-PyTorch
|
fe7c51b33c48041e405ef2975ee691c0539691ab
|
[
"BSD-3-Clause"
] | null | null | null |
autoPyTorch/utils/metalearning/pipeline/__init__.py
|
urbanmatthias/Auto-PyTorch
|
fe7c51b33c48041e405ef2975ee691c0539691ab
|
[
"BSD-3-Clause"
] | null | null | null |
autoPyTorch/utils/metalearning/pipeline/__init__.py
|
urbanmatthias/Auto-PyTorch
|
fe7c51b33c48041e405ef2975ee691c0539691ab
|
[
"BSD-3-Clause"
] | null | null | null |
from autoPyTorch.utils.metalearning.pipeline.meta_learning_settings import MetaLearningSettings
from autoPyTorch.utils.metalearning.pipeline.collect import Collect
from autoPyTorch.utils.metalearning.pipeline.meta_learning_fit import MetaLearningFit
from autoPyTorch.utils.metalearning.pipeline.for_run import ForRun
from autoPyTorch.utils.metalearning.pipeline.for_autonet_config import ForAutoNetConfig
from autoPyTorch.utils.metalearning.pipeline.for_instance import ForInstance
from autoPyTorch.utils.metalearning.pipeline.set_autonet_config import SetAutoNetConfig
| 81.285714
| 95
| 0.903339
| 66
| 569
| 7.636364
| 0.348485
| 0.208333
| 0.277778
| 0.444444
| 0.621032
| 0.462302
| 0.206349
| 0
| 0
| 0
| 0
| 0
| 0.047452
| 569
| 7
| 96
| 81.285714
| 0.929889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
62d71c93842ea3461f07d98f49ca7d64c6ebd55a
| 2,040
|
py
|
Python
|
truss/em514_problem08.py
|
rgrandin/MechanicsTools
|
d492b60fb6aec7ccb2de94f08dbc86928baefda9
|
[
"BSD-3-Clause"
] | 1
|
2021-03-15T21:26:12.000Z
|
2021-03-15T21:26:12.000Z
|
truss/em514_problem08.py
|
rgrandin/MechanicsTools
|
d492b60fb6aec7ccb2de94f08dbc86928baefda9
|
[
"BSD-3-Clause"
] | null | null | null |
truss/em514_problem08.py
|
rgrandin/MechanicsTools
|
d492b60fb6aec7ccb2de94f08dbc86928baefda9
|
[
"BSD-3-Clause"
] | 1
|
2020-11-26T09:44:45.000Z
|
2020-11-26T09:44:45.000Z
|
# Generate input data for EM 514, Homework Problem 8
def DefineInputs():
    """Return (nodes, members) defining the EM 514 homework-8 truss.

    Each node dict holds planar coordinates plus per-axis BC flags
    ('d' = prescribed displacement, 'f' = prescribed force) and values;
    each member dict holds end-node indices and material/section data.
    """
    area = 200.0e-6  # cross-sectional area, m^2

    def _node(x, y, xflag, yflag, ybcval):
        # Every node is planar (z displacement fixed) with zero x BC value.
        return {'x': x, 'y': y, 'z': 0.0e0,
                'xflag': xflag, 'xbcval': 0.0,
                'yflag': yflag, 'ybcval': ybcval,
                'zflag': 'd', 'zbcval': 0.0e0}

    nodes = [
        _node(0.0e0,  0.0e0, 'd', 'd',  0.0e0),   # pin support at origin
        _node(3.0e0,  0.0e0, 'f', 'f',  0.0e0),
        _node(6.0e0,  0.0e0, 'f', 'f',  0.0e0),
        _node(3.0e0, -4.0e0, 'f', 'f', -9.0e3),   # 9 kN downward load
        _node(6.0e0, -4.0e0, 'f', 'f', -15.0e3),  # 15 kN downward load
        _node(9.0e0, -4.0e0, 'f', 'd',  0.0e0),   # vertical support
    ]

    # All members share the same steel section; only connectivity differs.
    connectivity = [(0, 1), (1, 2), (0, 3), (1, 3), (2, 3),
                    (3, 4), (2, 4), (2, 5), (4, 5)]
    members = [{'start': s, 'end': e, 'E': 200.0e9, 'A': area,
                'sigma_yield': 36.0e6, 'sigma_ult': 66.0e6}
               for s, e in connectivity]

    return nodes, members
| 92.727273
| 150
| 0.534314
| 343
| 2,040
| 3.125364
| 0.148688
| 0.074627
| 0.058769
| 0.067164
| 0.890858
| 0.890858
| 0.890858
| 0.890858
| 0.874067
| 0.842351
| 0
| 0.145723
| 0.169118
| 2,040
| 22
| 151
| 92.727273
| 0.486726
| 0.02451
| 0
| 0
| 1
| 0
| 0.253394
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c50a6e5b8690a0db241748ff5e6c2841034ad809
| 20,111
|
py
|
Python
|
WikiPricesPB/WikiDailyOHLCV_pb2.py
|
d0xc/TFRecordSharp
|
0d1d9ffab21a289f8c7954d6a4f38fa569503672
|
[
"MIT"
] | null | null | null |
WikiPricesPB/WikiDailyOHLCV_pb2.py
|
d0xc/TFRecordSharp
|
0d1d9ffab21a289f8c7954d6a4f38fa569503672
|
[
"MIT"
] | null | null | null |
WikiPricesPB/WikiDailyOHLCV_pb2.py
|
d0xc/TFRecordSharp
|
0d1d9ffab21a289f8c7954d6a4f38fa569503672
|
[
"MIT"
] | null | null | null |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: WikiDailyOHLCV.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='WikiDailyOHLCV.proto',
package='d1100.data',
syntax='proto3',
serialized_pb=_b('\n\x14WikiDailyOHLCV.proto\x12\nd1100.data\"\xf3\x01\n\x0eWikiDailyOHLCV\x12\x0e\n\x06ticker\x18\x01 \x01(\t\x12\n\n\x02ts\x18\x02 \x01(\x03\x12\x0c\n\x04open\x18\x03 \x01(\x01\x12\x0c\n\x04high\x18\x04 \x01(\x01\x12\x0b\n\x03low\x18\x05 \x01(\x01\x12\r\n\x05\x63lose\x18\x06 \x01(\x01\x12\x0e\n\x06volume\x18\x07 \x01(\x01\x12\x12\n\nexDividend\x18\x08 \x01(\x01\x12\x12\n\nsplitRatio\x18\t \x01(\x01\x12\x0f\n\x07\x61\x64jOpen\x18\n \x01(\x01\x12\x0f\n\x07\x61\x64jHigh\x18\x0b \x01(\x01\x12\x0e\n\x06\x61\x64jLow\x18\x0c \x01(\x01\x12\x10\n\x08\x61\x64jClose\x18\r \x01(\x01\x12\x11\n\tadjVolume\x18\x0e \x01(\x01\"\x86\x02\n\x11WikiDailyColOHLCV\x12\r\n\x05\x64\x61yts\x18\x01 \x01(\x06\x12\x0f\n\x07tickers\x18\x02 \x03(\t\x12\r\n\x05opens\x18\x03 \x03(\x01\x12\r\n\x05highs\x18\x04 \x03(\x01\x12\x0c\n\x04lows\x18\x05 \x03(\x01\x12\x0e\n\x06\x63loses\x18\x06 \x03(\x01\x12\x0f\n\x07volumes\x18\x07 \x03(\x01\x12\x13\n\x0b\x65xDividends\x18\x08 \x03(\x01\x12\x13\n\x0bsplitRatios\x18\t \x03(\x01\x12\x10\n\x08\x61\x64jOpens\x18\n \x03(\x01\x12\x10\n\x08\x61\x64jHighs\x18\x0b \x03(\x01\x12\x0f\n\x07\x61\x64jLows\x18\x0c \x03(\x01\x12\x11\n\tadjCloses\x18\r \x03(\x01\x12\x12\n\nadjVolumes\x18\x0e \x03(\x01\"\x8b\x02\n\x16WikiDailyColBytesOHLCV\x12\r\n\x05\x64\x61yts\x18\x01 \x01(\x06\x12\x0f\n\x07tickers\x18\x02 \x03(\t\x12\r\n\x05opens\x18\x03 \x01(\x0c\x12\r\n\x05highs\x18\x04 \x01(\x0c\x12\x0c\n\x04lows\x18\x05 \x01(\x0c\x12\x0e\n\x06\x63loses\x18\x06 \x01(\x0c\x12\x0f\n\x07volumes\x18\x07 \x01(\x0c\x12\x13\n\x0b\x65xDividends\x18\x08 \x01(\x0c\x12\x13\n\x0bsplitRatios\x18\t \x01(\x0c\x12\x10\n\x08\x61\x64jOpens\x18\n \x01(\x0c\x12\x10\n\x08\x61\x64jHighs\x18\x0b \x01(\x0c\x12\x0f\n\x07\x61\x64jLows\x18\x0c \x01(\x0c\x12\x11\n\tadjCloses\x18\r \x01(\x0c\x12\x12\n\nadjVolumes\x18\x0e \x01(\x0c\x62\x06proto3')
)
_WIKIDAILYOHLCV = _descriptor.Descriptor(
name='WikiDailyOHLCV',
full_name='d1100.data.WikiDailyOHLCV',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='ticker', full_name='d1100.data.WikiDailyOHLCV.ticker', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ts', full_name='d1100.data.WikiDailyOHLCV.ts', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='open', full_name='d1100.data.WikiDailyOHLCV.open', index=2,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='high', full_name='d1100.data.WikiDailyOHLCV.high', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='low', full_name='d1100.data.WikiDailyOHLCV.low', index=4,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='close', full_name='d1100.data.WikiDailyOHLCV.close', index=5,
number=6, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='volume', full_name='d1100.data.WikiDailyOHLCV.volume', index=6,
number=7, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exDividend', full_name='d1100.data.WikiDailyOHLCV.exDividend', index=7,
number=8, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='splitRatio', full_name='d1100.data.WikiDailyOHLCV.splitRatio', index=8,
number=9, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='adjOpen', full_name='d1100.data.WikiDailyOHLCV.adjOpen', index=9,
number=10, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='adjHigh', full_name='d1100.data.WikiDailyOHLCV.adjHigh', index=10,
number=11, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='adjLow', full_name='d1100.data.WikiDailyOHLCV.adjLow', index=11,
number=12, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='adjClose', full_name='d1100.data.WikiDailyOHLCV.adjClose', index=12,
number=13, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='adjVolume', full_name='d1100.data.WikiDailyOHLCV.adjVolume', index=13,
number=14, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=37,
serialized_end=280,
)
_WIKIDAILYCOLOHLCV = _descriptor.Descriptor(
name='WikiDailyColOHLCV',
full_name='d1100.data.WikiDailyColOHLCV',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dayts', full_name='d1100.data.WikiDailyColOHLCV.dayts', index=0,
number=1, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tickers', full_name='d1100.data.WikiDailyColOHLCV.tickers', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='opens', full_name='d1100.data.WikiDailyColOHLCV.opens', index=2,
number=3, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='highs', full_name='d1100.data.WikiDailyColOHLCV.highs', index=3,
number=4, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lows', full_name='d1100.data.WikiDailyColOHLCV.lows', index=4,
number=5, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='closes', full_name='d1100.data.WikiDailyColOHLCV.closes', index=5,
number=6, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='volumes', full_name='d1100.data.WikiDailyColOHLCV.volumes', index=6,
number=7, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='exDividends', full_name='d1100.data.WikiDailyColOHLCV.exDividends', index=7,
number=8, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='splitRatios', full_name='d1100.data.WikiDailyColOHLCV.splitRatios', index=8,
number=9, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='adjOpens', full_name='d1100.data.WikiDailyColOHLCV.adjOpens', index=9,
number=10, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='adjHighs', full_name='d1100.data.WikiDailyColOHLCV.adjHighs', index=10,
number=11, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='adjLows', full_name='d1100.data.WikiDailyColOHLCV.adjLows', index=11,
number=12, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='adjCloses', full_name='d1100.data.WikiDailyColOHLCV.adjCloses', index=12,
number=13, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='adjVolumes', full_name='d1100.data.WikiDailyColOHLCV.adjVolumes', index=13,
number=14, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=283,
serialized_end=545,
)
# NOTE: machine-generated protocol-buffer descriptor code (protoc output).
# This section should normally be regenerated from the .proto file rather
# than edited by hand.
#
# WikiDailyColBytesOHLCV: columnar daily OHLCV record for one day. Unlike
# WikiDailyColOHLCV above (repeated scalars), each price/volume column here
# is a single packed bytes blob (default_value=_b("")).
_WIKIDAILYCOLBYTESOHLCV = _descriptor.Descriptor(
  name='WikiDailyColBytesOHLCV',
  full_name='d1100.data.WikiDailyColBytesOHLCV',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    # Field 1: the day timestamp — a single scalar (label=1) numeric value.
    _descriptor.FieldDescriptor(
      name='dayts', full_name='d1100.data.WikiDailyColBytesOHLCV.dayts', index=0,
      number=1, type=6, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    # Field 2: ticker symbols — the only repeated field (label=3) here.
    _descriptor.FieldDescriptor(
      name='tickers', full_name='d1100.data.WikiDailyColBytesOHLCV.tickers', index=1,
      number=2, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    # Fields 3-14: one packed bytes column per OHLCV series (raw and
    # dividend/split-adjusted); all share the same shape, differing only
    # in name/index/number.
    _descriptor.FieldDescriptor(
      name='opens', full_name='d1100.data.WikiDailyColBytesOHLCV.opens', index=2,
      number=3, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='highs', full_name='d1100.data.WikiDailyColBytesOHLCV.highs', index=3,
      number=4, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='lows', full_name='d1100.data.WikiDailyColBytesOHLCV.lows', index=4,
      number=5, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='closes', full_name='d1100.data.WikiDailyColBytesOHLCV.closes', index=5,
      number=6, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='volumes', full_name='d1100.data.WikiDailyColBytesOHLCV.volumes', index=6,
      number=7, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='exDividends', full_name='d1100.data.WikiDailyColBytesOHLCV.exDividends', index=7,
      number=8, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='splitRatios', full_name='d1100.data.WikiDailyColBytesOHLCV.splitRatios', index=8,
      number=9, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='adjOpens', full_name='d1100.data.WikiDailyColBytesOHLCV.adjOpens', index=9,
      number=10, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='adjHighs', full_name='d1100.data.WikiDailyColBytesOHLCV.adjHighs', index=10,
      number=11, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='adjLows', full_name='d1100.data.WikiDailyColBytesOHLCV.adjLows', index=11,
      number=12, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='adjCloses', full_name='d1100.data.WikiDailyColBytesOHLCV.adjCloses', index=12,
      number=13, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
    _descriptor.FieldDescriptor(
      name='adjVolumes', full_name='d1100.data.WikiDailyColBytesOHLCV.adjVolumes', index=13,
      number=14, type=12, cpp_type=9, label=1,
      has_default_value=False, default_value=_b(""),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None, file=DESCRIPTOR),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  # Byte offsets of this message inside the serialized FileDescriptorProto.
  serialized_start=548,
  serialized_end=815,
)

# Register the message descriptors with the file descriptor, then build the
# concrete message classes through the reflection machinery and register them
# with the default symbol database so they are resolvable by full name.
DESCRIPTOR.message_types_by_name['WikiDailyOHLCV'] = _WIKIDAILYOHLCV
DESCRIPTOR.message_types_by_name['WikiDailyColOHLCV'] = _WIKIDAILYCOLOHLCV
DESCRIPTOR.message_types_by_name['WikiDailyColBytesOHLCV'] = _WIKIDAILYCOLBYTESOHLCV
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
WikiDailyOHLCV = _reflection.GeneratedProtocolMessageType('WikiDailyOHLCV', (_message.Message,), dict(
  DESCRIPTOR = _WIKIDAILYOHLCV,
  __module__ = 'WikiDailyOHLCV_pb2'
  # @@protoc_insertion_point(class_scope:d1100.data.WikiDailyOHLCV)
  ))
_sym_db.RegisterMessage(WikiDailyOHLCV)
WikiDailyColOHLCV = _reflection.GeneratedProtocolMessageType('WikiDailyColOHLCV', (_message.Message,), dict(
  DESCRIPTOR = _WIKIDAILYCOLOHLCV,
  __module__ = 'WikiDailyOHLCV_pb2'
  # @@protoc_insertion_point(class_scope:d1100.data.WikiDailyColOHLCV)
  ))
_sym_db.RegisterMessage(WikiDailyColOHLCV)
WikiDailyColBytesOHLCV = _reflection.GeneratedProtocolMessageType('WikiDailyColBytesOHLCV', (_message.Message,), dict(
  DESCRIPTOR = _WIKIDAILYCOLBYTESOHLCV,
  __module__ = 'WikiDailyOHLCV_pb2'
  # @@protoc_insertion_point(class_scope:d1100.data.WikiDailyColBytesOHLCV)
  ))
_sym_db.RegisterMessage(WikiDailyColBytesOHLCV)
# @@protoc_insertion_point(module_scope)
| 47.769596
| 1,849
| 0.728954
| 2,668
| 20,111
| 5.277736
| 0.073838
| 0.07329
| 0.041545
| 0.054329
| 0.854769
| 0.757546
| 0.712449
| 0.700518
| 0.689014
| 0.689014
| 0
| 0.061583
| 0.139277
| 20,111
| 420
| 1,850
| 47.883333
| 0.751878
| 0.01795
| 0
| 0.681934
| 1
| 0.002545
| 0.203779
| 0.176071
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.015267
| 0
| 0.015267
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c550fb51f298479b7c8b7639412d5fd3a185ccab
| 3,958
|
py
|
Python
|
packages/augur-core/tests/trading/test_orderbook_ordering.py
|
autun12/augur
|
71ec78e09c1bba3ef15a9f90336edc78c76b5c9e
|
[
"MIT"
] | null | null | null |
packages/augur-core/tests/trading/test_orderbook_ordering.py
|
autun12/augur
|
71ec78e09c1bba3ef15a9f90336edc78c76b5c9e
|
[
"MIT"
] | null | null | null |
packages/augur-core/tests/trading/test_orderbook_ordering.py
|
autun12/augur
|
71ec78e09c1bba3ef15a9f90336edc78c76b5c9e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from ethereum.tools import tester
from ethereum.tools.tester import TransactionFailed
from utils import longTo32Bytes, longToHexString, bytesToHexString, fix, AssertLog, stringToBytes, EtherDelta, PrintGasUsed
from constants import ASK, BID, YES, NO
from pytest import raises, fixture, mark
from pprint import pprint
def test_correct_order_for_same_price(contractsFixture, cash, market, universe):
    """Bids at the same price must sort FIFO behind better-priced bids.

    Places one bid above, four at, and one below 3000 and checks that the
    resulting doubly linked order book is: 3001 first, then the 3000 bids in
    creation order, then 2999 last.
    """
    create_order = contractsFixture.contracts['CreateOrder']
    orders = contractsFixture.contracts['Orders']
    trade_group = "42"
    null_order = longTo32Bytes(0)

    # The book for this outcome must start empty.
    assert orders.getBestOrderId(BID, market.address, 1) == null_order

    def make_bid(amount, price):
        # Place a YES bid funded with exactly amount * price.
        return create_order.publicCreateOrder(
            BID, fix(amount), price, market.address, YES,
            longTo32Bytes(0), longTo32Bytes(0), trade_group,
            sender=tester.k1, value=fix(str(amount), str(price)))

    order_1 = make_bid(1, 3001)
    order_2 = make_bid(1, 3000)
    order_3 = make_bid(2, 3000)
    order_4 = make_bid(3, 3000)
    order_5 = make_bid(1, 2999)
    order_6 = make_bid(4, 3000)

    # Expected book, best to worst: 3001 first, FIFO at 3000 (2, 3, 4, 6),
    # then 2999 last.
    expected = [order_1, order_2, order_3, order_4, order_6, order_5]
    for better, worse in zip(expected, expected[1:]):
        assert orders.getWorseOrderId(better) == worse
        assert orders.getBetterOrderId(worse) == better
    assert orders.getWorseOrderId(expected[-1]) == longTo32Bytes(0)
    assert orders.getBetterOrderId(expected[0]) == longTo32Bytes(0)
def test_no_orphans_when_same_price(contractsFixture, cash, market, universe):
    """Same-price bids must stay fully doubly linked (no orphaned nodes).

    Creates one bid at 3001 followed by three at 3000 and verifies every
    better/worse pointer in both directions of the list.
    """
    create_order = contractsFixture.contracts['CreateOrder']
    orders = contractsFixture.contracts['Orders']
    trade_group = "42"
    null_order = longTo32Bytes(0)

    # create orders
    assert orders.getBestOrderId(BID, market.address, 1) == null_order

    def make_bid(amount, price):
        # Place a YES bid funded with exactly amount * price.
        return create_order.publicCreateOrder(
            BID, fix(amount), price, market.address, YES,
            longTo32Bytes(0), longTo32Bytes(0), trade_group,
            sender=tester.k1, value=fix(str(amount), str(price)))

    book = [
        make_bid(1, 3001),
        make_bid(1, 3000),
        make_bid(2, 3000),
        make_bid(3, 3000),
    ]

    # Walk the list in both directions and check the links are symmetric.
    for better, worse in zip(book, book[1:]):
        assert orders.getWorseOrderId(better) == worse
        assert orders.getBetterOrderId(worse) == better
    assert orders.getWorseOrderId(book[-1]) == longTo32Bytes(0)
    assert orders.getBetterOrderId(book[0]) == longTo32Bytes(0)
| 70.678571
| 178
| 0.752653
| 433
| 3,958
| 6.856813
| 0.166282
| 0.1226
| 0.104412
| 0.114517
| 0.817784
| 0.805995
| 0.781745
| 0.781745
| 0.781745
| 0.781745
| 0
| 0.0697
| 0.122789
| 3,958
| 55
| 179
| 71.963636
| 0.785426
| 0.00859
| 0
| 0.666667
| 0
| 0
| 0.022438
| 0
| 0
| 0
| 0
| 0
| 0.479167
| 1
| 0.041667
| false
| 0
| 0.125
| 0
| 0.166667
| 0.020833
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c568ed6d416c3cd7b7b14e11a94bbd58f50cb179
| 180
|
py
|
Python
|
automate_the_boring_stuff/cap_10_assertion.py
|
juarezhenriquelisboa/Python
|
5c5498b33e7cba4e3bfa322a6a76bed74b68e6bf
|
[
"MIT"
] | 1
|
2021-01-01T14:46:28.000Z
|
2021-01-01T14:46:28.000Z
|
automate_the_boring_stuff/cap_10_assertion.py
|
juarezhenriquelisboa/Python
|
5c5498b33e7cba4e3bfa322a6a76bed74b68e6bf
|
[
"MIT"
] | null | null | null |
automate_the_boring_stuff/cap_10_assertion.py
|
juarezhenriquelisboa/Python
|
5c5498b33e7cba4e3bfa322a6a76bed74b68e6bf
|
[
"MIT"
] | null | null | null |
# Demo of the assert statement (Automate the Boring Stuff, ch. 10).
door_status = 'open'
# Passes: the condition is True, so the failure message is never shown.
assert door_status == 'open', 'The pod bay doors need to be "open".'
door_status = 'close'
# Intentionally fails: raises AssertionError carrying the message above.
assert door_status == 'open', 'The pod bay doors need to be "open".'
| 45
| 68
| 0.694444
| 30
| 180
| 4.033333
| 0.4
| 0.330579
| 0.347107
| 0.330579
| 0.760331
| 0.760331
| 0.760331
| 0.760331
| 0.760331
| 0.760331
| 0
| 0
| 0.161111
| 180
| 4
| 69
| 45
| 0.801325
| 0
| 0
| 0.5
| 0
| 0
| 0.491713
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3d7cd8b382b84e5a001067ce590624c33dc1724c
| 1,573
|
py
|
Python
|
data-mining/use-multi-gpu.py
|
RoderickLi/python-snippet
|
7cc672c455a768864cf38d6bbebdf8337b9c510c
|
[
"MIT"
] | 6
|
2019-11-06T02:11:43.000Z
|
2021-03-21T02:48:10.000Z
|
data-mining/use-multi-gpu.py
|
RoderickLi/python-snippet
|
7cc672c455a768864cf38d6bbebdf8337b9c510c
|
[
"MIT"
] | null | null | null |
data-mining/use-multi-gpu.py
|
RoderickLi/python-snippet
|
7cc672c455a768864cf38d6bbebdf8337b9c510c
|
[
"MIT"
] | 2
|
2019-11-06T02:11:44.000Z
|
2019-11-06T02:46:00.000Z
|
from keras.utils import multi_gpu_model
from keras.callbacks import Callback

# Checkpoint filename template for ModelCheckpoint-style naming.
# (The "regession" spelling is kept as-is so any tooling globbing for these
# files keeps working.)
pretrain_filepath = "regession-pretrain-{epoch:02d}-{val_loss:.2f}.hdf5"


# Fix: the original snippet defined pretrain_filepath, the Callback import and
# this class twice back to back; the duplicates are removed, keeping the later
# (commented) copy.
class mutiGPUSaver(Callback):  # need to have your OWN saver
    """Callback that checkpoints the wrapped single-device model each epoch.

    Saves ``self.model_to_save`` (the model passed in), not the parallel
    wrapper the callback is attached to.
    """

    def __init__(self, model):
        # Keep a reference to the template model so saving bypasses the
        # multi-GPU wrapper.
        self.model_to_save = model

    def on_epoch_end(self, epoch, logs=None):
        self.model_to_save.save('pretrain_%s.hdf5' % epoch)


# NOTE(review): p_model, OPTIMIZER, LOSS_FUNC_NAME, METRICS and the train/test
# arrays are assumed to be defined earlier in the surrounding notebook/script.
pretrain_checkpoint = mutiGPUSaver(p_model)
p_parallel_model = multi_gpu_model(p_model, gpus=2)  # do NOT specify gpu in global keras setting
p_parallel_model.summary()

# Pre-training: freeze the base layer and train only the rest first.
p_parallel_model.layers[0].trainable = False
p_parallel_model.compile(optimizer=OPTIMIZER, loss=LOSS_FUNC_NAME, metrics=METRICS)
p_parallel_model_history = p_parallel_model.fit(
    [train_img_X, train_extra_X],
    train_label,
    validation_data=[[test_img_X, test_extra_X], test_label],
    epochs=10,
    batch_size=len(train_extra_X),
)

# Fine-tuning: unfreeze the base layer and continue with checkpointing.
p_parallel_model.layers[0].trainable = True
p_parallel_model.compile(optimizer=OPTIMIZER, loss=LOSS_FUNC_NAME, metrics=METRICS)
p_parallel_model_history = p_parallel_model.fit(
    [train_img_X, train_extra_X],
    train_label,
    validation_data=[[test_img_X, test_extra_X], test_label],
    epochs=40,
    batch_size=len(train_extra_X),
    callbacks=[pretrain_checkpoint]
)
| 32.770833
| 96
| 0.774317
| 238
| 1,573
| 4.747899
| 0.310924
| 0.079646
| 0.123894
| 0.053097
| 0.80531
| 0.80531
| 0.711504
| 0.711504
| 0.711504
| 0.711504
| 0
| 0.012257
| 0.118245
| 1,573
| 47
| 97
| 33.468085
| 0.802451
| 0.047044
| 0
| 0.702703
| 0
| 0
| 0.088353
| 0.066934
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108108
| false
| 0
| 0.081081
| 0
| 0.243243
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3dd8b20203d559c367b45c7a188c4305f2393027
| 6,258
|
py
|
Python
|
TaglifeRest/TagLifeApp/migrations/0001_initial.py
|
furkankav/TagLife
|
1f4325220209940a70e3847846ef2d05cffcb42a
|
[
"Apache-2.0"
] | null | null | null |
TaglifeRest/TagLifeApp/migrations/0001_initial.py
|
furkankav/TagLife
|
1f4325220209940a70e3847846ef2d05cffcb42a
|
[
"Apache-2.0"
] | null | null | null |
TaglifeRest/TagLifeApp/migrations/0001_initial.py
|
furkankav/TagLife
|
1f4325220209940a70e3847846ef2d05cffcb42a
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-03 17:21
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for TagLifeApp.

    Creates Comment, Entry, EntryTagRelation, FollowTopicRelation, Predicate,
    Tag, Topic and TopicTagRelation; several foreign keys are attached
    afterwards via AddField because the target models are created later in
    the same operations list. Auto-generated by Django — do not hand-edit.
    """

    initial = True

    dependencies = [
        # The user FKs below point at the swappable AUTH_USER_MODEL.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content', models.CharField(max_length=512)),
                ('vote', models.IntegerField(blank=True, default=0, null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='Entry',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content', models.CharField(max_length=512)),
                # NOTE: unlike Comment.vote, this vote is non-null with no default.
                ('vote', models.IntegerField()),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='EntryTagRelation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('entry', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tags', to='TagLifeApp.Entry')),
            ],
        ),
        migrations.CreateModel(
            name='FollowTopicRelation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='Predicate',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('predicateString', models.CharField(max_length=64)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Tag',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tagString', models.CharField(max_length=128)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='Topic',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=128)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='topics', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='TopicTagRelation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('predicate', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='types', to='TagLifeApp.Predicate')),
                ('tag', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='relatedTopic', to='TagLifeApp.Tag')),
                ('topic', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tags', to='TagLifeApp.Topic')),
            ],
        ),
        # Foreign keys added after creation because their target models are
        # declared later in this operations list than the referencing model.
        migrations.AddField(
            model_name='followtopicrelation',
            name='topic',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='follower', to='TagLifeApp.Topic'),
        ),
        migrations.AddField(
            model_name='followtopicrelation',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='following', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='entrytagrelation',
            name='tag',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='relatedEntry', to='TagLifeApp.Tag'),
        ),
        migrations.AddField(
            model_name='entry',
            name='topic',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='entries', to='TagLifeApp.Topic'),
        ),
        migrations.AddField(
            model_name='entry',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='entries', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='comment',
            name='entry',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='TagLifeApp.Entry'),
        ),
        migrations.AddField(
            model_name='comment',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to=settings.AUTH_USER_MODEL),
        ),
    ]
| 47.409091
| 143
| 0.600671
| 629
| 6,258
| 5.786963
| 0.151033
| 0.035165
| 0.092308
| 0.10989
| 0.817308
| 0.811538
| 0.789011
| 0.777473
| 0.751099
| 0.692857
| 0
| 0.006941
| 0.263343
| 6,258
| 131
| 144
| 47.770992
| 0.782646
| 0.010866
| 0
| 0.674797
| 1
| 0
| 0.109746
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.03252
| 0
| 0.065041
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9a7eb61f5425febc22bdda6e6459cb347fba0329
| 120
|
py
|
Python
|
fatgraph_v.1.0/cal_fatgraph/__init__.py
|
HaruNegami/Fatgraph_v.1.0
|
af640c4ea44bdb3ca17cbb7158c2daf7dae979f8
|
[
"MIT"
] | null | null | null |
fatgraph_v.1.0/cal_fatgraph/__init__.py
|
HaruNegami/Fatgraph_v.1.0
|
af640c4ea44bdb3ca17cbb7158c2daf7dae979f8
|
[
"MIT"
] | null | null | null |
fatgraph_v.1.0/cal_fatgraph/__init__.py
|
HaruNegami/Fatgraph_v.1.0
|
af640c4ea44bdb3ca17cbb7158c2daf7dae979f8
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from . import track_boundary
from . import hydrogen
from . import judge
from . import generate_fatgraph
| 20
| 31
| 0.783333
| 17
| 120
| 5.411765
| 0.647059
| 0.434783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009901
| 0.158333
| 120
| 6
| 31
| 20
| 0.90099
| 0.108333
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9aa88c213142e8c43a3d2e750861d73f5f7ec63b
| 4,322
|
py
|
Python
|
lib/transform_pos.py
|
liuruoze/HierNet-SC2
|
7abfde0088e90416f11922d67c0f09659c7ecf81
|
[
"Apache-2.0"
] | 2
|
2022-02-28T08:39:43.000Z
|
2022-03-03T02:28:23.000Z
|
lib/transform_pos.py
|
liuruoze/HierNet-SC2
|
7abfde0088e90416f11922d67c0f09659c7ecf81
|
[
"Apache-2.0"
] | null | null | null |
lib/transform_pos.py
|
liuruoze/HierNet-SC2
|
7abfde0088e90416f11922d67c0f09659c7ecf81
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
from pysc2.lib import point
from pysc2.lib import transform
class Pos(object):
    """Minimal 2-D coordinate holder; both components default to 0."""

    def __init__(self, x=0, y=0):
        # Plain attribute storage — no validation or conversion.
        self.x, self.y = x, y
def world_to_screen_pos(game_info, pos, obs):
    """Convert a world-space position to feature-layer screen coordinates.

    :param game_info: env.game_info (map size and feature-layer options)
    :param pos: target world-space position (anything point.Point.build accepts)
    :param obs: observation providing obs.raw_observation.observation.raw_data.player.camera
    :return: [x, y] screen pixel coordinates, clipped to [0, 63]
    """
    # init parameter and define
    map_size = point.Point.build(game_info.start_raw.map_size)
    fl_opts = game_info.options.feature_layer
    feature_layer_screen_size = point.Point.build(fl_opts.resolution)
    camera_width_world_units = fl_opts.width
    # Scale (1, -1) with offset (0, map_size.y): flips the y axis and shifts
    # by the map height, i.e. world-up becomes screen-down.
    world_to_screen = transform.Linear(point.Point(1, -1), point.Point(0, map_size.y))
    # Scale screen units down to feature-layer pixels, then floor to ints.
    screen_to_fl_screen = transform.Linear(feature_layer_screen_size / camera_width_world_units)
    world_to_fl_screen = transform.Chain(world_to_screen, screen_to_fl_screen, transform.Floor())
    # Update the camera transform based on the new camera center.
    camera_center = obs.raw_observation.observation.raw_data.player.camera
    # Half-extent of the camera in world units, scaled to the layer's aspect.
    camera_radius = (feature_layer_screen_size /
                     feature_layer_screen_size.x *
                     camera_width_world_units / 2)
    camera_center = point.Point.build(camera_center)
    # Keep the camera rect fully inside the map bounds.
    center = camera_center.bound(camera_radius, map_size - camera_radius)
    camera = point.Rect(
        (center - camera_radius).bound(map_size),
        (center + camera_radius).bound(map_size))
    # Anchor the screen transform at the camera's bottom-left corner.
    world_to_screen.offset = (-camera.bl * world_to_screen.scale)
    trans_pos = world_to_fl_screen.fwd_pt(point.Point.build(pos))
    # NOTE(review): the clip to 63 assumes a 64x64 screen resolution — confirm
    # against fl_opts.resolution.
    return np.clip(np.array(trans_pos), 0, 63).tolist()
def world_to_minimap_pos(game_info, pos):
    """Convert a world-space position to feature-layer minimap coordinates.

    :param game_info: env.game_info (map size and feature-layer options)
    :param pos: world-space position (anything point.Point.build accepts)
    :return: [x, y] minimap pixel coordinates, clipped to [0, 63]
    """
    map_size = point.Point.build(game_info.start_raw.map_size)
    fl_opts = game_info.options.feature_layer
    feature_layer_minimap_size = point.Point.build(fl_opts.minimap_resolution)
    # Scale by the map's larger dimension so the whole map fits the minimap.
    max_map_dim = map_size.max_dim()
    # Flip the y axis and shift by map height (world-up -> screen-down).
    world_to_minimap = transform.Linear(point.Point(1, -1), point.Point(0, map_size.y))
    minimap_to_fl_minimap = transform.Linear(feature_layer_minimap_size / max_map_dim)
    world_to_fl_minimap = transform.Chain(
        world_to_minimap,
        minimap_to_fl_minimap,
        transform.Floor()
    )
    trans_pos = world_to_fl_minimap.fwd_pt(point.Point.build(pos))
    # NOTE(review): the clip to 63 assumes a 64x64 minimap resolution — confirm
    # against fl_opts.minimap_resolution.
    return np.clip(np.array(trans_pos), 0, 63).tolist()
def screen_to_minimap_pos(game_info, screen_pos, obs):
    """Convert feature-layer screen coordinates to minimap coordinates.

    Composes the inverse of the world->screen transform (as built in
    world_to_screen_pos) with the world->minimap transform (as built in
    world_to_minimap_pos): screen -> world -> minimap.

    :param game_info: env.game_info (map size and feature-layer options)
    :param screen_pos: (x, y) screen pixel coordinates
    :param obs: observation providing the current camera center
    :return: [x, y] minimap pixel coordinates, clipped to [0, 63]
    """
    screen_pos = Pos(screen_pos[0], screen_pos[1])
    # init parameter and define
    map_size = point.Point.build(game_info.start_raw.map_size)
    fl_opts = game_info.options.feature_layer
    feature_layer_screen_size = point.Point.build(fl_opts.resolution)
    feature_layer_minimap_size = point.Point.build(fl_opts.minimap_resolution)
    # screen to world
    camera_width_world_units = fl_opts.width
    # Same transform chain as world_to_screen_pos, used here in reverse.
    world_to_screen = transform.Linear(point.Point(1, -1), point.Point(0, map_size.y))
    screen_to_fl_screen = transform.Linear(feature_layer_screen_size / camera_width_world_units)
    world_to_fl_screen = transform.Chain(world_to_screen, screen_to_fl_screen, transform.Floor())
    # Update the camera transform based on the new camera center.
    camera_center = obs.raw_observation.observation.raw_data.player.camera
    camera_radius = (feature_layer_screen_size /
                     feature_layer_screen_size.x *
                     camera_width_world_units / 2)
    camera_center = point.Point.build(camera_center)
    center = camera_center.bound(camera_radius, map_size - camera_radius)
    camera = point.Rect(
        (center - camera_radius).bound(map_size),
        (center + camera_radius).bound(map_size))
    world_to_screen.offset = (-camera.bl * world_to_screen.scale)
    # back_pt inverts the chain: screen pixel -> world position.
    world_pos = world_to_fl_screen.back_pt(point.Point.build(screen_pos))
    # world to minimap
    max_map_dim = map_size.max_dim()
    world_to_minimap = transform.Linear(point.Point(1, -1), point.Point(0, map_size.y))
    minimap_to_fl_minimap = transform.Linear(feature_layer_minimap_size / max_map_dim)
    world_to_fl_minimap = transform.Chain(
        world_to_minimap,
        minimap_to_fl_minimap,
        transform.Floor()
    )
    minimap_pos = world_to_fl_minimap.fwd_pt(point.Point.build(world_pos))
    # NOTE(review): the clip to 63 assumes a 64x64 minimap resolution — confirm.
    return np.clip(np.array(minimap_pos), 0, 63).tolist()
| 39.651376
| 97
| 0.732763
| 639
| 4,322
| 4.580595
| 0.120501
| 0.055005
| 0.066621
| 0.06013
| 0.875641
| 0.846259
| 0.838743
| 0.838743
| 0.822685
| 0.822685
| 0
| 0.008105
| 0.172143
| 4,322
| 108
| 98
| 40.018519
| 0.80995
| 0.083063
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.041667
| 0
| 0.152778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ad15f0d47bed6a2faf16dbd67820df238d9a698
| 6,888
|
py
|
Python
|
Showers/Main.py
|
aneeshverenkar/HackCU
|
3a639a7ad77b34952731da125050e7bfaf590922
|
[
"BSD-2-Clause"
] | null | null | null |
Showers/Main.py
|
aneeshverenkar/HackCU
|
3a639a7ad77b34952731da125050e7bfaf590922
|
[
"BSD-2-Clause"
] | null | null | null |
Showers/Main.py
|
aneeshverenkar/HackCU
|
3a639a7ad77b34952731da125050e7bfaf590922
|
[
"BSD-2-Clause"
] | null | null | null |
"""Console Spotify playback controller.

Fix: the file contained an unresolved Git merge conflict (<<<<<<< HEAD /
======= / >>>>>>> markers), which is a SyntaxError. Resolved by keeping the
newer branch, which is a superset of the old one: it adds the proper OAuth
scope plus volume and device control.

SECURITY NOTE(review): the client id/secret are hard-coded below; they should
be moved to environment variables and the committed secret rotated.
"""
import pprint
import sys
import spotipy
import requests
from spotipy.oauth2 import SpotifyClientCredentials
import spotipy.util as util
import simplejson as json

# OAuth scopes required for playback read/modify and streaming control.
scope = 'streaming user-read-playback-state user-modify-playback-state'


def play():
    # Resume playback on the currently active device.
    r = requests.put('https://api.spotify.com/v1/me/player/play', headers={'Authorization': temp})


def next_song():
    # Skip to the next track, then make sure playback is running.
    r = requests.post('https://api.spotify.com/v1/me/player/next', headers={'Authorization': temp})
    r = requests.put('https://api.spotify.com/v1/me/player/play', headers={'Authorization': temp})


def pause():
    # Pause playback on the currently active device.
    r = requests.put('https://api.spotify.com/v1/me/player/pause', headers={'Authorization': temp})


def prev_song():
    # NOTE(review): this posts to /next, same as next_song — looks like a bug
    # in the original; a previous-track call would use /previous. Left as-is.
    r = requests.post('https://api.spotify.com/v1/me/player/next', headers={'Authorization': temp})
    r = requests.put('https://api.spotify.com/v1/me/player/play', headers={'Authorization': temp})


def play_song(name):
    # Search for the track by name and start playing the top hit.
    sp = spotipy.Spotify()
    search_result = sp.search(name)
    artist_list = search_result['tracks']['items']
    album = 'album'
    track = artist_list[0]['uri']
    #r = requests.get()
    data = json.dumps(
        {
            'uris': [track]
        }
    )
    r = requests.put('https://api.spotify.com/v1/me/player/play', data=data, headers={'Authorization': temp})


def control_volume(direction, value=1):
    # Read the current volume, then nudge it up or down by `value` percent.
    r = requests.get('https://api.spotify.com/v1/me/player', headers={'Authorization': temp})
    device_info = r.json()
    volume = device_info['device']['volume_percent']
    print('cur vol: ', volume)
    if direction == 'down':
        value = int(value) * -1
    volume = int(volume) + int(value)
    print('new vol: ', volume)
    data=json.dumps({'volume': int(volume)})
    print('data: ', data)
    r = requests.put('https://api.spotify.com/v1/me/player/volume', params={'volume_percent': volume}, headers={'Authorization': temp})
    r = requests.put('https://api.spotify.com/v1/me/player/play', headers={'Authorization': temp})
    print(r.content)


def change_device(new_device):
    # Transfer playback to the first device whose name matches new_device.
    r = requests.get('https://api.spotify.com/v1/me/player/devices', headers={'Authorization': temp})
    devices = r.json()['devices']
    device_to_be =''
    for device in devices:
        print(device['name'])
        if new_device.lower() in device['name'].lower():
            device_to_be = device['id']
    data = {'device_ids':[device_to_be],'play':True}
    r = requests.put('https://api.spotify.com/v1/me/player', data=data, headers={'Authorization': temp})
    r = requests.put('https://api.spotify.com/v1/me/player/play', headers={'Authorization': temp})


if __name__ == '__main__':
    username = "toomuchsaucehackcu"
    authorization = {'Authorization': 'Basic 5fd9106f4c744e8a80248d2ab3d59a27:5ffe3b9afc7449c48f670c37feb37102'}
    client_id = '5fd9106f4c744e8a80248d2ab3d59a27'
    client_secret = '5ffe3b9afc7449c48f670c37feb37102'
    params = {'client_id': client_id, 'response_type':'code','redirect_uri':'http://localhost:8888/callback', 'scope': scope,'show_dialog':True}
    token = util.prompt_for_user_token(username, scope=scope, client_id='5fd9106f4c744e8a80248d2ab3d59a27',
                                       client_secret='5ffe3b9afc7449c48f670c37feb37102',
                                       redirect_uri='http://localhost:8888/callback')
    # Bearer token used by every request helper above (module-level global).
    temp = 'Bearer ' + token
    # Simple REPL: read commands until the user types 'exit'.
    choice = input('What would you like to do: ')
    while choice != 'exit':
        if choice == 'play':
            play()
        elif choice == 'next':
            next_song()
        elif choice == 'pause':
            pause()
        elif 'play song' in choice:
            song_choice = choice.replace('play', '').replace('song', '').strip()
            #print(song_choice)
            play_song(song_choice)
        elif 'volume' in choice:
            choice = choice.replace('volume', '')
            choice.strip()
            choice = choice.split()
            direction = choice[0]
            value = choice[1]
            control_volume(direction, value)
        elif 'change device to' in choice:
            choice = choice.replace('change', '').replace('device', '').replace('to', '')
            choice.strip()
            change_device(choice)
        choice = input('What would you like to do: ')
| 43.872611
| 150
| 0.630517
| 786
| 6,888
| 5.418575
| 0.150127
| 0.04649
| 0.070439
| 0.084527
| 0.817563
| 0.803005
| 0.803005
| 0.803005
| 0.803005
| 0.787039
| 0
| 0.057185
| 0.212979
| 6,888
| 156
| 151
| 44.153846
| 0.728463
| 0.010453
| 0
| 0.671329
| 0
| 0
| 0.333036
| 0.06491
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.097902
| null | null | 0.055944
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9ae0fa88a60b77cb87dd5d461bd8336ac4c2dab8
| 540,978
|
py
|
Python
|
elements_sdk/api/main_api.py
|
elements-storage/elements-sdk-python
|
39c365fe079dcd5928c5fe1bbaa67389bd5a3d81
|
[
"MIT"
] | 6
|
2020-11-16T23:15:18.000Z
|
2022-03-14T03:56:12.000Z
|
elements_sdk/api/main_api.py
|
elements-storage/elements-sdk-python
|
39c365fe079dcd5928c5fe1bbaa67389bd5a3d81
|
[
"MIT"
] | 1
|
2021-07-28T13:03:49.000Z
|
2021-08-25T12:24:01.000Z
|
elements_sdk/api/main_api.py
|
elements-storage/elements-sdk-python
|
39c365fe079dcd5928c5fe1bbaa67389bd5a3d81
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
ELEMENTS API
The version of the OpenAPI document: 2
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from elements_sdk.api_client import ApiClient
from elements_sdk.exceptions import (
ApiTypeError,
ApiValueError
)
class MainApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def apply_configuration(self, **kwargs): # noqa: E501
"""apply_configuration # noqa: E501
### Required permissions * Authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.apply_configuration(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.apply_configuration_with_http_info(**kwargs) # noqa: E501
def apply_configuration_with_http_info(self, **kwargs): # noqa: E501
"""apply_configuration # noqa: E501
### Required permissions * Authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.apply_configuration_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method apply_configuration" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/configuration/apply', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def beep(self, **kwargs): # noqa: E501
"""beep # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.beep(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.beep_with_http_info(**kwargs) # noqa: E501
def beep_with_http_info(self, **kwargs): # noqa: E501
"""beep # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.beep_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method beep" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/system/beep', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def check_certificate(self, certificate, **kwargs): # noqa: E501
"""check_certificate # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_certificate(certificate, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param Certificate certificate: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.check_certificate_with_http_info(certificate, **kwargs) # noqa: E501
def check_certificate_with_http_info(self, certificate, **kwargs): # noqa: E501
"""check_certificate # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_certificate_with_http_info(certificate, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param Certificate certificate: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['certificate'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method check_certificate" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'certificate' is set
if self.api_client.client_side_validation and ('certificate' not in local_var_params or # noqa: E501
local_var_params['certificate'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `certificate` when calling `check_certificate`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'certificate' in local_var_params:
body_params = local_var_params['certificate']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/system/certificate/check', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def check_chunk_uploaded(self, **kwargs): # noqa: E501
"""check_chunk_uploaded # noqa: E501
### Required permissions * Authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_chunk_uploaded(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str upload_id:
:param str chunk_number:
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.check_chunk_uploaded_with_http_info(**kwargs) # noqa: E501
def check_chunk_uploaded_with_http_info(self, **kwargs): # noqa: E501
"""check_chunk_uploaded # noqa: E501
### Required permissions * Authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_chunk_uploaded_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str upload_id:
:param str chunk_number:
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['upload_id', 'chunk_number'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method check_chunk_uploaded" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'upload_id' in local_var_params and local_var_params['upload_id'] is not None: # noqa: E501
query_params.append(('upload_id', local_var_params['upload_id'])) # noqa: E501
if 'chunk_number' in local_var_params and local_var_params['chunk_number'] is not None: # noqa: E501
query_params.append(('chunk_number', local_var_params['chunk_number'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/uploads/chunk', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def check_internet_connectivity(self, **kwargs): # noqa: E501
"""check_internet_connectivity # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_internet_connectivity(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: CheckConnectivityEndpointResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.check_internet_connectivity_with_http_info(**kwargs) # noqa: E501
def check_internet_connectivity_with_http_info(self, **kwargs): # noqa: E501
"""check_internet_connectivity # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_internet_connectivity_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(CheckConnectivityEndpointResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method check_internet_connectivity" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/system/check-connectivity', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CheckConnectivityEndpointResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def check_stor_next_license(self, stornext_license, **kwargs): # noqa: E501
"""check_stor_next_license # noqa: E501
### Required permissions * User account permission: `system:admin-access` * License component: stornext_mdc # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_stor_next_license(stornext_license, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param StornextLicense stornext_license: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[StorNextLicenseCheckEndpointResponse]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.check_stor_next_license_with_http_info(stornext_license, **kwargs) # noqa: E501
def check_stor_next_license_with_http_info(self, stornext_license, **kwargs): # noqa: E501
"""check_stor_next_license # noqa: E501
### Required permissions * User account permission: `system:admin-access` * License component: stornext_mdc # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.check_stor_next_license_with_http_info(stornext_license, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param StornextLicense stornext_license: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(list[StorNextLicenseCheckEndpointResponse], status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['stornext_license'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method check_stor_next_license" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'stornext_license' is set
if self.api_client.client_side_validation and ('stornext_license' not in local_var_params or # noqa: E501
local_var_params['stornext_license'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `stornext_license` when calling `check_stor_next_license`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'stornext_license' in local_var_params:
body_params = local_var_params['stornext_license']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/stornext-license/check', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[StorNextLicenseCheckEndpointResponse]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def collect_diagnostics(self, **kwargs): # noqa: E501
"""collect_diagnostics # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.collect_diagnostics(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: DownloadArchive
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.collect_diagnostics_with_http_info(**kwargs) # noqa: E501
def collect_diagnostics_with_http_info(self, **kwargs): # noqa: E501
"""collect_diagnostics # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.collect_diagnostics_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(DownloadArchive, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method collect_diagnostics" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/system/collect-diagnostics', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DownloadArchive', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_archive(self, create_download_archive, **kwargs): # noqa: E501
"""create_archive # noqa: E501
### Required permissions * Authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_archive(create_download_archive, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param CreateDownloadArchive create_download_archive: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: DownloadArchive
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_archive_with_http_info(create_download_archive, **kwargs) # noqa: E501
def create_archive_with_http_info(self, create_download_archive, **kwargs): # noqa: E501
"""create_archive # noqa: E501
### Required permissions * Authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_archive_with_http_info(create_download_archive, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param CreateDownloadArchive create_download_archive: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(DownloadArchive, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['create_download_archive'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_archive" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'create_download_archive' is set
if self.api_client.client_side_validation and ('create_download_archive' not in local_var_params or # noqa: E501
local_var_params['create_download_archive'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `create_download_archive` when calling `create_archive`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_download_archive' in local_var_params:
body_params = local_var_params['create_download_archive']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/download-archive/create', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DownloadArchive', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_group(self, elements_group_detail, **kwargs): # noqa: E501
"""create_group # noqa: E501
### Required permissions * User account permission: `users:view` (read) / `users:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_group(elements_group_detail, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param ElementsGroupDetail elements_group_detail: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ElementsGroupDetail
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_group_with_http_info(elements_group_detail, **kwargs) # noqa: E501
def create_group_with_http_info(self, elements_group_detail, **kwargs): # noqa: E501
"""create_group # noqa: E501
### Required permissions * User account permission: `users:view` (read) / `users:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_group_with_http_info(elements_group_detail, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param ElementsGroupDetail elements_group_detail: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ElementsGroupDetail, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['elements_group_detail'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_group" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'elements_group_detail' is set
if self.api_client.client_side_validation and ('elements_group_detail' not in local_var_params or # noqa: E501
local_var_params['elements_group_detail'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `elements_group_detail` when calling `create_group`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'elements_group_detail' in local_var_params:
body_params = local_var_params['elements_group_detail']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/groups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ElementsGroupDetail', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_home_workspace(self, id, create_home_workspace_request, **kwargs): # noqa: E501
"""create_home_workspace # noqa: E501
### Required permissions * User account permission: `users:manage` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_home_workspace(id, create_home_workspace_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this User. (required)
:param CreateHomeWorkspaceRequest create_home_workspace_request: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Workspace
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_home_workspace_with_http_info(id, create_home_workspace_request, **kwargs) # noqa: E501
def create_home_workspace_with_http_info(self, id, create_home_workspace_request, **kwargs): # noqa: E501
"""create_home_workspace # noqa: E501
### Required permissions * User account permission: `users:manage` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_home_workspace_with_http_info(id, create_home_workspace_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this User. (required)
:param CreateHomeWorkspaceRequest create_home_workspace_request: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Workspace, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'create_home_workspace_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_home_workspace" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `create_home_workspace`") # noqa: E501
# verify the required parameter 'create_home_workspace_request' is set
if self.api_client.client_side_validation and ('create_home_workspace_request' not in local_var_params or # noqa: E501
local_var_params['create_home_workspace_request'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `create_home_workspace_request` when calling `create_home_workspace`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_home_workspace_request' in local_var_params:
body_params = local_var_params['create_home_workspace_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/users/{id}/home', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Workspace', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_ntp_server(self, ntp_server, **kwargs): # noqa: E501
"""create_ntp_server # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_ntp_server(ntp_server, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param NTPServer ntp_server: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: NTPServer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_ntp_server_with_http_info(ntp_server, **kwargs) # noqa: E501
def create_ntp_server_with_http_info(self, ntp_server, **kwargs): # noqa: E501
"""create_ntp_server # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_ntp_server_with_http_info(ntp_server, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param NTPServer ntp_server: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(NTPServer, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['ntp_server'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_ntp_server" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'ntp_server' is set
if self.api_client.client_side_validation and ('ntp_server' not in local_var_params or # noqa: E501
local_var_params['ntp_server'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `ntp_server` when calling `create_ntp_server`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'ntp_server' in local_var_params:
body_params = local_var_params['ntp_server']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/system/time/servers', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='NTPServer', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_user(self, elements_user_detail, **kwargs): # noqa: E501
"""create_user # noqa: E501
### Required permissions * User account permission: `None` (read) / `users:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_user(elements_user_detail, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param ElementsUserDetail elements_user_detail: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ElementsUserDetail
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_user_with_http_info(elements_user_detail, **kwargs) # noqa: E501
def create_user_with_http_info(self, elements_user_detail, **kwargs): # noqa: E501
"""create_user # noqa: E501
### Required permissions * User account permission: `None` (read) / `users:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_user_with_http_info(elements_user_detail, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param ElementsUserDetail elements_user_detail: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ElementsUserDetail, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['elements_user_detail'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_user" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'elements_user_detail' is set
if self.api_client.client_side_validation and ('elements_user_detail' not in local_var_params or # noqa: E501
local_var_params['elements_user_detail'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `elements_user_detail` when calling `create_user`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'elements_user_detail' in local_var_params:
body_params = local_var_params['elements_user_detail']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/users', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ElementsUserDetail', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_workstation(self, workstation, **kwargs): # noqa: E501
"""create_workstation # noqa: E501
### Required permissions * Authenticated user * Own workstation or User account permission: `workstations:view` (read) / `workstations:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_workstation(workstation, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param Workstation workstation: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Workstation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_workstation_with_http_info(workstation, **kwargs) # noqa: E501
def create_workstation_with_http_info(self, workstation, **kwargs): # noqa: E501
"""create_workstation # noqa: E501
### Required permissions * Authenticated user * Own workstation or User account permission: `workstations:view` (read) / `workstations:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_workstation_with_http_info(workstation, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param Workstation workstation: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Workstation, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['workstation'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_workstation" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'workstation' is set
if self.api_client.client_side_validation and ('workstation' not in local_var_params or # noqa: E501
local_var_params['workstation'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `workstation` when calling `create_workstation`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'workstation' in local_var_params:
body_params = local_var_params['workstation']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/workstations', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Workstation', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_download_archive(self, id, **kwargs): # noqa: E501
"""delete_download_archive # noqa: E501
### Required permissions * Authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_download_archive(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: A UUID string identifying this download archive. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_download_archive_with_http_info(id, **kwargs) # noqa: E501
def delete_download_archive_with_http_info(self, id, **kwargs): # noqa: E501
"""delete_download_archive # noqa: E501
### Required permissions * Authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_download_archive_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: A UUID string identifying this download archive. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_download_archive" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `delete_download_archive`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/download-archive/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_group(self, id, **kwargs): # noqa: E501
"""delete_group # noqa: E501
### Required permissions * User account permission: `users:view` (read) / `users:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_group(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this Group. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_group_with_http_info(id, **kwargs) # noqa: E501
def delete_group_with_http_info(self, id, **kwargs): # noqa: E501
"""delete_group # noqa: E501
### Required permissions * User account permission: `users:view` (read) / `users:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_group_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this Group. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_group" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `delete_group`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/groups/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_home_workspace(self, id, **kwargs): # noqa: E501
"""delete_home_workspace # noqa: E501
### Required permissions * User account permission: `users:manage` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_home_workspace(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this User. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_home_workspace_with_http_info(id, **kwargs) # noqa: E501
def delete_home_workspace_with_http_info(self, id, **kwargs): # noqa: E501
"""delete_home_workspace # noqa: E501
### Required permissions * User account permission: `users:manage` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_home_workspace_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this User. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_home_workspace" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `delete_home_workspace`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/users/{id}/home', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_ntp_server(self, id, **kwargs): # noqa: E501
"""delete_ntp_server # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_ntp_server(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this NTP Server. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_ntp_server_with_http_info(id, **kwargs) # noqa: E501
def delete_ntp_server_with_http_info(self, id, **kwargs): # noqa: E501
"""delete_ntp_server # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_ntp_server_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this NTP Server. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_ntp_server" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `delete_ntp_server`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/system/time/servers/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_user(self, id, **kwargs): # noqa: E501
"""delete_user # noqa: E501
### Required permissions * User account permission: `None` (read) / `users:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_user(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this User. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_user_with_http_info(id, **kwargs) # noqa: E501
def delete_user_with_http_info(self, id, **kwargs): # noqa: E501
"""delete_user # noqa: E501
### Required permissions * User account permission: `None` (read) / `users:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_user_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this User. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_user" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `delete_user`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/users/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_workstation(self, id, **kwargs): # noqa: E501
"""delete_workstation # noqa: E501
### Required permissions * Authenticated user * Own workstation or User account permission: `workstations:view` (read) / `workstations:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_workstation(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: A unique value identifying this workstation. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_workstation_with_http_info(id, **kwargs) # noqa: E501
def delete_workstation_with_http_info(self, id, **kwargs): # noqa: E501
"""delete_workstation # noqa: E501
### Required permissions * Authenticated user * Own workstation or User account permission: `workstations:view` (read) / `workstations:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_workstation_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: A unique value identifying this workstation. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_workstation" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `delete_workstation`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/workstations/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def disable_user_totp(self, id, **kwargs): # noqa: E501
"""disable_user_totp # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.disable_user_totp(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this User. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.disable_user_totp_with_http_info(id, **kwargs) # noqa: E501
def disable_user_totp_with_http_info(self, id, **kwargs): # noqa: E501
"""disable_user_totp # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.disable_user_totp_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this User. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method disable_user_totp" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `disable_user_totp`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/users/{id}/totp', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def enable_user_totp(self, id, enable_totp_request, **kwargs): # noqa: E501
"""enable_user_totp # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.enable_user_totp(id, enable_totp_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this User. (required)
:param EnableTOTPRequest enable_totp_request: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.enable_user_totp_with_http_info(id, enable_totp_request, **kwargs) # noqa: E501
def enable_user_totp_with_http_info(self, id, enable_totp_request, **kwargs): # noqa: E501
"""enable_user_totp # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.enable_user_totp_with_http_info(id, enable_totp_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this User. (required)
:param EnableTOTPRequest enable_totp_request: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'enable_totp_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method enable_user_totp" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `enable_user_totp`") # noqa: E501
# verify the required parameter 'enable_totp_request' is set
if self.api_client.client_side_validation and ('enable_totp_request' not in local_var_params or # noqa: E501
local_var_params['enable_totp_request'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `enable_totp_request` when calling `enable_user_totp`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'enable_totp_request' in local_var_params:
body_params = local_var_params['enable_totp_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/users/{id}/totp', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def finish_upload(self, finish_upload_endpoint_request, **kwargs): # noqa: E501
"""finish_upload # noqa: E501
### Required permissions * Authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.finish_upload(finish_upload_endpoint_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FinishUploadEndpointRequest finish_upload_endpoint_request: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.finish_upload_with_http_info(finish_upload_endpoint_request, **kwargs) # noqa: E501
def finish_upload_with_http_info(self, finish_upload_endpoint_request, **kwargs): # noqa: E501
"""finish_upload # noqa: E501
### Required permissions * Authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.finish_upload_with_http_info(finish_upload_endpoint_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param FinishUploadEndpointRequest finish_upload_endpoint_request: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['finish_upload_endpoint_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method finish_upload" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'finish_upload_endpoint_request' is set
if self.api_client.client_side_validation and ('finish_upload_endpoint_request' not in local_var_params or # noqa: E501
local_var_params['finish_upload_endpoint_request'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `finish_upload_endpoint_request` when calling `finish_upload`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'finish_upload_endpoint_request' in local_var_params:
body_params = local_var_params['finish_upload_endpoint_request']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/uploads/finish', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def fix_ldap_group_memberships(self, id, **kwargs): # noqa: E501
"""fix_ldap_group_memberships # noqa: E501
### Required permissions * User account permission: `users:manage` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.fix_ldap_group_memberships(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this LDAP Server. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.fix_ldap_group_memberships_with_http_info(id, **kwargs) # noqa: E501
def fix_ldap_group_memberships_with_http_info(self, id, **kwargs): # noqa: E501
"""fix_ldap_group_memberships # noqa: E501
### Required permissions * User account permission: `users:manage` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.fix_ldap_group_memberships_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this LDAP Server. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method fix_ldap_group_memberships" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `fix_ldap_group_memberships`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/ldap-servers/{id}/fix-memberships', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_client_sessions(self, **kwargs): # noqa: E501
"""get_all_client_sessions # noqa: E501
### Required permissions * User account permission: `system:status:view` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_client_sessions(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str user: Filter the returned list by `user`.
:param str mounted_workspaces__mount_node: Filter the returned list by `mounted_workspaces__mount_node`.
:param str workstation: Filter the returned list by `workstation`.
:param str ordering: Which field to use when ordering the results.
:param int limit: Number of results to return per page.
:param int offset: The initial index from which to return the results.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[ClientSession]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_all_client_sessions_with_http_info(**kwargs) # noqa: E501
def get_all_client_sessions_with_http_info(self, **kwargs): # noqa: E501
"""get_all_client_sessions # noqa: E501
### Required permissions * User account permission: `system:status:view` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_client_sessions_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str user: Filter the returned list by `user`.
:param str mounted_workspaces__mount_node: Filter the returned list by `mounted_workspaces__mount_node`.
:param str workstation: Filter the returned list by `workstation`.
:param str ordering: Which field to use when ordering the results.
:param int limit: Number of results to return per page.
:param int offset: The initial index from which to return the results.
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(list[ClientSession], status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['user', 'mounted_workspaces__mount_node', 'workstation', 'ordering', 'limit', 'offset'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_client_sessions" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'user' in local_var_params and local_var_params['user'] is not None: # noqa: E501
query_params.append(('user', local_var_params['user'])) # noqa: E501
if 'mounted_workspaces__mount_node' in local_var_params and local_var_params['mounted_workspaces__mount_node'] is not None: # noqa: E501
query_params.append(('mounted_workspaces__mount_node', local_var_params['mounted_workspaces__mount_node'])) # noqa: E501
if 'workstation' in local_var_params and local_var_params['workstation'] is not None: # noqa: E501
query_params.append(('workstation', local_var_params['workstation'])) # noqa: E501
if 'ordering' in local_var_params and local_var_params['ordering'] is not None: # noqa: E501
query_params.append(('ordering', local_var_params['ordering'])) # noqa: E501
if 'limit' in local_var_params and local_var_params['limit'] is not None: # noqa: E501
query_params.append(('limit', local_var_params['limit'])) # noqa: E501
if 'offset' in local_var_params and local_var_params['offset'] is not None: # noqa: E501
query_params.append(('offset', local_var_params['offset'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/client-sessions', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[ClientSession]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_download_archives(self, **kwargs): # noqa: E501
"""get_all_download_archives # noqa: E501
### Required permissions * Authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_download_archives(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str ordering: Which field to use when ordering the results.
:param int limit: Number of results to return per page.
:param int offset: The initial index from which to return the results.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[DownloadArchive]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_all_download_archives_with_http_info(**kwargs) # noqa: E501
def get_all_download_archives_with_http_info(self, **kwargs):  # noqa: E501
    """get_all_download_archives  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and fetch the result via ``thread.get()``.

    :param async_req bool: execute request asynchronously
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :param _request_timeout: total timeout (single number) or a
                             (connection, read) tuple for this request.
    :return: tuple(list[DownloadArchive], status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    # Endpoint parameters plus the generic request options.
    all_params = [
        'ordering',
        'limit',
        'offset',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Reject unknown keywords, then flatten the accepted ones into
    # local_var_params for uniform lookup below.
    for param_name, param_value in six.iteritems(local_var_params['kwargs']):
        if param_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_download_archives" % param_name
            )
        local_var_params[param_name] = param_value
    del local_var_params['kwargs']

    collection_formats = {}
    path_params = {}

    # Build the query string from the optional pagination/ordering params.
    query_params = []
    for query_name in ('ordering', 'limit', 'offset'):
        if local_var_params.get(query_name) is not None:
            query_params.append((query_name, local_var_params[query_name]))

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/download-archive', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[DownloadArchive]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_all_downloads(self, **kwargs):  # noqa: E501
    """get_all_downloads  # noqa: E501

    ### Required permissions * User account permission: `downloads:view`  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and fetch the result via ``thread.get()``.

    :param async_req bool: execute request asynchronously
    :param str name: Filter the returned list by `name`.
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :param _request_timeout: total timeout (single number) or a
                             (connection, read) tuple for this request.
    :return: list[Download]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return
    # only the deserialized body (no status code / headers).
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.get_all_downloads_with_http_info(**kwargs)  # noqa: E501
def get_all_downloads_with_http_info(self, **kwargs):  # noqa: E501
    """get_all_downloads  # noqa: E501

    ### Required permissions * User account permission: `downloads:view`  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and fetch the result via ``thread.get()``.

    :param async_req bool: execute request asynchronously
    :param str name: Filter the returned list by `name`.
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :param _request_timeout: total timeout (single number) or a
                             (connection, read) tuple for this request.
    :return: tuple(list[Download], status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    # Endpoint parameters plus the generic request options.
    all_params = [
        'name',
        'ordering',
        'limit',
        'offset',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Reject unknown keywords, then flatten the accepted ones into
    # local_var_params for uniform lookup below.
    for param_name, param_value in six.iteritems(local_var_params['kwargs']):
        if param_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_downloads" % param_name
            )
        local_var_params[param_name] = param_value
    del local_var_params['kwargs']

    collection_formats = {}
    path_params = {}

    # Build the query string from the optional filter/pagination params.
    query_params = []
    for query_name in ('name', 'ordering', 'limit', 'offset'):
        if local_var_params.get(query_name) is not None:
            query_params.append((query_name, local_var_params[query_name]))

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/downloads', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Download]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_all_groups(self, **kwargs):  # noqa: E501
    """get_all_groups  # noqa: E501

    ### Required permissions * User account permission: `users:view` (read) / `users:manage` (write)  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and fetch the result via ``thread.get()``.

    :param async_req bool: execute request asynchronously
    :param str name: Filter the returned list by `name`.
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :param _request_timeout: total timeout (single number) or a
                             (connection, read) tuple for this request.
    :return: list[ElementsGroup]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return
    # only the deserialized body (no status code / headers).
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.get_all_groups_with_http_info(**kwargs)  # noqa: E501
def get_all_groups_with_http_info(self, **kwargs):  # noqa: E501
    """get_all_groups  # noqa: E501

    ### Required permissions * User account permission: `users:view` (read) / `users:manage` (write)  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and fetch the result via ``thread.get()``.

    :param async_req bool: execute request asynchronously
    :param str name: Filter the returned list by `name`.
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :param _request_timeout: total timeout (single number) or a
                             (connection, read) tuple for this request.
    :return: tuple(list[ElementsGroup], status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    # Endpoint parameters plus the generic request options.
    all_params = [
        'name',
        'ordering',
        'limit',
        'offset',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Reject unknown keywords, then flatten the accepted ones into
    # local_var_params for uniform lookup below.
    for param_name, param_value in six.iteritems(local_var_params['kwargs']):
        if param_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_groups" % param_name
            )
        local_var_params[param_name] = param_value
    del local_var_params['kwargs']

    collection_formats = {}
    path_params = {}

    # Build the query string from the optional filter/pagination params.
    query_params = []
    for query_name in ('name', 'ordering', 'limit', 'offset'):
        if local_var_params.get(query_name) is not None:
            query_params.append((query_name, local_var_params[query_name]))

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/groups', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[ElementsGroup]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_all_ldap_servers(self, **kwargs):  # noqa: E501
    """get_all_ldap_servers  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and fetch the result via ``thread.get()``.

    :param async_req bool: execute request asynchronously
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :param _request_timeout: total timeout (single number) or a
                             (connection, read) tuple for this request.
    :return: list[LDAPServer]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return
    # only the deserialized body (no status code / headers).
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.get_all_ldap_servers_with_http_info(**kwargs)  # noqa: E501
def get_all_ldap_servers_with_http_info(self, **kwargs):  # noqa: E501
    """get_all_ldap_servers  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and fetch the result via ``thread.get()``.

    :param async_req bool: execute request asynchronously
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :param _request_timeout: total timeout (single number) or a
                             (connection, read) tuple for this request.
    :return: tuple(list[LDAPServer], status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    # Endpoint parameters plus the generic request options.
    all_params = [
        'ordering',
        'limit',
        'offset',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Reject unknown keywords, then flatten the accepted ones into
    # local_var_params for uniform lookup below.
    for param_name, param_value in six.iteritems(local_var_params['kwargs']):
        if param_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_ldap_servers" % param_name
            )
        local_var_params[param_name] = param_value
    del local_var_params['kwargs']

    collection_formats = {}
    path_params = {}

    # Build the query string from the optional pagination/ordering params.
    query_params = []
    for query_name in ('ordering', 'limit', 'offset'):
        if local_var_params.get(query_name) is not None:
            query_params.append((query_name, local_var_params[query_name]))

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/ldap-servers', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[LDAPServer]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_all_ntp_servers(self, **kwargs):  # noqa: E501
    """get_all_ntp_servers  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and fetch the result via ``thread.get()``.

    :param async_req bool: execute request asynchronously
    :param str address: Filter the returned list by `address`.
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :param _request_timeout: total timeout (single number) or a
                             (connection, read) tuple for this request.
    :return: list[NTPServer]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return
    # only the deserialized body (no status code / headers).
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.get_all_ntp_servers_with_http_info(**kwargs)  # noqa: E501
def get_all_ntp_servers_with_http_info(self, **kwargs):  # noqa: E501
    """get_all_ntp_servers  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and fetch the result via ``thread.get()``.

    :param async_req bool: execute request asynchronously
    :param str address: Filter the returned list by `address`.
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :param _request_timeout: total timeout (single number) or a
                             (connection, read) tuple for this request.
    :return: tuple(list[NTPServer], status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    # Endpoint parameters plus the generic request options.
    all_params = [
        'address',
        'ordering',
        'limit',
        'offset',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Reject unknown keywords, then flatten the accepted ones into
    # local_var_params for uniform lookup below.
    for param_name, param_value in six.iteritems(local_var_params['kwargs']):
        if param_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_ntp_servers" % param_name
            )
        local_var_params[param_name] = param_value
    del local_var_params['kwargs']

    collection_formats = {}
    path_params = {}

    # Build the query string from the optional filter/pagination params.
    query_params = []
    for query_name in ('address', 'ordering', 'limit', 'offset'):
        if local_var_params.get(query_name) is not None:
            query_params.append((query_name, local_var_params[query_name]))

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/system/time/servers', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[NTPServer]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_all_storage_nodes(self, **kwargs):  # noqa: E501
    """get_all_storage_nodes  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and fetch the result via ``thread.get()``.

    :param async_req bool: execute request asynchronously
    :param str type: Filter the returned list by `type`.
    :param str backend: Filter the returned list by `backend`.
    :param str name: Filter the returned list by `name`.
    :param str address: Filter the returned list by `address`.
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param bool include_status:
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :param _request_timeout: total timeout (single number) or a
                             (connection, read) tuple for this request.
    :return: list[StorageNode]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return
    # only the deserialized body (no status code / headers).
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.get_all_storage_nodes_with_http_info(**kwargs)  # noqa: E501
def get_all_storage_nodes_with_http_info(self, **kwargs):  # noqa: E501
    """get_all_storage_nodes  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and fetch the result via ``thread.get()``.

    :param async_req bool: execute request asynchronously
    :param str type: Filter the returned list by `type`.
    :param str backend: Filter the returned list by `backend`.
    :param str name: Filter the returned list by `name`.
    :param str address: Filter the returned list by `address`.
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param bool include_status:
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :param _request_timeout: total timeout (single number) or a
                             (connection, read) tuple for this request.
    :return: tuple(list[StorageNode], status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    # Endpoint parameters plus the generic request options.
    all_params = [
        'type',
        'backend',
        'name',
        'address',
        'ordering',
        'limit',
        'offset',
        'include_status',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Reject unknown keywords, then flatten the accepted ones into
    # local_var_params for uniform lookup below.
    for param_name, param_value in six.iteritems(local_var_params['kwargs']):
        if param_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_storage_nodes" % param_name
            )
        local_var_params[param_name] = param_value
    del local_var_params['kwargs']

    collection_formats = {}
    path_params = {}

    # Build the query string from the optional filter/pagination params.
    query_params = []
    for query_name in ('type', 'backend', 'name', 'address', 'ordering',
                       'limit', 'offset', 'include_status'):
        if local_var_params.get(query_name) is not None:
            query_params.append((query_name, local_var_params[query_name]))

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/nodes', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[StorageNode]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_all_users(self, **kwargs):  # noqa: E501
    """get_all_users  # noqa: E501

    ### Required permissions * User account permission: `None` (read) / `users:manage` (write)  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and fetch the result via ``thread.get()``.

    :param async_req bool: execute request asynchronously
    :param str username: Filter the returned list by `username`.
    :param str home: Filter the returned list by `home`.
    :param str full_name: Filter the returned list by `full_name`.
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param bool include_allowed_fs_paths:
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :param _request_timeout: total timeout (single number) or a
                             (connection, read) tuple for this request.
    :return: list[ElementsUser]
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return
    # only the deserialized body (no status code / headers).
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.get_all_users_with_http_info(**kwargs)  # noqa: E501
def get_all_users_with_http_info(self, **kwargs):  # noqa: E501
    """get_all_users  # noqa: E501

    ### Required permissions * User account permission: `None` (read) / `users:manage` (write)  # noqa: E501
    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and fetch the result via ``thread.get()``.

    :param async_req bool: execute request asynchronously
    :param str username: Filter the returned list by `username`.
    :param str home: Filter the returned list by `home`.
    :param str full_name: Filter the returned list by `full_name`.
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param bool include_allowed_fs_paths:
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             without reading/decoding. Default is True.
    :param _request_timeout: total timeout (single number) or a
                             (connection, read) tuple for this request.
    :return: tuple(list[ElementsUser], status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    # Endpoint parameters plus the generic request options.
    all_params = [
        'username',
        'home',
        'full_name',
        'ordering',
        'limit',
        'offset',
        'include_allowed_fs_paths',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Reject unknown keywords, then flatten the accepted ones into
    # local_var_params for uniform lookup below.
    for param_name, param_value in six.iteritems(local_var_params['kwargs']):
        if param_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_users" % param_name
            )
        local_var_params[param_name] = param_value
    del local_var_params['kwargs']

    collection_formats = {}
    path_params = {}

    # Build the query string from the optional filter/pagination params.
    query_params = []
    for query_name in ('username', 'home', 'full_name', 'ordering',
                       'limit', 'offset', 'include_allowed_fs_paths'):
        if local_var_params.get(query_name) is not None:
            query_params.append((query_name, local_var_params[query_name]))

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/users', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[ElementsUser]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_all_workstations(self, **kwargs):  # noqa: E501
    """get_all_workstations  # noqa: E501

    Return the workstation list only (no status code / headers).

    ### Required permissions * Authenticated user * Own workstation or User account permission: `workstations:view` (read) / `workstations:manage` (write)  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_all_workstations(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str hostname: Filter the returned list by `hostname`.
    :param str name: Filter the returned list by `name`.
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: list[Workstation]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return only
    # the deserialized response body.
    return self.get_all_workstations_with_http_info(
        **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def get_all_workstations_with_http_info(self, **kwargs):  # noqa: E501
    """get_all_workstations  # noqa: E501

    List workstations via ``GET /api/2/workstations``.

    ### Required permissions * Authenticated user * Own workstation or User account permission: `workstations:view` (read) / `workstations:manage` (write)  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_all_workstations_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str hostname: Filter the returned list by `hostname`.
    :param str name: Filter the returned list by `name`.
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(list[Workstation], status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: if an unexpected keyword argument is supplied
    """
    # ``locals()`` currently holds ``self`` and ``kwargs``; validated
    # keyword arguments are merged into it below so every parameter can be
    # looked up by name.
    local_var_params = locals()

    # Endpoint-specific parameters plus the generic request options.
    all_params = ['hostname', 'name', 'ordering', 'limit', 'offset']  # noqa: E501
    all_params.extend([
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ])

    # Reject unknown keyword arguments early with a clear error.
    for key, val in local_var_params['kwargs'].items():
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_all_workstations" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    path_params = {}

    # Only forward query parameters that were explicitly provided.
    query_params = []
    for param in ('hostname', 'name', 'ordering', 'limit', 'offset'):
        if local_var_params.get(param) is not None:
            query_params.append((param, local_var_params[param]))

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/workstations', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Workstation]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_certificate_configuration(self, **kwargs):  # noqa: E501
    """get_certificate_configuration  # noqa: E501

    Return the certificate configuration only (no status code / headers).

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_certificate_configuration(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Certificate
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return only
    # the deserialized response body.
    return self.get_certificate_configuration_with_http_info(
        **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def get_certificate_configuration_with_http_info(self, **kwargs):  # noqa: E501
    """get_certificate_configuration  # noqa: E501

    Fetch the certificate configuration via ``GET /api/2/system/certificate``.

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_certificate_configuration_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(Certificate, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: if an unexpected keyword argument is supplied
    """
    # ``locals()`` currently holds ``self`` and ``kwargs``; validated
    # keyword arguments are merged into it below.
    local_var_params = locals()

    # This endpoint takes no endpoint-specific parameters — only the
    # generic request options are accepted.
    all_params = []  # noqa: E501
    all_params.extend([
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ])

    # Reject unknown keyword arguments early with a clear error.
    for key, val in local_var_params['kwargs'].items():
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_certificate_configuration" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/system/certificate', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Certificate',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_client_download_file(self, file, **kwargs):  # noqa: E501
    """get_client_download_file  # noqa: E501

    Return the client download file response only (no status code / headers).

    ### Required permissions * <class 'rest_framework.permissions.AllowAny'>  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_client_download_file(file, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str file: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return only
    # the deserialized response body.
    return self.get_client_download_file_with_http_info(
        file, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def get_client_download_file_with_http_info(self, file, **kwargs):  # noqa: E501
    """get_client_download_file  # noqa: E501

    Download a client installer file via ``GET /api/2/downloads/clients/{file}``.

    ### Required permissions * <class 'rest_framework.permissions.AllowAny'>  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_client_download_file_with_http_info(file, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str file: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: if an unexpected keyword argument is supplied
    :raises ApiValueError: if `file` is missing and client-side validation is on
    """
    # ``locals()`` currently holds ``self``, ``file`` and ``kwargs``;
    # validated keyword arguments are merged into it below.
    local_var_params = locals()

    all_params = ['file']  # noqa: E501
    all_params.extend([
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ])

    # Reject unknown keyword arguments early with a clear error.
    for key, val in local_var_params['kwargs'].items():
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_client_download_file" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'file' is set
    if self.api_client.client_side_validation and local_var_params.get('file') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `file` when calling `get_client_download_file`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'file' in local_var_params:
        path_params['file'] = local_var_params['file']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # NOTE: this endpoint sends no explicit Accept header — it returns a
    # raw file, not JSON.

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/downloads/clients/{file}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_client_downloads(self, **kwargs):  # noqa: E501
    """get_client_downloads  # noqa: E501

    Return the client downloads listing only (no status code / headers).

    ### Required permissions * <class 'rest_framework.permissions.AllowAny'>  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_client_downloads(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: list[ClientsEndpointResponse]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return only
    # the deserialized response body.
    return self.get_client_downloads_with_http_info(
        **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def get_client_downloads_with_http_info(self, **kwargs):  # noqa: E501
    """get_client_downloads  # noqa: E501

    List available client downloads via ``GET /api/2/downloads/clients``.

    ### Required permissions * <class 'rest_framework.permissions.AllowAny'>  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_client_downloads_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(list[ClientsEndpointResponse], status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: if an unexpected keyword argument is supplied
    """
    # ``locals()`` currently holds ``self`` and ``kwargs``; validated
    # keyword arguments are merged into it below.
    local_var_params = locals()

    # This endpoint takes no endpoint-specific parameters — only the
    # generic request options are accepted.
    all_params = []  # noqa: E501
    all_params.extend([
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ])

    # Reject unknown keyword arguments early with a clear error.
    for key, val in local_var_params['kwargs'].items():
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_client_downloads" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/downloads/clients', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[ClientsEndpointResponse]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_client_session(self, id, **kwargs):  # noqa: E501
    """get_client_session  # noqa: E501

    Return a single client session only (no status code / headers).

    ### Required permissions * User account permission: `system:status:view`  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_client_session(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this client session. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: ClientSession
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return only
    # the deserialized response body.
    return self.get_client_session_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def get_client_session_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_client_session  # noqa: E501

    Fetch one client session via ``GET /api/2/client-sessions/{id}``.

    ### Required permissions * User account permission: `system:status:view`  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_client_session_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this client session. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(ClientSession, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: if an unexpected keyword argument is supplied
    :raises ApiValueError: if `id` is missing and client-side validation is on
    """
    # ``locals()`` currently holds ``self``, ``id`` and ``kwargs``;
    # validated keyword arguments are merged into it below.
    local_var_params = locals()

    all_params = ['id']  # noqa: E501
    all_params.extend([
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ])

    # Reject unknown keyword arguments early with a clear error.
    for key, val in local_var_params['kwargs'].items():
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_client_session" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and local_var_params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `get_client_session`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/client-sessions/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ClientSession',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_current_workstation(self, **kwargs):  # noqa: E501
    """get_current_workstation  # noqa: E501

    Return the current workstation only (no status code / headers).

    ### Required permissions * <class 'rest_framework.permissions.AllowAny'>  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_current_workstation(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Workstation
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return only
    # the deserialized response body.
    return self.get_current_workstation_with_http_info(
        **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def get_current_workstation_with_http_info(self, **kwargs):  # noqa: E501
    """get_current_workstation  # noqa: E501

    Fetch the current workstation via ``GET /api/2/workstations/current``.

    ### Required permissions * <class 'rest_framework.permissions.AllowAny'>  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_current_workstation_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(Workstation, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: if an unexpected keyword argument is supplied
    """
    # ``locals()`` currently holds ``self`` and ``kwargs``; validated
    # keyword arguments are merged into it below.
    local_var_params = locals()

    # Endpoint-specific parameters plus the generic request options.
    all_params = ['ordering', 'limit', 'offset']  # noqa: E501
    all_params.extend([
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ])

    # Reject unknown keyword arguments early with a clear error.
    for key, val in local_var_params['kwargs'].items():
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_current_workstation" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    path_params = {}

    # Only forward query parameters that were explicitly provided.
    query_params = []
    for param in ('ordering', 'limit', 'offset'):
        if local_var_params.get(param) is not None:
            query_params.append((param, local_var_params[param]))

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/workstations/current', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Workstation',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_download(self, id, **kwargs):  # noqa: E501
    """get_download  # noqa: E501

    Return a single download only (no status code / headers).

    ### Required permissions * User account permission: `downloads:view`  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_download(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this download. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: Download
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return only
    # the deserialized response body.
    return self.get_download_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def get_download_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_download  # noqa: E501

    Fetch one download via ``GET /api/2/downloads/{id}``.

    ### Required permissions * User account permission: `downloads:view`  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_download_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this download. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(Download, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: if an unexpected keyword argument is supplied
    :raises ApiValueError: if `id` is missing and client-side validation is on
    """
    # ``locals()`` currently holds ``self``, ``id`` and ``kwargs``;
    # validated keyword arguments are merged into it below.
    local_var_params = locals()

    all_params = ['id']  # noqa: E501
    all_params.extend([
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ])

    # Reject unknown keyword arguments early with a clear error.
    for key, val in local_var_params['kwargs'].items():
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_download" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and local_var_params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `get_download`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/downloads/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Download',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_download_archive(self, id, **kwargs):  # noqa: E501
    """get_download_archive  # noqa: E501

    Return a single download archive only (no status code / headers).

    ### Required permissions * Authenticated user  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_download_archive(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: A UUID string identifying this download archive. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: DownloadArchive
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return only
    # the deserialized response body.
    return self.get_download_archive_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def get_download_archive_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_download_archive  # noqa: E501

    Fetch one download archive via ``GET /api/2/download-archive/{id}``.

    ### Required permissions * Authenticated user  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_download_archive_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: A UUID string identifying this download archive. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(DownloadArchive, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    :raises ApiTypeError: if an unexpected keyword argument is supplied
    :raises ApiValueError: if `id` is missing and client-side validation is on
    """
    # ``locals()`` currently holds ``self``, ``id`` and ``kwargs``;
    # validated keyword arguments are merged into it below.
    local_var_params = locals()

    all_params = ['id']  # noqa: E501
    all_params.extend([
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ])

    # Reject unknown keyword arguments early with a clear error.
    for key, val in local_var_params['kwargs'].items():
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_download_archive" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and local_var_params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `get_download_archive`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/download-archive/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='DownloadArchive',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_download_archive_file(self, id, **kwargs):  # noqa: E501
    """get_download_archive_file  # noqa: E501

    Return the archive file response only (no status code / headers).

    ### Required permissions * Authenticated user  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_download_archive_file(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: A UUID string identifying this download archive. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing it to return only
    # the deserialized response body.
    return self.get_download_archive_file_with_http_info(
        id, **dict(kwargs, _return_http_data_only=True))  # noqa: E501
def get_download_archive_file_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_download_archive_file  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously (the request thread is returned).

    :param async_req bool: execute request asynchronously
    :param str id: A UUID string identifying this download archive. (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    accepted = ('id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'id': id}
    for key, val in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_download_archive_file" % key
            )
        params[key] = val

    # `id` is mandatory; enforce when client-side validation is enabled.
    if self.api_client.client_side_validation and params['id'] is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_download_archive_file`")  # noqa: E501

    return self.api_client.call_api(
        '/api/2/download-archive/{id}/download', 'GET',
        {'id': params['id']},  # path params
        [],  # query params
        {},  # header params (no Accept header for this endpoint)
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_download_file(self, id, **kwargs):  # noqa: E501
    """get_download_file  # noqa: E501

    ### Required permissions * User account permission: `downloads:view`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously, in which case the request thread is returned and
    ``thread.get()`` yields the result.

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this download. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, requesting the
    # deserialized payload only (no status code / headers).
    kwargs['_return_http_data_only'] = True
    return self.get_download_file_with_http_info(id, **kwargs)  # noqa: E501
def get_download_file_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_download_file  # noqa: E501

    ### Required permissions * User account permission: `downloads:view`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously (the request thread is returned).

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this download. (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    accepted = ('id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'id': id}
    for key, val in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_download_file" % key
            )
        params[key] = val

    # `id` is mandatory; enforce when client-side validation is enabled.
    if self.api_client.client_side_validation and params['id'] is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_download_file`")  # noqa: E501

    return self.api_client.call_api(
        '/api/2/downloads/{id}/download', 'GET',
        {'id': params['id']},  # path params
        [],  # query params
        {},  # header params (no Accept header for this endpoint)
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_download_icon(self, id, **kwargs):  # noqa: E501
    """get_download_icon  # noqa: E501

    ### Required permissions * User account permission: `downloads:view`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously, in which case the request thread is returned and
    ``thread.get()`` yields the result.

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this download. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, requesting the
    # deserialized payload only (no status code / headers).
    kwargs['_return_http_data_only'] = True
    return self.get_download_icon_with_http_info(id, **kwargs)  # noqa: E501
def get_download_icon_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_download_icon  # noqa: E501

    ### Required permissions * User account permission: `downloads:view`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously (the request thread is returned).

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this download. (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: None
        If the method is called asynchronously, returns the request thread.
    """
    accepted = ('id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'id': id}
    for key, val in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_download_icon" % key
            )
        params[key] = val

    # `id` is mandatory; enforce when client-side validation is enabled.
    if self.api_client.client_side_validation and params['id'] is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_download_icon`")  # noqa: E501

    return self.api_client.call_api(
        '/api/2/downloads/{id}/icon', 'GET',
        {'id': params['id']},  # path params
        [],  # query params
        {},  # header params (no Accept header for this endpoint)
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_group(self, id, **kwargs):  # noqa: E501
    """get_group  # noqa: E501

    ### Required permissions * User account permission: `users:view` (read) / `users:manage` (write)  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously, in which case the request thread is returned and
    ``thread.get()`` yields the result.

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Group. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: ElementsGroupDetail
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, requesting the
    # deserialized payload only (no status code / headers).
    kwargs['_return_http_data_only'] = True
    return self.get_group_with_http_info(id, **kwargs)  # noqa: E501
def get_group_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_group  # noqa: E501

    ### Required permissions * User account permission: `users:view` (read) / `users:manage` (write)  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously (the request thread is returned).

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Group. (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: tuple(ElementsGroupDetail, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    accepted = ('id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'id': id}
    for key, val in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_group" % key
            )
        params[key] = val

    # `id` is mandatory; enforce when client-side validation is enabled.
    if self.api_client.client_side_validation and params['id'] is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_group`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/2/groups/{id}', 'GET',
        {'id': params['id']},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='ElementsGroupDetail',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_home_workspace(self, id, **kwargs):  # noqa: E501
    """get_home_workspace  # noqa: E501

    ### Required permissions * User account permission: `users:manage`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously, in which case the request thread is returned and
    ``thread.get()`` yields the result.

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this User. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: Workspace
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, requesting the
    # deserialized payload only (no status code / headers).
    kwargs['_return_http_data_only'] = True
    return self.get_home_workspace_with_http_info(id, **kwargs)  # noqa: E501
def get_home_workspace_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_home_workspace  # noqa: E501

    ### Required permissions * User account permission: `users:manage`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously (the request thread is returned).

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this User. (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: tuple(Workspace, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    accepted = ('id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'id': id}
    for key, val in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_home_workspace" % key
            )
        params[key] = val

    # `id` is mandatory; enforce when client-side validation is enabled.
    if self.api_client.client_side_validation and params['id'] is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_home_workspace`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/2/users/{id}/home', 'GET',
        {'id': params['id']},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Workspace',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_ipmi_configuration(self, id, **kwargs):  # noqa: E501
    """get_ipmi_configuration  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously, in which case the request thread is returned and
    ``thread.get()`` yields the result.

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Storage Node. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: Ipmi
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, requesting the
    # deserialized payload only (no status code / headers).
    kwargs['_return_http_data_only'] = True
    return self.get_ipmi_configuration_with_http_info(id, **kwargs)  # noqa: E501
def get_ipmi_configuration_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_ipmi_configuration  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously (the request thread is returned).

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Storage Node. (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: tuple(Ipmi, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    accepted = ('id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'id': id}
    for key, val in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_ipmi_configuration" % key
            )
        params[key] = val

    # `id` is mandatory; enforce when client-side validation is enabled.
    if self.api_client.client_side_validation and params['id'] is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_ipmi_configuration`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/2/nodes/{id}/ipmi', 'GET',
        {'id': params['id']},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='Ipmi',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_ldap_server(self, id, **kwargs):  # noqa: E501
    """get_ldap_server  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously, in which case the request thread is returned and
    ``thread.get()`` yields the result.

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this LDAP Server. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: LDAPServer
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, requesting the
    # deserialized payload only (no status code / headers).
    kwargs['_return_http_data_only'] = True
    return self.get_ldap_server_with_http_info(id, **kwargs)  # noqa: E501
def get_ldap_server_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_ldap_server  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously (the request thread is returned).

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this LDAP Server. (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: tuple(LDAPServer, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    accepted = ('id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'id': id}
    for key, val in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_ldap_server" % key
            )
        params[key] = val

    # `id` is mandatory; enforce when client-side validation is enabled.
    if self.api_client.client_side_validation and params['id'] is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_ldap_server`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/2/ldap-servers/{id}', 'GET',
        {'id': params['id']},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='LDAPServer',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_ldap_server_groups(self, id, **kwargs):  # noqa: E501
    """get_ldap_server_groups  # noqa: E501

    ### Required permissions * User account permission: `users:manage`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously, in which case the request thread is returned and
    ``thread.get()`` yields the result.

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this LDAP Server. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: LDAPServerGroups
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, requesting the
    # deserialized payload only (no status code / headers).
    kwargs['_return_http_data_only'] = True
    return self.get_ldap_server_groups_with_http_info(id, **kwargs)  # noqa: E501
def get_ldap_server_groups_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_ldap_server_groups  # noqa: E501

    ### Required permissions * User account permission: `users:manage`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously (the request thread is returned).

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this LDAP Server. (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: tuple(LDAPServerGroups, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    accepted = ('id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'id': id}
    for key, val in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_ldap_server_groups" % key
            )
        params[key] = val

    # `id` is mandatory; enforce when client-side validation is enabled.
    if self.api_client.client_side_validation and params['id'] is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_ldap_server_groups`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/2/ldap-servers/{id}/groups', 'GET',
        {'id': params['id']},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='LDAPServerGroups',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_ldap_server_users(self, id, **kwargs):  # noqa: E501
    """get_ldap_server_users  # noqa: E501

    ### Required permissions * User account permission: `users:manage`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously, in which case the request thread is returned and
    ``thread.get()`` yields the result.

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this LDAP Server. (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: LDAPServerUsers
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, requesting the
    # deserialized payload only (no status code / headers).
    kwargs['_return_http_data_only'] = True
    return self.get_ldap_server_users_with_http_info(id, **kwargs)  # noqa: E501
def get_ldap_server_users_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_ldap_server_users  # noqa: E501

    ### Required permissions * User account permission: `users:manage`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously (the request thread is returned).

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this LDAP Server. (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object
        is returned without reading/decoding response data. Default True.
    :param _request_timeout: total timeout (one number) or a
        (connection, read) tuple.
    :return: tuple(LDAPServerUsers, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    accepted = ('id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    params = {'id': id}
    for key, val in kwargs.items():
        # Reject any keyword this endpoint does not understand.
        if key not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_ldap_server_users" % key
            )
        params[key] = val

    # `id` is mandatory; enforce when client-side validation is enabled.
    if self.api_client.client_side_validation and params['id'] is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_ldap_server_users`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/api/2/ldap-servers/{id}/users', 'GET',
        {'id': params['id']},  # path params
        [],  # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='LDAPServerUsers',  # noqa: E501
        auth_settings=['Bearer'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_license(self, **kwargs):  # noqa: E501
    """get_license  # noqa: E501

    GET ``/api/2/license``.
    ### Required permissions * <class 'rest_framework.permissions.AllowAny'>

    Synchronous by default; pass ``async_req=True`` to run the request on a
    thread and receive that thread instead (call ``.get()`` for the result).

    :param async_req bool: execute request asynchronously
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body
                             (default True)
    :param _request_timeout: total timeout as a single number, or a
                             (connection, read) tuple
    :return: License
             If the method is called asynchronously, returns the request
             thread.
    """
    # Request only the deserialized payload; the *_with_http_info variant
    # would otherwise return a (data, status_code, headers) tuple.
    kwargs.update(_return_http_data_only=True)
    return self.get_license_with_http_info(**kwargs)  # noqa: E501
def get_license_with_http_info(self, **kwargs):  # noqa: E501
    """get_license  # noqa: E501
    ### Required permissions * <class 'rest_framework.permissions.AllowAny'>  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_license_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(License, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() taken first thing captures exactly 'self' and 'kwargs';
    # validated kwargs are merged into this dict below so later code can
    # use uniform .get() lookups.
    local_var_params = locals()

    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument not in the whitelist above, then promote
    # accepted ones to top-level entries of local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_license" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/license', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='License',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_local_time(self, **kwargs):  # noqa: E501
    """get_local_time  # noqa: E501

    GET ``/api/2/system/time``.
    ### Required permissions * User account permission: `system:admin-access`

    Runs synchronously unless ``async_req=True`` is supplied, in which case
    the request thread is returned (use ``.get()`` on it for the result).

    :param async_req bool: execute request asynchronously
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body
                             (default True)
    :param _request_timeout: total timeout as a single number, or a
                             (connection, read) tuple
    :return: TimeEndpointResponse
             If the method is called asynchronously, returns the request
             thread.
    """
    # Strip status code and headers from the delegate's return value.
    kwargs.update(_return_http_data_only=True)
    return self.get_local_time_with_http_info(**kwargs)  # noqa: E501
def get_local_time_with_http_info(self, **kwargs):  # noqa: E501
    """get_local_time  # noqa: E501
    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_local_time_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(TimeEndpointResponse, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() taken first thing captures exactly 'self' and 'kwargs';
    # validated kwargs are merged into this dict below.
    local_var_params = locals()

    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument not in the whitelist above, then promote
    # accepted ones to top-level entries of local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_local_time" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/system/time', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='TimeEndpointResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_log(self, path, **kwargs):  # noqa: E501
    """get_log  # noqa: E501

    GET ``/api/2/system/log/{path}``.
    ### Required permissions * Authenticated user

    Synchronous by default; with ``async_req=True`` the request thread is
    returned instead (call ``.get()`` on it to wait for completion).

    :param async_req bool: execute request asynchronously
    :param str path: (required)
    :param int offset:
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body
                             (default True)
    :param _request_timeout: total timeout as a single number, or a
                             (connection, read) tuple
    :return: None
             If the method is called asynchronously, returns the request
             thread.
    """
    # Only the payload is wanted here, not the (status, headers) extras.
    kwargs.update(_return_http_data_only=True)
    return self.get_log_with_http_info(path, **kwargs)  # noqa: E501
def get_log_with_http_info(self, path, **kwargs):  # noqa: E501
    """get_log  # noqa: E501
    ### Required permissions * Authenticated user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_log_with_http_info(path, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param str path: (required)
    :param int offset:
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() taken first thing captures 'self', 'path' and 'kwargs';
    # validated kwargs are merged into this dict below.
    local_var_params = locals()

    all_params = ['path', 'offset']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument not in the whitelist above, then promote
    # accepted ones to top-level entries of local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_log" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'path' is set
    if self.api_client.client_side_validation and ('path' not in local_var_params or  # noqa: E501
                                                   local_var_params['path'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `path` when calling `get_log`")  # noqa: E501

    # NOTE(review): the spec pattern is r'.*', which matches any string,
    # so this client-side check can never fail; kept as generated.
    if self.api_client.client_side_validation and 'path' in local_var_params and not re.search(r'.*', local_var_params['path']):  # noqa: E501
        raise ApiValueError("Invalid value for parameter `path` when calling `get_log`, must conform to the pattern `/.*/`")  # noqa: E501
    collection_formats = {}

    path_params = {}
    if 'path' in local_var_params:
        path_params['path'] = local_var_params['path']  # noqa: E501

    query_params = []
    if 'offset' in local_var_params and local_var_params['offset'] is not None:  # noqa: E501
        query_params.append(('offset', local_var_params['offset']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/system/log/{path}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_node_ipmi_sensors(self, id, **kwargs):  # noqa: E501
    """get_node_ipmi_sensors  # noqa: E501

    GET ``/api/2/nodes/{id}/sensors``.
    ### Required permissions * User account permission: `system:status:view`

    Synchronous by default; pass ``async_req=True`` for the request thread
    (call ``.get()`` on it for the result).

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Storage Node. (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body
                             (default True)
    :param _request_timeout: total timeout as a single number, or a
                             (connection, read) tuple
    :return: Sensors
             If the method is called asynchronously, returns the request
             thread.
    """
    # Delegate, asking for the payload only instead of the full
    # (data, status_code, headers) tuple.
    kwargs.update(_return_http_data_only=True)
    return self.get_node_ipmi_sensors_with_http_info(id, **kwargs)  # noqa: E501
def get_node_ipmi_sensors_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_node_ipmi_sensors  # noqa: E501
    ### Required permissions * User account permission: `system:status:view`  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_node_ipmi_sensors_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Storage Node. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(Sensors, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() taken first thing captures 'self', 'id' and 'kwargs';
    # validated kwargs are merged into this dict below.
    local_var_params = locals()

    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument not in the whitelist above, then promote
    # accepted ones to top-level entries of local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_node_ipmi_sensors" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `get_node_ipmi_sensors`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/nodes/{id}/sensors', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Sensors',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_node_stats(self, id, **kwargs):  # noqa: E501
    """get_node_stats  # noqa: E501

    GET ``/api/2/nodes/{id}/stats``.
    ### Required permissions * User account permission: `system:status:view`

    Synchronous by default; with ``async_req=True`` the request thread is
    returned (call ``.get()`` on it for the result).

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Storage Node. (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body
                             (default True)
    :param _request_timeout: total timeout as a single number, or a
                             (connection, read) tuple
    :return: Stats
             If the method is called asynchronously, returns the request
             thread.
    """
    # Payload only; drop the (status_code, headers) part of the tuple.
    kwargs.update(_return_http_data_only=True)
    return self.get_node_stats_with_http_info(id, **kwargs)  # noqa: E501
def get_node_stats_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_node_stats  # noqa: E501
    ### Required permissions * User account permission: `system:status:view`  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_node_stats_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Storage Node. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(Stats, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() taken first thing captures 'self', 'id' and 'kwargs';
    # validated kwargs are merged into this dict below.
    local_var_params = locals()

    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument not in the whitelist above, then promote
    # accepted ones to top-level entries of local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_node_stats" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `get_node_stats`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/nodes/{id}/stats', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Stats',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_ntp_server(self, id, **kwargs):  # noqa: E501
    """get_ntp_server  # noqa: E501

    GET ``/api/2/system/time/servers/{id}``.
    ### Required permissions * User account permission: `system:admin-access`

    Synchronous by default; pass ``async_req=True`` for the request thread
    (call ``.get()`` on it for the result).

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this NTP Server. (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body
                             (default True)
    :param _request_timeout: total timeout as a single number, or a
                             (connection, read) tuple
    :return: NTPServer
             If the method is called asynchronously, returns the request
             thread.
    """
    # Keep the deserialized payload, discard status code and headers.
    kwargs.update(_return_http_data_only=True)
    return self.get_ntp_server_with_http_info(id, **kwargs)  # noqa: E501
def get_ntp_server_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_ntp_server  # noqa: E501
    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_ntp_server_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this NTP Server. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(NTPServer, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() taken first thing captures 'self', 'id' and 'kwargs';
    # validated kwargs are merged into this dict below.
    local_var_params = locals()

    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument not in the whitelist above, then promote
    # accepted ones to top-level entries of local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_ntp_server" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `get_ntp_server`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/system/time/servers/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='NTPServer',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_parameters(self, **kwargs):  # noqa: E501
    """get_parameters  # noqa: E501

    GET ``/api/2/parameters``.
    ### Required permissions * <class 'rest_framework.permissions.AllowAny'>

    Synchronous by default; with ``async_req=True`` the request thread is
    returned (call ``.get()`` on it for the result).

    :param async_req bool: execute request asynchronously
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body
                             (default True)
    :param _request_timeout: total timeout as a single number, or a
                             (connection, read) tuple
    :return: Parameters
             If the method is called asynchronously, returns the request
             thread.
    """
    # Return the deserialized payload only, not the full response triple.
    kwargs.update(_return_http_data_only=True)
    return self.get_parameters_with_http_info(**kwargs)  # noqa: E501
def get_parameters_with_http_info(self, **kwargs):  # noqa: E501
    """get_parameters  # noqa: E501
    ### Required permissions * <class 'rest_framework.permissions.AllowAny'>  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_parameters_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(Parameters, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() taken first thing captures exactly 'self' and 'kwargs';
    # validated kwargs are merged into this dict below.
    local_var_params = locals()

    all_params = ['ordering', 'limit', 'offset']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument not in the whitelist above, then promote
    # accepted ones to top-level entries of local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_parameters" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    path_params = {}

    # Optional pagination/ordering query parameters; omitted when unset.
    query_params = []
    if 'ordering' in local_var_params and local_var_params['ordering'] is not None:  # noqa: E501
        query_params.append(('ordering', local_var_params['ordering']))  # noqa: E501
    if 'limit' in local_var_params and local_var_params['limit'] is not None:  # noqa: E501
        query_params.append(('limit', local_var_params['limit']))  # noqa: E501
    if 'offset' in local_var_params and local_var_params['offset'] is not None:  # noqa: E501
        query_params.append(('offset', local_var_params['offset']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/parameters', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Parameters',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_profile(self, **kwargs):  # noqa: E501
    """get_profile  # noqa: E501

    GET ``/api/2/users/me``.
    ### Required permissions * Authenticated user

    Synchronous by default; pass ``async_req=True`` to receive the request
    thread instead (call ``.get()`` on it for the result).

    :param async_req bool: execute request asynchronously
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body
                             (default True)
    :param _request_timeout: total timeout as a single number, or a
                             (connection, read) tuple
    :return: ElementsUserProfile
             If the method is called asynchronously, returns the request
             thread.
    """
    # Data-only form of the response; headers/status are dropped.
    kwargs.update(_return_http_data_only=True)
    return self.get_profile_with_http_info(**kwargs)  # noqa: E501
def get_profile_with_http_info(self, **kwargs):  # noqa: E501
    """get_profile  # noqa: E501
    ### Required permissions * Authenticated user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_profile_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param str ordering: Which field to use when ordering the results.
    :param int limit: Number of results to return per page.
    :param int offset: The initial index from which to return the results.
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(ElementsUserProfile, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() taken first thing captures exactly 'self' and 'kwargs';
    # validated kwargs are merged into this dict below.
    local_var_params = locals()

    all_params = ['ordering', 'limit', 'offset']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject any keyword argument not in the whitelist above, then promote
    # accepted ones to top-level entries of local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_profile" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    path_params = {}

    # Optional pagination/ordering query parameters; omitted when unset.
    query_params = []
    if 'ordering' in local_var_params and local_var_params['ordering'] is not None:  # noqa: E501
        query_params.append(('ordering', local_var_params['ordering']))  # noqa: E501
    if 'limit' in local_var_params and local_var_params['limit'] is not None:  # noqa: E501
        query_params.append(('limit', local_var_params['limit']))  # noqa: E501
    if 'offset' in local_var_params and local_var_params['offset'] is not None:  # noqa: E501
        query_params.append(('offset', local_var_params['offset']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/users/me', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ElementsUserProfile',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_release_notes(self, **kwargs):  # noqa: E501
    """get_release_notes  # noqa: E501

    GET ``/api/2/release-notes``.
    ### Required permissions * Authenticated user

    Synchronous by default; with ``async_req=True`` the request thread is
    returned (call ``.get()`` on it for the result).

    :param async_req bool: execute request asynchronously
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding the body
                             (default True)
    :param _request_timeout: total timeout as a single number, or a
                             (connection, read) tuple
    :return: list[ReleaseNotesEndpointResponse]
             If the method is called asynchronously, returns the request
             thread.
    """
    # Only the deserialized body is of interest here.
    kwargs.update(_return_http_data_only=True)
    return self.get_release_notes_with_http_info(**kwargs)  # noqa: E501
def get_release_notes_with_http_info(self, **kwargs):  # noqa: E501
    """get_release_notes  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_release_notes_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(list[ReleaseNotesEndpointResponse], status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: locals() must remain the first statement -- it snapshots the
    # declared parameters plus the raw ``kwargs`` dict before any other
    # local variable exists.
    local_var_params = locals()

    # Whitelist of keyword arguments this endpoint accepts.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params so they can be looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_release_notes" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # No path, query, form or body parameters for this endpoint.
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/release-notes', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[ReleaseNotesEndpointResponse]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_service_status(self, id, service, **kwargs):  # noqa: E501
    """get_service_status  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and call ``get()`` on the returned thread.

    >>> thread = api.get_service_status(id, service, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Storage Node. (required)
    :param str service: (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout (single number) or a
        (connection, read) tuple.
    :return: ServiceStatus
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only returns.
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.get_service_status_with_http_info(id, service, **call_kwargs)  # noqa: E501
def get_service_status_with_http_info(self, id, service, **kwargs):  # noqa: E501
    """get_service_status  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_service_status_with_http_info(id, service, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Storage Node. (required)
    :param str service: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(ServiceStatus, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: locals() must remain the first statement -- it snapshots the
    # declared parameters (id, service) plus the raw ``kwargs`` dict.
    local_var_params = locals()

    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['id', 'service']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_service_status" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `get_service_status`")  # noqa: E501
    # verify the required parameter 'service' is set
    if self.api_client.client_side_validation and ('service' not in local_var_params or  # noqa: E501
                                                   local_var_params['service'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `service` when calling `get_service_status`")  # noqa: E501

    collection_formats = {}

    # Both parameters are substituted into the URL template below.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501
    if 'service' in local_var_params:
        path_params['service'] = local_var_params['service']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/nodes/{id}/services/{service}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ServiceStatus',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_smtp_configuration(self, **kwargs):  # noqa: E501
    """get_smtp_configuration  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and call ``get()`` on the returned thread.

    >>> thread = api.get_smtp_configuration(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout (single number) or a
        (connection, read) tuple.
    :return: SMTPConfiguration
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only returns.
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.get_smtp_configuration_with_http_info(**call_kwargs)  # noqa: E501
def get_smtp_configuration_with_http_info(self, **kwargs):  # noqa: E501
    """get_smtp_configuration  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_smtp_configuration_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(SMTPConfiguration, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: locals() must remain the first statement -- it snapshots the
    # declared parameters plus the raw ``kwargs`` dict.
    local_var_params = locals()

    # Whitelist of keyword arguments this endpoint accepts.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_smtp_configuration" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # No path, query, form or body parameters for this endpoint.
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/system/smtp', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SMTPConfiguration',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_stor_next_license(self, **kwargs):  # noqa: E501
    """get_stor_next_license  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access` * License component: stornext_mdc  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and call ``get()`` on the returned thread.

    >>> thread = api.get_stor_next_license(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout (single number) or a
        (connection, read) tuple.
    :return: StorNextLicenseEndpointResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only returns.
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.get_stor_next_license_with_http_info(**call_kwargs)  # noqa: E501
def get_stor_next_license_with_http_info(self, **kwargs):  # noqa: E501
    """get_stor_next_license  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access` * License component: stornext_mdc  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_stor_next_license_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(StorNextLicenseEndpointResponse, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: locals() must remain the first statement -- it snapshots the
    # declared parameters plus the raw ``kwargs`` dict.
    local_var_params = locals()

    # Whitelist of keyword arguments this endpoint accepts.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_stor_next_license" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # No path, query, form or body parameters for this endpoint.
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/stornext-license', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='StorNextLicenseEndpointResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_storage_node(self, id, **kwargs):  # noqa: E501
    """get_storage_node  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and call ``get()`` on the returned thread.

    >>> thread = api.get_storage_node(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Storage Node. (required)
    :param bool include_status:
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout (single number) or a
        (connection, read) tuple.
    :return: StorageNode
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only returns.
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.get_storage_node_with_http_info(id, **call_kwargs)  # noqa: E501
def get_storage_node_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_storage_node  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_storage_node_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Storage Node. (required)
    :param bool include_status:
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(StorageNode, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: locals() must remain the first statement -- it snapshots the
    # declared parameters (id) plus the raw ``kwargs`` dict.
    local_var_params = locals()

    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['id', 'include_status']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_storage_node" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `get_storage_node`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    # Optional 'include_status' flag is passed through as a query string
    # parameter only when explicitly supplied.
    query_params = []
    if 'include_status' in local_var_params and local_var_params['include_status'] is not None:  # noqa: E501
        query_params.append(('include_status', local_var_params['include_status']))  # noqa: E501

    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/nodes/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='StorageNode',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_system_info(self, **kwargs):  # noqa: E501
    """get_system_info  # noqa: E501

    ### Required permissions * <class 'rest_framework.permissions.AllowAny'>  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and call ``get()`` on the returned thread.

    >>> thread = api.get_system_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout (single number) or a
        (connection, read) tuple.
    :return: SystemInfoEndpointResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only returns.
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.get_system_info_with_http_info(**call_kwargs)  # noqa: E501
def get_system_info_with_http_info(self, **kwargs):  # noqa: E501
    """get_system_info  # noqa: E501

    ### Required permissions * <class 'rest_framework.permissions.AllowAny'>  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_system_info_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(SystemInfoEndpointResponse, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: locals() must remain the first statement -- it snapshots the
    # declared parameters plus the raw ``kwargs`` dict.
    local_var_params = locals()

    # Whitelist of keyword arguments this endpoint accepts.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_system_info" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # No path, query, form or body parameters for this endpoint.
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/system/info', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SystemInfoEndpointResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_user(self, id, **kwargs):  # noqa: E501
    """get_user  # noqa: E501

    ### Required permissions * User account permission: `None` (read) / `users:manage` (write)  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and call ``get()`` on the returned thread.

    >>> thread = api.get_user(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this User. (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout (single number) or a
        (connection, read) tuple.
    :return: ElementsUserDetail
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only returns.
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.get_user_with_http_info(id, **call_kwargs)  # noqa: E501
def get_user_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_user  # noqa: E501

    ### Required permissions * User account permission: `None` (read) / `users:manage` (write)  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_user_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this User. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(ElementsUserDetail, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: locals() must remain the first statement -- it snapshots the
    # declared parameters (id) plus the raw ``kwargs`` dict.
    local_var_params = locals()

    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_user" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `get_user`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/users/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='ElementsUserDetail',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_workstation(self, id, **kwargs):  # noqa: E501
    """get_workstation  # noqa: E501

    ### Required permissions * Authenticated user * Own workstation or User account permission: `workstations:view` (read) / `workstations:manage` (write)  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and call ``get()`` on the returned thread.

    >>> thread = api.get_workstation(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: A unique value identifying this workstation. (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout (single number) or a
        (connection, read) tuple.
    :return: Workstation
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only returns.
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.get_workstation_with_http_info(id, **call_kwargs)  # noqa: E501
def get_workstation_with_http_info(self, id, **kwargs):  # noqa: E501
    """get_workstation  # noqa: E501

    ### Required permissions * Authenticated user * Own workstation or User account permission: `workstations:view` (read) / `workstations:manage` (write)  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_workstation_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: A unique value identifying this workstation. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(Workstation, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: locals() must remain the first statement -- it snapshots the
    # declared parameters (id) plus the raw ``kwargs`` dict.
    local_var_params = locals()

    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_workstation" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                   local_var_params['id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `get_workstation`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/workstations/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Workstation',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def install_stor_next_license(self, stornext_license, **kwargs):  # noqa: E501
    """install_stor_next_license  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access` * License component: stornext_mdc  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request
    asynchronously and call ``get()`` on the returned thread.

    >>> thread = api.install_stor_next_license(stornext_license, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param StornextLicense stornext_license: (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        object without reading/decoding the body. Default is True.
    :param _request_timeout: total request timeout (single number) or a
        (connection, read) tuple.
    :return: StorNextLicenseEndpointResponse
        If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only returns.
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.install_stor_next_license_with_http_info(stornext_license, **call_kwargs)  # noqa: E501
def install_stor_next_license_with_http_info(self, stornext_license, **kwargs):  # noqa: E501
    """install_stor_next_license  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access` * License component: stornext_mdc  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.install_stor_next_license_with_http_info(stornext_license, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param StornextLicense stornext_license: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(StorNextLicenseEndpointResponse, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously,
             returns the request thread.
    """
    # NOTE: locals() must remain the first statement -- it snapshots the
    # declared parameters (stornext_license) plus the raw ``kwargs`` dict.
    local_var_params = locals()

    # Whitelist of keyword arguments this endpoint accepts.
    all_params = ['stornext_license']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten accepted ones into
    # local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method install_stor_next_license" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'stornext_license' is set
    if self.api_client.client_side_validation and ('stornext_license' not in local_var_params or  # noqa: E501
                                                   local_var_params['stornext_license'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `stornext_license` when calling `install_stor_next_license`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}

    # The license object is sent as the JSON request body.
    body_params = None
    if 'stornext_license' in local_var_params:
        body_params = local_var_params['stornext_license']

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['Bearer']  # noqa: E501

    return self.api_client.call_api(
        '/api/2/stornext-license', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='StorNextLicenseEndpointResponse',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def patch_current_workstation(self, workstation_partial_update, **kwargs):  # noqa: E501
    """patch_current_workstation  # noqa: E501

    ### Required permissions * <class 'rest_framework.permissions.AllowAny'>  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run asynchronously:

    >>> thread = api.patch_current_workstation(workstation_partial_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param WorkstationPartialUpdate workstation_partial_update: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total timeout, or a (connection, read) tuple.
    :return: Workstation
             If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, keeping only the body.
    kwargs.update(_return_http_data_only=True)
    return self.patch_current_workstation_with_http_info(workstation_partial_update, **kwargs)  # noqa: E501
def patch_current_workstation_with_http_info(self, workstation_partial_update, **kwargs):  # noqa: E501
    """patch_current_workstation  # noqa: E501

    ### Required permissions * <class 'rest_framework.permissions.AllowAny'>  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run asynchronously:

    >>> thread = api.patch_current_workstation_with_http_info(workstation_partial_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param WorkstationPartialUpdate workstation_partial_update: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total timeout, or a (connection, read) tuple.
    :return: tuple(Workstation, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously, returns the request thread.
    """
    local_var_params = locals()

    # Every keyword this endpoint accepts; anything else is a caller error.
    all_params = [
        'workstation_partial_update',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_current_workstation" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation of the required body parameter.
    if self.api_client.client_side_validation and \
            local_var_params.get('workstation_partial_update') is None:
        raise ApiValueError("Missing the required parameter `workstation_partial_update` when calling `patch_current_workstation`")  # noqa: E501

    # JSON in, JSON out.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2/workstations/current', 'PATCH',
        {},  # path params
        [],  # query params
        header_params,
        body=local_var_params.get('workstation_partial_update'),
        post_params=[],  # no form fields
        files={},  # no file uploads
        response_type='Workstation',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer token authentication
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={})
def patch_download_archive(self, id, download_archive_partial_update, **kwargs):  # noqa: E501
    """patch_download_archive  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run asynchronously:

    >>> thread = api.patch_download_archive(id, download_archive_partial_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: A UUID string identifying this download archive. (required)
    :param DownloadArchivePartialUpdate download_archive_partial_update: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total timeout, or a (connection, read) tuple.
    :return: DownloadArchive
             If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, keeping only the body.
    kwargs.update(_return_http_data_only=True)
    return self.patch_download_archive_with_http_info(id, download_archive_partial_update, **kwargs)  # noqa: E501
def patch_download_archive_with_http_info(self, id, download_archive_partial_update, **kwargs):  # noqa: E501
    """patch_download_archive  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run asynchronously:

    >>> thread = api.patch_download_archive_with_http_info(id, download_archive_partial_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: A UUID string identifying this download archive. (required)
    :param DownloadArchivePartialUpdate download_archive_partial_update: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total timeout, or a (connection, read) tuple.
    :return: tuple(DownloadArchive, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously, returns the request thread.
    """
    local_var_params = locals()

    # Every keyword this endpoint accepts; anything else is a caller error.
    all_params = [
        'id',
        'download_archive_partial_update',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_download_archive" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation of the required parameters.
    if self.api_client.client_side_validation:
        if local_var_params.get('id') is None:
            raise ApiValueError("Missing the required parameter `id` when calling `patch_download_archive`")  # noqa: E501
        if local_var_params.get('download_archive_partial_update') is None:
            raise ApiValueError("Missing the required parameter `download_archive_partial_update` when calling `patch_download_archive`")  # noqa: E501

    # `id` is interpolated into the URL template below.
    path_params = {'id': local_var_params['id']}

    # JSON in, JSON out.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2/download-archive/{id}', 'PATCH',
        path_params,
        [],  # query params
        header_params,
        body=local_var_params.get('download_archive_partial_update'),
        post_params=[],  # no form fields
        files={},  # no file uploads
        response_type='DownloadArchive',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer token authentication
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={})
def patch_group(self, id, elements_group_detail_partial_update, **kwargs):  # noqa: E501
    """patch_group  # noqa: E501

    ### Required permissions * User account permission: `users:view` (read) / `users:manage` (write)  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run asynchronously:

    >>> thread = api.patch_group(id, elements_group_detail_partial_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Group. (required)
    :param ElementsGroupDetailPartialUpdate elements_group_detail_partial_update: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total timeout, or a (connection, read) tuple.
    :return: ElementsGroupDetail
             If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, keeping only the body.
    kwargs.update(_return_http_data_only=True)
    return self.patch_group_with_http_info(id, elements_group_detail_partial_update, **kwargs)  # noqa: E501
def patch_group_with_http_info(self, id, elements_group_detail_partial_update, **kwargs):  # noqa: E501
    """patch_group  # noqa: E501

    ### Required permissions * User account permission: `users:view` (read) / `users:manage` (write)  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run asynchronously:

    >>> thread = api.patch_group_with_http_info(id, elements_group_detail_partial_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Group. (required)
    :param ElementsGroupDetailPartialUpdate elements_group_detail_partial_update: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total timeout, or a (connection, read) tuple.
    :return: tuple(ElementsGroupDetail, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously, returns the request thread.
    """
    local_var_params = locals()

    # Every keyword this endpoint accepts; anything else is a caller error.
    all_params = [
        'id',
        'elements_group_detail_partial_update',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_group" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation of the required parameters.
    if self.api_client.client_side_validation:
        if local_var_params.get('id') is None:
            raise ApiValueError("Missing the required parameter `id` when calling `patch_group`")  # noqa: E501
        if local_var_params.get('elements_group_detail_partial_update') is None:
            raise ApiValueError("Missing the required parameter `elements_group_detail_partial_update` when calling `patch_group`")  # noqa: E501

    # `id` is interpolated into the URL template below.
    path_params = {'id': local_var_params['id']}

    # JSON in, JSON out.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2/groups/{id}', 'PATCH',
        path_params,
        [],  # query params
        header_params,
        body=local_var_params.get('elements_group_detail_partial_update'),
        post_params=[],  # no form fields
        files={},  # no file uploads
        response_type='ElementsGroupDetail',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer token authentication
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={})
def patch_ntp_server(self, id, ntp_server_partial_update, **kwargs):  # noqa: E501
    """patch_ntp_server  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run asynchronously:

    >>> thread = api.patch_ntp_server(id, ntp_server_partial_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this NTP Server. (required)
    :param NTPServerPartialUpdate ntp_server_partial_update: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total timeout, or a (connection, read) tuple.
    :return: NTPServer
             If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, keeping only the body.
    kwargs.update(_return_http_data_only=True)
    return self.patch_ntp_server_with_http_info(id, ntp_server_partial_update, **kwargs)  # noqa: E501
def patch_ntp_server_with_http_info(self, id, ntp_server_partial_update, **kwargs):  # noqa: E501
    """patch_ntp_server  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run asynchronously:

    >>> thread = api.patch_ntp_server_with_http_info(id, ntp_server_partial_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this NTP Server. (required)
    :param NTPServerPartialUpdate ntp_server_partial_update: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total timeout, or a (connection, read) tuple.
    :return: tuple(NTPServer, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously, returns the request thread.
    """
    local_var_params = locals()

    # Every keyword this endpoint accepts; anything else is a caller error.
    all_params = [
        'id',
        'ntp_server_partial_update',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_ntp_server" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation of the required parameters.
    if self.api_client.client_side_validation:
        if local_var_params.get('id') is None:
            raise ApiValueError("Missing the required parameter `id` when calling `patch_ntp_server`")  # noqa: E501
        if local_var_params.get('ntp_server_partial_update') is None:
            raise ApiValueError("Missing the required parameter `ntp_server_partial_update` when calling `patch_ntp_server`")  # noqa: E501

    # `id` is interpolated into the URL template below.
    path_params = {'id': local_var_params['id']}

    # JSON in, JSON out.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2/system/time/servers/{id}', 'PATCH',
        path_params,
        [],  # query params
        header_params,
        body=local_var_params.get('ntp_server_partial_update'),
        post_params=[],  # no form fields
        files={},  # no file uploads
        response_type='NTPServer',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer token authentication
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={})
def patch_profile(self, elements_user_profile_partial_update, **kwargs):  # noqa: E501
    """patch_profile  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run asynchronously:

    >>> thread = api.patch_profile(elements_user_profile_partial_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param ElementsUserProfilePartialUpdate elements_user_profile_partial_update: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total timeout, or a (connection, read) tuple.
    :return: ElementsUserProfile
             If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, keeping only the body.
    kwargs.update(_return_http_data_only=True)
    return self.patch_profile_with_http_info(elements_user_profile_partial_update, **kwargs)  # noqa: E501
def patch_profile_with_http_info(self, elements_user_profile_partial_update, **kwargs):  # noqa: E501
    """patch_profile  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run asynchronously:

    >>> thread = api.patch_profile_with_http_info(elements_user_profile_partial_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param ElementsUserProfilePartialUpdate elements_user_profile_partial_update: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total timeout, or a (connection, read) tuple.
    :return: tuple(ElementsUserProfile, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously, returns the request thread.
    """
    local_var_params = locals()

    # Every keyword this endpoint accepts; anything else is a caller error.
    all_params = [
        'elements_user_profile_partial_update',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_profile" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation of the required body parameter.
    if self.api_client.client_side_validation and \
            local_var_params.get('elements_user_profile_partial_update') is None:
        raise ApiValueError("Missing the required parameter `elements_user_profile_partial_update` when calling `patch_profile`")  # noqa: E501

    # JSON in, JSON out.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2/users/me', 'PATCH',
        {},  # path params
        [],  # query params
        header_params,
        body=local_var_params.get('elements_user_profile_partial_update'),
        post_params=[],  # no form fields
        files={},  # no file uploads
        response_type='ElementsUserProfile',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer token authentication
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={})
def patch_user(self, id, elements_user_detail_partial_update, **kwargs):  # noqa: E501
    """patch_user  # noqa: E501

    ### Required permissions * User account permission: `None` (read) / `users:manage` (write)  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run asynchronously:

    >>> thread = api.patch_user(id, elements_user_detail_partial_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this User. (required)
    :param ElementsUserDetailPartialUpdate elements_user_detail_partial_update: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total timeout, or a (connection, read) tuple.
    :return: ElementsUserDetail
             If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, keeping only the body.
    kwargs.update(_return_http_data_only=True)
    return self.patch_user_with_http_info(id, elements_user_detail_partial_update, **kwargs)  # noqa: E501
def patch_user_with_http_info(self, id, elements_user_detail_partial_update, **kwargs):  # noqa: E501
    """patch_user  # noqa: E501

    ### Required permissions * User account permission: `None` (read) / `users:manage` (write)  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run asynchronously:

    >>> thread = api.patch_user_with_http_info(id, elements_user_detail_partial_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this User. (required)
    :param ElementsUserDetailPartialUpdate elements_user_detail_partial_update: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total timeout, or a (connection, read) tuple.
    :return: tuple(ElementsUserDetail, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously, returns the request thread.
    """
    local_var_params = locals()

    # Every keyword this endpoint accepts; anything else is a caller error.
    all_params = [
        'id',
        'elements_user_detail_partial_update',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_user" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation of the required parameters.
    if self.api_client.client_side_validation:
        if local_var_params.get('id') is None:
            raise ApiValueError("Missing the required parameter `id` when calling `patch_user`")  # noqa: E501
        if local_var_params.get('elements_user_detail_partial_update') is None:
            raise ApiValueError("Missing the required parameter `elements_user_detail_partial_update` when calling `patch_user`")  # noqa: E501

    # `id` is interpolated into the URL template below.
    path_params = {'id': local_var_params['id']}

    # JSON in, JSON out.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2/users/{id}', 'PATCH',
        path_params,
        [],  # query params
        header_params,
        body=local_var_params.get('elements_user_detail_partial_update'),
        post_params=[],  # no form fields
        files={},  # no file uploads
        response_type='ElementsUserDetail',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer token authentication
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={})
def patch_workstation(self, id, workstation_partial_update, **kwargs):  # noqa: E501
    """patch_workstation  # noqa: E501

    ### Required permissions * Authenticated user * Own workstation or User account permission: `workstations:view` (read) / `workstations:manage` (write)  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run asynchronously:

    >>> thread = api.patch_workstation(id, workstation_partial_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: A unique value identifying this workstation. (required)
    :param WorkstationPartialUpdate workstation_partial_update: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total timeout, or a (connection, read) tuple.
    :return: Workstation
             If the method is called asynchronously, returns the request thread.
    """
    # Delegate to the *_with_http_info variant, keeping only the body.
    kwargs.update(_return_http_data_only=True)
    return self.patch_workstation_with_http_info(id, workstation_partial_update, **kwargs)  # noqa: E501
def patch_workstation_with_http_info(self, id, workstation_partial_update, **kwargs):  # noqa: E501
    """patch_workstation  # noqa: E501

    ### Required permissions * Authenticated user * Own workstation or User account permission: `workstations:view` (read) / `workstations:manage` (write)  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run asynchronously:

    >>> thread = api.patch_workstation_with_http_info(id, workstation_partial_update, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str id: A unique value identifying this workstation. (required)
    :param WorkstationPartialUpdate workstation_partial_update: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object is
                             returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total timeout, or a (connection, read) tuple.
    :return: tuple(Workstation, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously, returns the request thread.
    """
    local_var_params = locals()

    # Every keyword this endpoint accepts; anything else is a caller error.
    all_params = [
        'id',
        'workstation_partial_update',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_workstation" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Client-side validation of the required parameters.
    if self.api_client.client_side_validation:
        if local_var_params.get('id') is None:
            raise ApiValueError("Missing the required parameter `id` when calling `patch_workstation`")  # noqa: E501
        if local_var_params.get('workstation_partial_update') is None:
            raise ApiValueError("Missing the required parameter `workstation_partial_update` when calling `patch_workstation`")  # noqa: E501

    # `id` is interpolated into the URL template below.
    path_params = {'id': local_var_params['id']}

    # JSON in, JSON out.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),
    }

    return self.api_client.call_api(
        '/api/2/workstations/{id}', 'PATCH',
        path_params,
        [],  # query params
        header_params,
        body=local_var_params.get('workstation_partial_update'),
        post_params=[],  # no form fields
        files={},  # no file uploads
        response_type='Workstation',  # noqa: E501
        auth_settings=['Bearer'],  # Bearer token authentication
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={})
def preview_user(self, user_preview_request, **kwargs): # noqa: E501
    """preview_user # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request on a
    worker thread and receive that thread back (call ``.get()`` for the
    result).

    >>> thread = api.preview_user(user_preview_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param UserPreviewRequest user_preview_request: (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding the body. Default is True.
    :param _request_timeout: a single number for a total-request timeout,
        or a (connection, read) tuple.
    :return: UserPreviewResponse
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only return
    # (no status code / headers). Merging via dict() overrides any value
    # the caller may have supplied, exactly as assignment would.
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.preview_user_with_http_info(user_preview_request, **kwargs) # noqa: E501
def preview_user_with_http_info(self, user_preview_request, **kwargs): # noqa: E501
    """preview_user # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.preview_user_with_http_info(user_preview_request, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param UserPreviewRequest user_preview_request: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(UserPreviewResponse, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot the call arguments. This must be the first statement so the
    # dict contains exactly `self`, the named parameters, and `kwargs`.
    local_var_params = locals()
    all_params = ['user_preview_request'] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments, then fold the accepted ones into
    # the snapshot and drop the raw kwargs dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method preview_user" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'user_preview_request' is set
    if self.api_client.client_side_validation and ('user_preview_request' not in local_var_params or # noqa: E501
                                                   local_var_params['user_preview_request'] is None): # noqa: E501
        raise ApiValueError("Missing the required parameter `user_preview_request` when calling `preview_user`") # noqa: E501
    collection_formats = {}
    # This endpoint takes no path/query/form parameters; the request body
    # is the serialized UserPreviewRequest.
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'user_preview_request' in local_var_params:
        body_params = local_var_params['user_preview_request']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json']) # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
        ['application/json']) # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer'] # noqa: E501
    return self.api_client.call_api(
        '/api/2/users/preview', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='UserPreviewResponse', # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def reboot(self, **kwargs): # noqa: E501
    """reboot # noqa: E501

    ### Required permissions * User account permission: `system:admin-access` # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request on a
    worker thread and receive that thread back (call ``.get()`` for the
    result).

    >>> thread = api.reboot(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding the body. Default is True.
    :param _request_timeout: a single number for a total-request timeout,
        or a (connection, read) tuple.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only return
    # (no status code / headers).
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.reboot_with_http_info(**kwargs) # noqa: E501
def reboot_with_http_info(self, **kwargs): # noqa: E501
    """reboot # noqa: E501
    ### Required permissions * User account permission: `system:admin-access` # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.reboot_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot the call arguments. This must be the first statement so the
    # dict contains exactly `self` and `kwargs`.
    local_var_params = locals()
    all_params = [] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments, then fold the accepted ones into
    # the snapshot and drop the raw kwargs dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method reboot" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    collection_formats = {}
    # This endpoint takes no parameters at all: no path/query/header/form
    # values and no request body.
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = ['Bearer'] # noqa: E501
    return self.api_client.call_api(
        '/api/2/system/reboot', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None, # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def register_upload(self, register_upload_endpoint_request, **kwargs): # noqa: E501
    """register_upload # noqa: E501

    ### Required permissions * Authenticated user # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request on a
    worker thread and receive that thread back (call ``.get()`` for the
    result).

    >>> thread = api.register_upload(register_upload_endpoint_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param RegisterUploadEndpointRequest register_upload_endpoint_request: (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding the body. Default is True.
    :param _request_timeout: a single number for a total-request timeout,
        or a (connection, read) tuple.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only return
    # (no status code / headers).
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.register_upload_with_http_info(register_upload_endpoint_request, **kwargs) # noqa: E501
def register_upload_with_http_info(self, register_upload_endpoint_request, **kwargs): # noqa: E501
    """register_upload # noqa: E501
    ### Required permissions * Authenticated user # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.register_upload_with_http_info(register_upload_endpoint_request, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param RegisterUploadEndpointRequest register_upload_endpoint_request: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot the call arguments. This must be the first statement so the
    # dict contains exactly `self`, the named parameters, and `kwargs`.
    local_var_params = locals()
    all_params = ['register_upload_endpoint_request'] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments, then fold the accepted ones into
    # the snapshot and drop the raw kwargs dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method register_upload" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'register_upload_endpoint_request' is set
    if self.api_client.client_side_validation and ('register_upload_endpoint_request' not in local_var_params or # noqa: E501
                                                   local_var_params['register_upload_endpoint_request'] is None): # noqa: E501
        raise ApiValueError("Missing the required parameter `register_upload_endpoint_request` when calling `register_upload`") # noqa: E501
    collection_formats = {}
    # No path/query/form parameters; the request body is the serialized
    # RegisterUploadEndpointRequest.
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'register_upload_endpoint_request' in local_var_params:
        body_params = local_var_params['register_upload_endpoint_request']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
        ['application/json']) # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer'] # noqa: E501
    return self.api_client.call_api(
        '/api/2/uploads/register', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None, # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def register_upload_metadata(self, register_upload_metadata_endpoint_request, **kwargs): # noqa: E501
    """register_upload_metadata # noqa: E501

    ### Required permissions * User account permission: `media:access` # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request on a
    worker thread and receive that thread back (call ``.get()`` for the
    result).

    >>> thread = api.register_upload_metadata(register_upload_metadata_endpoint_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param RegisterUploadMetadataEndpointRequest register_upload_metadata_endpoint_request: (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding the body. Default is True.
    :param _request_timeout: a single number for a total-request timeout,
        or a (connection, read) tuple.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only return
    # (no status code / headers).
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.register_upload_metadata_with_http_info(register_upload_metadata_endpoint_request, **kwargs) # noqa: E501
def register_upload_metadata_with_http_info(self, register_upload_metadata_endpoint_request, **kwargs): # noqa: E501
    """register_upload_metadata # noqa: E501
    ### Required permissions * User account permission: `media:access` # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.register_upload_metadata_with_http_info(register_upload_metadata_endpoint_request, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param RegisterUploadMetadataEndpointRequest register_upload_metadata_endpoint_request: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot the call arguments. This must be the first statement so the
    # dict contains exactly `self`, the named parameters, and `kwargs`.
    local_var_params = locals()
    all_params = ['register_upload_metadata_endpoint_request'] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments, then fold the accepted ones into
    # the snapshot and drop the raw kwargs dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method register_upload_metadata" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'register_upload_metadata_endpoint_request' is set
    if self.api_client.client_side_validation and ('register_upload_metadata_endpoint_request' not in local_var_params or # noqa: E501
                                                   local_var_params['register_upload_metadata_endpoint_request'] is None): # noqa: E501
        raise ApiValueError("Missing the required parameter `register_upload_metadata_endpoint_request` when calling `register_upload_metadata`") # noqa: E501
    collection_formats = {}
    # No path/query/form parameters; the request body is the serialized
    # RegisterUploadMetadataEndpointRequest.
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'register_upload_metadata_endpoint_request' in local_var_params:
        body_params = local_var_params['register_upload_metadata_endpoint_request']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
        ['application/json']) # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer'] # noqa: E501
    return self.api_client.call_api(
        '/api/2/uploads/metadata', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None, # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def render_email_template_preview(self, email_preview, **kwargs): # noqa: E501
    """render_email_template_preview # noqa: E501

    ### Required permissions * User account permission: `system:admin-access` # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request on a
    worker thread and receive that thread back (call ``.get()`` for the
    result).

    >>> thread = api.render_email_template_preview(email_preview, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param EmailPreview email_preview: (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding the body. Default is True.
    :param _request_timeout: a single number for a total-request timeout,
        or a (connection, read) tuple.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only return
    # (no status code / headers).
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.render_email_template_preview_with_http_info(email_preview, **kwargs) # noqa: E501
def render_email_template_preview_with_http_info(self, email_preview, **kwargs): # noqa: E501
    """render_email_template_preview # noqa: E501
    ### Required permissions * User account permission: `system:admin-access` # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.render_email_template_preview_with_http_info(email_preview, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param EmailPreview email_preview: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot the call arguments. This must be the first statement so the
    # dict contains exactly `self`, the named parameters, and `kwargs`.
    local_var_params = locals()
    all_params = ['email_preview'] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments, then fold the accepted ones into
    # the snapshot and drop the raw kwargs dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method render_email_template_preview" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'email_preview' is set
    if self.api_client.client_side_validation and ('email_preview' not in local_var_params or # noqa: E501
                                                   local_var_params['email_preview'] is None): # noqa: E501
        raise ApiValueError("Missing the required parameter `email_preview` when calling `render_email_template_preview`") # noqa: E501
    collection_formats = {}
    # No path/query/form parameters; the request body is the serialized
    # EmailPreview.
    path_params = {}
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'email_preview' in local_var_params:
        body_params = local_var_params['email_preview']
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
        ['application/json']) # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer'] # noqa: E501
    return self.api_client.call_api(
        '/api/2/system/smtp/preview', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None, # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def reset_user_password(self, id, **kwargs): # noqa: E501
    """reset_user_password # noqa: E501

    ### Required permissions * User account permission: `users:manage` # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request on a
    worker thread and receive that thread back (call ``.get()`` for the
    result).

    >>> thread = api.reset_user_password(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this User. (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding the body. Default is True.
    :param _request_timeout: a single number for a total-request timeout,
        or a (connection, read) tuple.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only return
    # (no status code / headers).
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.reset_user_password_with_http_info(id, **kwargs) # noqa: E501
def reset_user_password_with_http_info(self, id, **kwargs): # noqa: E501
    """reset_user_password # noqa: E501
    ### Required permissions * User account permission: `users:manage` # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.reset_user_password_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this User. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot the call arguments. This must be the first statement so the
    # dict contains exactly `self`, the named parameters, and `kwargs`.
    local_var_params = locals()
    all_params = ['id'] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments, then fold the accepted ones into
    # the snapshot and drop the raw kwargs dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method reset_user_password" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
                                                   local_var_params['id'] is None): # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `reset_user_password`") # noqa: E501
    collection_formats = {}
    # `id` is substituted into the {id} segment of the URL; there is no
    # query string or request body.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id'] # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = ['Bearer'] # noqa: E501
    return self.api_client.call_api(
        '/api/2/users/{id}/password/reset', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None, # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def run_service_operation(self, id, operation, service, **kwargs): # noqa: E501
    """run_service_operation # noqa: E501

    ### Required permissions * User account permission: `system:admin-access` # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request on a
    worker thread and receive that thread back (call ``.get()`` for the
    result).

    >>> thread = api.run_service_operation(id, operation, service, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Storage Node. (required)
    :param str operation: (required)
    :param str service: (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding the body. Default is True.
    :param _request_timeout: a single number for a total-request timeout,
        or a (connection, read) tuple.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only return
    # (no status code / headers).
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.run_service_operation_with_http_info(id, operation, service, **kwargs) # noqa: E501
def run_service_operation_with_http_info(self, id, operation, service, **kwargs): # noqa: E501
    """run_service_operation # noqa: E501
    ### Required permissions * User account permission: `system:admin-access` # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.run_service_operation_with_http_info(id, operation, service, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Storage Node. (required)
    :param str operation: (required)
    :param str service: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot the call arguments. This must be the first statement so the
    # dict contains exactly `self`, the named parameters, and `kwargs`.
    local_var_params = locals()
    all_params = ['id', 'operation', 'service'] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments, then fold the accepted ones into
    # the snapshot and drop the raw kwargs dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method run_service_operation" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
                                                   local_var_params['id'] is None): # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `run_service_operation`") # noqa: E501
    # verify the required parameter 'operation' is set
    if self.api_client.client_side_validation and ('operation' not in local_var_params or # noqa: E501
                                                   local_var_params['operation'] is None): # noqa: E501
        raise ApiValueError("Missing the required parameter `operation` when calling `run_service_operation`") # noqa: E501
    # verify the required parameter 'service' is set
    if self.api_client.client_side_validation and ('service' not in local_var_params or # noqa: E501
                                                   local_var_params['service'] is None): # noqa: E501
        raise ApiValueError("Missing the required parameter `service` when calling `run_service_operation`") # noqa: E501
    collection_formats = {}
    # All three values are substituted into the URL template; there is no
    # query string or request body.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id'] # noqa: E501
    if 'operation' in local_var_params:
        path_params['operation'] = local_var_params['operation'] # noqa: E501
    if 'service' in local_var_params:
        path_params['service'] = local_var_params['service'] # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # Authentication setting
    auth_settings = ['Bearer'] # noqa: E501
    return self.api_client.call_api(
        '/api/2/nodes/{id}/services/{service}/{operation}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None, # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def set_ipmi_configuration(self, id, ipmi, **kwargs): # noqa: E501
    """set_ipmi_configuration # noqa: E501

    ### Required permissions * User account permission: `system:admin-access` # noqa: E501

    Synchronous by default; pass ``async_req=True`` to run the request on a
    worker thread and receive that thread back (call ``.get()`` for the
    result).

    >>> thread = api.set_ipmi_configuration(id, ipmi, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Storage Node. (required)
    :param Ipmi ipmi: (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
        without reading/decoding the body. Default is True.
    :param _request_timeout: a single number for a total-request timeout,
        or a (connection, read) tuple.
    :return: Ipmi
        If the method is called asynchronously,
        returns the request thread.
    """
    # Delegate to the *_with_http_info variant, forcing data-only return
    # (no status code / headers).
    kwargs = dict(kwargs, _return_http_data_only=True)
    return self.set_ipmi_configuration_with_http_info(id, ipmi, **kwargs) # noqa: E501
def set_ipmi_configuration_with_http_info(self, id, ipmi, **kwargs): # noqa: E501
    """set_ipmi_configuration # noqa: E501
    ### Required permissions * User account permission: `system:admin-access` # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.set_ipmi_configuration_with_http_info(id, ipmi, async_req=True)
    >>> result = thread.get()
    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Storage Node. (required)
    :param Ipmi ipmi: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
                             be returned without reading/decoding response
                             data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
                             number provided, it will be total request
                             timeout. It can also be a pair (tuple) of
                             (connection, read) timeouts.
    :return: tuple(Ipmi, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    # Snapshot the call arguments. This must be the first statement so the
    # dict contains exactly `self`, the named parameters, and `kwargs`.
    local_var_params = locals()
    all_params = ['id', 'ipmi'] # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject unknown keyword arguments, then fold the accepted ones into
    # the snapshot and drop the raw kwargs dict.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_ipmi_configuration" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
                                                   local_var_params['id'] is None): # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `set_ipmi_configuration`") # noqa: E501
    # verify the required parameter 'ipmi' is set
    if self.api_client.client_side_validation and ('ipmi' not in local_var_params or # noqa: E501
                                                   local_var_params['ipmi'] is None): # noqa: E501
        raise ApiValueError("Missing the required parameter `ipmi` when calling `set_ipmi_configuration`") # noqa: E501
    collection_formats = {}
    # `id` goes into the URL path; the Ipmi object becomes the PUT body.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id'] # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'ipmi' in local_var_params:
        body_params = local_var_params['ipmi']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json']) # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
        ['application/json']) # noqa: E501
    # Authentication setting
    auth_settings = ['Bearer'] # noqa: E501
    return self.api_client.call_api(
        '/api/2/nodes/{id}/ipmi', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Ipmi', # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def set_local_time(self, time_endpoint_request, **kwargs):  # noqa: E501
    """set_local_time  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.set_local_time(time_endpoint_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param TimeEndpointRequest time_endpoint_request: (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: TimeEndpointResponse
             If the method is called asynchronously, returns the request
             thread.
    """
    # Delegate to the *_with_http_info variant, requesting the response
    # body only (status code and headers are dropped).
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.set_local_time_with_http_info(time_endpoint_request, **call_kwargs)  # noqa: E501
def set_local_time_with_http_info(self, time_endpoint_request, **kwargs):  # noqa: E501
    """set_local_time  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.set_local_time_with_http_info(time_endpoint_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param TimeEndpointRequest time_endpoint_request: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: tuple(TimeEndpointResponse, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously, returns the request
             thread.
    """
    # Keyword arguments this method understands.
    accepted = [
        'time_endpoint_request',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    params = {'time_endpoint_request': time_endpoint_request}
    for name, value in kwargs.items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_local_time" % name
            )
        params[name] = value
    # Client-side check of the required body parameter.
    if self.api_client.client_side_validation and params.get('time_endpoint_request') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `time_endpoint_request` when calling `set_local_time`")  # noqa: E501
    # HTTP headers `Accept` and `Content-Type`.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/api/2/system/time', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params.get('time_endpoint_request'),
        post_params=[],
        files={},
        response_type='TimeEndpointResponse',  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def set_my_password(self, change_own_password_request, **kwargs):  # noqa: E501
    """set_my_password  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.set_my_password(change_own_password_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param ChangeOwnPasswordRequest change_own_password_request: (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: None
             If the method is called asynchronously, returns the request
             thread.
    """
    # Delegate to the *_with_http_info variant, requesting the response
    # body only (status code and headers are dropped).
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.set_my_password_with_http_info(change_own_password_request, **call_kwargs)  # noqa: E501
def set_my_password_with_http_info(self, change_own_password_request, **kwargs):  # noqa: E501
    """set_my_password  # noqa: E501

    ### Required permissions * Authenticated user  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.set_my_password_with_http_info(change_own_password_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param ChangeOwnPasswordRequest change_own_password_request: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: None
             If the method is called asynchronously, returns the request
             thread.
    """
    # Keyword arguments this method understands.
    accepted = [
        'change_own_password_request',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    params = {'change_own_password_request': change_own_password_request}
    for name, value in kwargs.items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_my_password" % name
            )
        params[name] = value
    # Client-side check of the required body parameter.
    if self.api_client.client_side_validation and params.get('change_own_password_request') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `change_own_password_request` when calling `set_my_password`")  # noqa: E501
    # HTTP header `Content-Type` (this endpoint declares no `Accept`).
    header_params = {
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/api/2/users/me/password', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params.get('change_own_password_request'),
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def set_user_password(self, id, change_password_request, **kwargs):  # noqa: E501
    """set_user_password  # noqa: E501

    ### Required permissions * User account permission: `users:manage`  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.set_user_password(id, change_password_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this User. (required)
    :param ChangePasswordRequest change_password_request: (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: None
             If the method is called asynchronously, returns the request
             thread.
    """
    # Delegate to the *_with_http_info variant, requesting the response
    # body only (status code and headers are dropped).
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.set_user_password_with_http_info(id, change_password_request, **call_kwargs)  # noqa: E501
def set_user_password_with_http_info(self, id, change_password_request, **kwargs):  # noqa: E501
    """set_user_password  # noqa: E501

    ### Required permissions * User account permission: `users:manage`  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.set_user_password_with_http_info(id, change_password_request, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this User. (required)
    :param ChangePasswordRequest change_password_request: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: None
             If the method is called asynchronously, returns the request
             thread.
    """
    # Keyword arguments this method understands.
    accepted = [
        'id',
        'change_password_request',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    params = {'id': id, 'change_password_request': change_password_request}
    for name, value in kwargs.items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method set_user_password" % name
            )
        params[name] = value
    # Client-side checks of the required parameters.
    if self.api_client.client_side_validation and params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `set_user_password`")  # noqa: E501
    if self.api_client.client_side_validation and params.get('change_password_request') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `change_password_request` when calling `set_user_password`")  # noqa: E501
    # `id` is interpolated into the URL path.
    path_params = {'id': params['id']}  # noqa: E501
    # HTTP header `Content-Type` (this endpoint declares no `Accept`).
    header_params = {
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/api/2/users/{id}/password', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=params.get('change_password_request'),
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def shutdown(self, **kwargs):  # noqa: E501
    """shutdown  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.shutdown(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: None
             If the method is called asynchronously, returns the request
             thread.
    """
    # Delegate to the *_with_http_info variant, requesting the response
    # body only (status code and headers are dropped).
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.shutdown_with_http_info(**call_kwargs)  # noqa: E501
def shutdown_with_http_info(self, **kwargs):  # noqa: E501
    """shutdown  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.shutdown_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: None
             If the method is called asynchronously, returns the request
             thread.
    """
    # Only the generic request-control keywords are accepted here; the
    # endpoint itself takes no parameters.
    accepted = [
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    params = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method shutdown" % name
            )
        params[name] = value
    return self.api_client.call_api(
        '/api/2/system/shutdown', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        {},  # no header parameters
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def start_solr_reindex(self, **kwargs):  # noqa: E501
    """start_solr_reindex  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.start_solr_reindex(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: SolrReindexEndpointResponse
             If the method is called asynchronously, returns the request
             thread.
    """
    # Delegate to the *_with_http_info variant, requesting the response
    # body only (status code and headers are dropped).
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.start_solr_reindex_with_http_info(**call_kwargs)  # noqa: E501
def start_solr_reindex_with_http_info(self, **kwargs):  # noqa: E501
    """start_solr_reindex  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.start_solr_reindex_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: tuple(SolrReindexEndpointResponse, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously, returns the request
             thread.
    """
    # Only the generic request-control keywords are accepted here; the
    # endpoint itself takes no parameters.
    accepted = [
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    params = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method start_solr_reindex" % name
            )
        params[name] = value
    # HTTP header `Accept`.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/api/2/system/solr/reindex', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SolrReindexEndpointResponse',  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def start_support_session(self, **kwargs):  # noqa: E501
    """start_support_session  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.start_support_session(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: TaskInfo
             If the method is called asynchronously, returns the request
             thread.
    """
    # Delegate to the *_with_http_info variant, requesting the response
    # body only (status code and headers are dropped).
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.start_support_session_with_http_info(**call_kwargs)  # noqa: E501
def start_support_session_with_http_info(self, **kwargs):  # noqa: E501
    """start_support_session  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.start_support_session_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: tuple(TaskInfo, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously, returns the request
             thread.
    """
    # Only the generic request-control keywords are accepted here; the
    # endpoint itself takes no parameters.
    accepted = [
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    params = {}
    for name, value in kwargs.items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method start_support_session" % name
            )
        params[name] = value
    # HTTP header `Accept`.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/api/2/system/support-session/start', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='TaskInfo',  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def start_system_backup(self, path, **kwargs):  # noqa: E501
    """start_system_backup  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.start_system_backup(path, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param Path path: (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: TaskInfo
             If the method is called asynchronously, returns the request
             thread.
    """
    # Delegate to the *_with_http_info variant, requesting the response
    # body only (status code and headers are dropped).
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.start_system_backup_with_http_info(path, **call_kwargs)  # noqa: E501
def start_system_backup_with_http_info(self, path, **kwargs):  # noqa: E501
    """start_system_backup  # noqa: E501

    ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.start_system_backup_with_http_info(path, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param Path path: (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: tuple(TaskInfo, status_code(int), headers(HTTPHeaderDict))
             If the method is called asynchronously, returns the request
             thread.
    """
    # Keyword arguments this method understands.
    accepted = [
        'path',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    params = {'path': path}
    for name, value in kwargs.items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method start_system_backup" % name
            )
        params[name] = value
    # Client-side check of the required body parameter. NOTE: `path` is the
    # request body here, not a URL path parameter.
    if self.api_client.client_side_validation and params.get('path') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `path` when calling `start_system_backup`")  # noqa: E501
    # HTTP headers `Accept` and `Content-Type`.
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }
    return self.api_client.call_api(
        '/api/2/system/backup/start', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params.get('path'),
        post_params=[],
        files={},
        response_type='TaskInfo',  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def sync_ldap_group(self, id, **kwargs):  # noqa: E501
    """sync_ldap_group  # noqa: E501

    ### Required permissions * User account permission: `users:manage`  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.sync_ldap_group(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Group. (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: None
             If the method is called asynchronously, returns the request
             thread.
    """
    # Delegate to the *_with_http_info variant, requesting the response
    # body only (status code and headers are dropped).
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.sync_ldap_group_with_http_info(id, **call_kwargs)  # noqa: E501
def sync_ldap_group_with_http_info(self, id, **kwargs):  # noqa: E501
    """sync_ldap_group  # noqa: E501

    ### Required permissions * User account permission: `users:manage`  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.sync_ldap_group_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this Group. (required)
    :param _return_http_data_only: response data without head status code
                                   and headers
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: None
             If the method is called asynchronously, returns the request
             thread.
    """
    # Keyword arguments this method understands.
    accepted = [
        'id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    params = {'id': id}
    for name, value in kwargs.items():
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method sync_ldap_group" % name
            )
        params[name] = value
    # Client-side check of the required path parameter.
    if self.api_client.client_side_validation and params.get('id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `id` when calling `sync_ldap_group`")  # noqa: E501
    # `id` is interpolated into the URL path.
    path_params = {'id': params['id']}  # noqa: E501
    return self.api_client.call_api(
        '/api/2/groups/{id}/ldap-sync', 'POST',
        path_params,
        [],  # no query parameters
        {},  # no header parameters
        body=None,
        post_params=[],
        files={},
        response_type=None,  # noqa: E501
        auth_settings=['Bearer'],  # authentication setting
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def sync_ldap_users(self, id, **kwargs):  # noqa: E501
    """sync_ldap_users  # noqa: E501

    ### Required permissions * User account permission: `users:manage`  # noqa: E501
    Synchronous by default; pass async_req=True to get a request thread
    instead and fetch the result with thread.get().

    >>> thread = api.sync_ldap_users(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param int id: A unique integer value identifying this LDAP Server. (required)
    :param _preload_content: if False, return the raw urllib3.HTTPResponse
                             object without reading/decoding response data.
                             Default is True.
    :param _request_timeout: timeout for this request: a single number for a
                             total-request timeout, or a (connection, read)
                             tuple.
    :return: None
             If the method is called asynchronously, returns the request
             thread.
    """
    # Delegate to the *_with_http_info variant, requesting the response
    # body only (status code and headers are dropped).
    call_kwargs = dict(kwargs, _return_http_data_only=True)
    return self.sync_ldap_users_with_http_info(id, **call_kwargs)  # noqa: E501
def sync_ldap_users_with_http_info(self, id, **kwargs): # noqa: E501
"""sync_ldap_users # noqa: E501
### Required permissions * User account permission: `users:manage` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.sync_ldap_users_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this LDAP Server. (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method sync_ldap_users" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `sync_ldap_users`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/ldap-servers/{id}/sync-users', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def sync_time(self, time_sync_endpoint_request, **kwargs): # noqa: E501
"""sync_time # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.sync_time(time_sync_endpoint_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param TimeSyncEndpointRequest time_sync_endpoint_request: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: TimeSyncEndpointResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.sync_time_with_http_info(time_sync_endpoint_request, **kwargs) # noqa: E501
def sync_time_with_http_info(self, time_sync_endpoint_request, **kwargs): # noqa: E501
"""sync_time # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.sync_time_with_http_info(time_sync_endpoint_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param TimeSyncEndpointRequest time_sync_endpoint_request: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(TimeSyncEndpointResponse, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['time_sync_endpoint_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method sync_time" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'time_sync_endpoint_request' is set
if self.api_client.client_side_validation and ('time_sync_endpoint_request' not in local_var_params or # noqa: E501
local_var_params['time_sync_endpoint_request'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `time_sync_endpoint_request` when calling `sync_time`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'time_sync_endpoint_request' in local_var_params:
body_params = local_var_params['time_sync_endpoint_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/system/time/sync', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='TimeSyncEndpointResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def sync_user_totp(self, id, sync_totp_request, **kwargs): # noqa: E501
"""sync_user_totp # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.sync_user_totp(id, sync_totp_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this User. (required)
:param SyncTOTPRequest sync_totp_request: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: SyncTOTP
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.sync_user_totp_with_http_info(id, sync_totp_request, **kwargs) # noqa: E501
def sync_user_totp_with_http_info(self, id, sync_totp_request, **kwargs): # noqa: E501
"""sync_user_totp # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.sync_user_totp_with_http_info(id, sync_totp_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this User. (required)
:param SyncTOTPRequest sync_totp_request: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(SyncTOTP, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'sync_totp_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method sync_user_totp" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `sync_user_totp`") # noqa: E501
# verify the required parameter 'sync_totp_request' is set
if self.api_client.client_side_validation and ('sync_totp_request' not in local_var_params or # noqa: E501
local_var_params['sync_totp_request'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `sync_totp_request` when calling `sync_user_totp`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'sync_totp_request' in local_var_params:
body_params = local_var_params['sync_totp_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/users/{id}/totp', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SyncTOTP', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def test_smtp_configuration(self, test_smtp, **kwargs): # noqa: E501
"""test_smtp_configuration # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.test_smtp_configuration(test_smtp, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param TestSMTP test_smtp: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.test_smtp_configuration_with_http_info(test_smtp, **kwargs) # noqa: E501
def test_smtp_configuration_with_http_info(self, test_smtp, **kwargs): # noqa: E501
"""test_smtp_configuration # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.test_smtp_configuration_with_http_info(test_smtp, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param TestSMTP test_smtp: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['test_smtp'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method test_smtp_configuration" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'test_smtp' is set
if self.api_client.client_side_validation and ('test_smtp' not in local_var_params or # noqa: E501
local_var_params['test_smtp'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `test_smtp` when calling `test_smtp_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'test_smtp' in local_var_params:
body_params = local_var_params['test_smtp']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/system/smtp/test', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_certificate_configuration(self, certificate, **kwargs): # noqa: E501
"""update_certificate_configuration # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_certificate_configuration(certificate, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param Certificate certificate: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Certificate
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_certificate_configuration_with_http_info(certificate, **kwargs) # noqa: E501
def update_certificate_configuration_with_http_info(self, certificate, **kwargs): # noqa: E501
"""update_certificate_configuration # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_certificate_configuration_with_http_info(certificate, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param Certificate certificate: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Certificate, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['certificate'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_certificate_configuration" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'certificate' is set
if self.api_client.client_side_validation and ('certificate' not in local_var_params or # noqa: E501
local_var_params['certificate'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `certificate` when calling `update_certificate_configuration`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'certificate' in local_var_params:
body_params = local_var_params['certificate']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/system/certificate', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Certificate', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_current_workstation(self, workstation, **kwargs): # noqa: E501
"""update_current_workstation # noqa: E501
### Required permissions * <class 'rest_framework.permissions.AllowAny'> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_current_workstation(workstation, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param Workstation workstation: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Workstation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_current_workstation_with_http_info(workstation, **kwargs) # noqa: E501
def update_current_workstation_with_http_info(self, workstation, **kwargs): # noqa: E501
"""update_current_workstation # noqa: E501
### Required permissions * <class 'rest_framework.permissions.AllowAny'> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_current_workstation_with_http_info(workstation, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param Workstation workstation: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Workstation, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['workstation'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_current_workstation" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'workstation' is set
if self.api_client.client_side_validation and ('workstation' not in local_var_params or # noqa: E501
local_var_params['workstation'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `workstation` when calling `update_current_workstation`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'workstation' in local_var_params:
body_params = local_var_params['workstation']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/workstations/current', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Workstation', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_download_archive(self, id, download_archive, **kwargs): # noqa: E501
"""update_download_archive # noqa: E501
### Required permissions * Authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_download_archive(id, download_archive, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: A UUID string identifying this download archive. (required)
:param DownloadArchive download_archive: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: DownloadArchive
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_download_archive_with_http_info(id, download_archive, **kwargs) # noqa: E501
def update_download_archive_with_http_info(self, id, download_archive, **kwargs): # noqa: E501
"""update_download_archive # noqa: E501
### Required permissions * Authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_download_archive_with_http_info(id, download_archive, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: A UUID string identifying this download archive. (required)
:param DownloadArchive download_archive: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(DownloadArchive, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'download_archive'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_download_archive" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `update_download_archive`") # noqa: E501
# verify the required parameter 'download_archive' is set
if self.api_client.client_side_validation and ('download_archive' not in local_var_params or # noqa: E501
local_var_params['download_archive'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `download_archive` when calling `update_download_archive`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'download_archive' in local_var_params:
body_params = local_var_params['download_archive']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/download-archive/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DownloadArchive', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_group(self, id, elements_group_detail, **kwargs): # noqa: E501
"""update_group # noqa: E501
### Required permissions * User account permission: `users:view` (read) / `users:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_group(id, elements_group_detail, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this Group. (required)
:param ElementsGroupDetail elements_group_detail: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ElementsGroupDetail
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_group_with_http_info(id, elements_group_detail, **kwargs) # noqa: E501
def update_group_with_http_info(self, id, elements_group_detail, **kwargs): # noqa: E501
"""update_group # noqa: E501
### Required permissions * User account permission: `users:view` (read) / `users:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_group_with_http_info(id, elements_group_detail, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this Group. (required)
:param ElementsGroupDetail elements_group_detail: (required)
:param _return_http_data_only: response data without head status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(ElementsGroupDetail, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'elements_group_detail'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method update_group" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in local_var_params or # noqa: E501
local_var_params['id'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `id` when calling `update_group`") # noqa: E501
# verify the required parameter 'elements_group_detail' is set
if self.api_client.client_side_validation and ('elements_group_detail' not in local_var_params or # noqa: E501
local_var_params['elements_group_detail'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `elements_group_detail` when calling `update_group`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'elements_group_detail' in local_var_params:
body_params = local_var_params['elements_group_detail']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/api/2/groups/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ElementsGroupDetail', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_ntp_server(self, id, ntp_server, **kwargs): # noqa: E501
"""update_ntp_server # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_ntp_server(id, ntp_server, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this NTP Server. (required)
:param NTPServer ntp_server: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: NTPServer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_ntp_server_with_http_info(id, ntp_server, **kwargs) # noqa: E501
    def update_ntp_server_with_http_info(self, id, ntp_server, **kwargs):  # noqa: E501
        """update_ntp_server  # noqa: E501
        ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_ntp_server_with_http_info(id, ntp_server, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param int id: A unique integer value identifying this NTP Server. (required)
        :param NTPServer ntp_server: (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(NTPServer, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must run before any other local is created so the map
        # holds exactly `self`, the named parameters and `kwargs`.
        local_var_params = locals()
        all_params = ['id', 'ntp_server']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Fold **kwargs into the parameter map, rejecting unknown keywords.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_ntp_server" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                        local_var_params['id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `update_ntp_server`")  # noqa: E501
        # verify the required parameter 'ntp_server' is set
        if self.api_client.client_side_validation and ('ntp_server' not in local_var_params or  # noqa: E501
                                                        local_var_params['ntp_server'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `ntp_server` when calling `update_ntp_server`")  # noqa: E501
        # Assemble the request: `id` goes in the URL path; no query or form
        # parameters are used by this endpoint.
        collection_formats = {}
        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The NTPServer payload is sent as the JSON request body.
        if 'ntp_server' in local_var_params:
            body_params = local_var_params['ntp_server']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        # Issue the PUT via the shared ApiClient; returns a thread if
        # async_req was passed truthy.
        return self.api_client.call_api(
            '/api/2/system/time/servers/{id}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='NTPServer',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def update_parameters(self, parameters, **kwargs): # noqa: E501
"""update_parameters # noqa: E501
### Required permissions * <class 'rest_framework.permissions.AllowAny'> # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_parameters(parameters, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param Parameters parameters: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Parameters
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_parameters_with_http_info(parameters, **kwargs) # noqa: E501
    def update_parameters_with_http_info(self, parameters, **kwargs):  # noqa: E501
        """update_parameters  # noqa: E501
        ### Required permissions * <class 'rest_framework.permissions.AllowAny'>  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_parameters_with_http_info(parameters, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param Parameters parameters: (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(Parameters, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must run before any other local is created so the map
        # holds exactly `self`, the named parameters and `kwargs`.
        local_var_params = locals()
        all_params = ['parameters']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Fold **kwargs into the parameter map, rejecting unknown keywords.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_parameters" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'parameters' is set
        if self.api_client.client_side_validation and ('parameters' not in local_var_params or  # noqa: E501
                                                        local_var_params['parameters'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `parameters` when calling `update_parameters`")  # noqa: E501
        # Assemble the request: no path, query, or form parameters are used by
        # this endpoint.
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The Parameters payload is sent as the JSON request body.
        if 'parameters' in local_var_params:
            body_params = local_var_params['parameters']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        # Issue the PUT via the shared ApiClient; returns a thread if
        # async_req was passed truthy.
        return self.api_client.call_api(
            '/api/2/parameters', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Parameters',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def update_profile(self, elements_user_profile, **kwargs): # noqa: E501
"""update_profile # noqa: E501
### Required permissions * Authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_profile(elements_user_profile, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param ElementsUserProfile elements_user_profile: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ElementsUserProfile
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_profile_with_http_info(elements_user_profile, **kwargs) # noqa: E501
    def update_profile_with_http_info(self, elements_user_profile, **kwargs):  # noqa: E501
        """update_profile  # noqa: E501
        ### Required permissions * Authenticated user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_profile_with_http_info(elements_user_profile, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param ElementsUserProfile elements_user_profile: (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(ElementsUserProfile, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must run before any other local is created so the map
        # holds exactly `self`, the named parameters and `kwargs`.
        local_var_params = locals()
        all_params = ['elements_user_profile']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Fold **kwargs into the parameter map, rejecting unknown keywords.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_profile" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'elements_user_profile' is set
        if self.api_client.client_side_validation and ('elements_user_profile' not in local_var_params or  # noqa: E501
                                                        local_var_params['elements_user_profile'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `elements_user_profile` when calling `update_profile`")  # noqa: E501
        # Assemble the request: no path, query, or form parameters are used by
        # this endpoint (it always targets the calling user, `/users/me`).
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The ElementsUserProfile payload is sent as the JSON request body.
        if 'elements_user_profile' in local_var_params:
            body_params = local_var_params['elements_user_profile']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        # Issue the PUT via the shared ApiClient; returns a thread if
        # async_req was passed truthy.
        return self.api_client.call_api(
            '/api/2/users/me', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ElementsUserProfile',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def update_smtp_configuration(self, smtp_configuration, **kwargs): # noqa: E501
"""update_smtp_configuration # noqa: E501
### Required permissions * User account permission: `system:admin-access` # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_smtp_configuration(smtp_configuration, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param SMTPConfiguration smtp_configuration: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: SMTPConfiguration
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_smtp_configuration_with_http_info(smtp_configuration, **kwargs) # noqa: E501
    def update_smtp_configuration_with_http_info(self, smtp_configuration, **kwargs):  # noqa: E501
        """update_smtp_configuration  # noqa: E501
        ### Required permissions * User account permission: `system:admin-access`  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_smtp_configuration_with_http_info(smtp_configuration, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param SMTPConfiguration smtp_configuration: (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(SMTPConfiguration, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must run before any other local is created so the map
        # holds exactly `self`, the named parameters and `kwargs`.
        local_var_params = locals()
        all_params = ['smtp_configuration']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Fold **kwargs into the parameter map, rejecting unknown keywords.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_smtp_configuration" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'smtp_configuration' is set
        if self.api_client.client_side_validation and ('smtp_configuration' not in local_var_params or  # noqa: E501
                                                        local_var_params['smtp_configuration'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `smtp_configuration` when calling `update_smtp_configuration`")  # noqa: E501
        # Assemble the request: no path, query, or form parameters are used by
        # this endpoint.
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The SMTPConfiguration payload is sent as the JSON request body.
        if 'smtp_configuration' in local_var_params:
            body_params = local_var_params['smtp_configuration']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        # Issue the PUT via the shared ApiClient; returns a thread if
        # async_req was passed truthy.
        return self.api_client.call_api(
            '/api/2/system/smtp', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='SMTPConfiguration',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def update_user(self, id, elements_user_detail, **kwargs): # noqa: E501
"""update_user # noqa: E501
### Required permissions * User account permission: `None` (read) / `users:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_user(id, elements_user_detail, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int id: A unique integer value identifying this User. (required)
:param ElementsUserDetail elements_user_detail: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: ElementsUserDetail
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_user_with_http_info(id, elements_user_detail, **kwargs) # noqa: E501
    def update_user_with_http_info(self, id, elements_user_detail, **kwargs):  # noqa: E501
        """update_user  # noqa: E501
        ### Required permissions * User account permission: `None` (read) / `users:manage` (write)  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_user_with_http_info(id, elements_user_detail, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param int id: A unique integer value identifying this User. (required)
        :param ElementsUserDetail elements_user_detail: (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(ElementsUserDetail, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must run before any other local is created so the map
        # holds exactly `self`, the named parameters and `kwargs`.
        local_var_params = locals()
        all_params = ['id', 'elements_user_detail']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Fold **kwargs into the parameter map, rejecting unknown keywords.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_user" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                        local_var_params['id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `update_user`")  # noqa: E501
        # verify the required parameter 'elements_user_detail' is set
        if self.api_client.client_side_validation and ('elements_user_detail' not in local_var_params or  # noqa: E501
                                                        local_var_params['elements_user_detail'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `elements_user_detail` when calling `update_user`")  # noqa: E501
        # Assemble the request: `id` goes in the URL path; no query or form
        # parameters are used by this endpoint.
        collection_formats = {}
        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The ElementsUserDetail payload is sent as the JSON request body.
        if 'elements_user_detail' in local_var_params:
            body_params = local_var_params['elements_user_detail']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        # Issue the PUT via the shared ApiClient; returns a thread if
        # async_req was passed truthy.
        return self.api_client.call_api(
            '/api/2/users/{id}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='ElementsUserDetail',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def update_workstation(self, id, workstation, **kwargs): # noqa: E501
"""update_workstation # noqa: E501
### Required permissions * Authenticated user * Own workstation or User account permission: `workstations:view` (read) / `workstations:manage` (write) # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_workstation(id, workstation, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str id: A unique value identifying this workstation. (required)
:param Workstation workstation: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Workstation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.update_workstation_with_http_info(id, workstation, **kwargs) # noqa: E501
    def update_workstation_with_http_info(self, id, workstation, **kwargs):  # noqa: E501
        """update_workstation  # noqa: E501
        ### Required permissions * Authenticated user * Own workstation or User account permission: `workstations:view` (read) / `workstations:manage` (write)  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.update_workstation_with_http_info(id, workstation, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param str id: A unique value identifying this workstation. (required)
        :param Workstation workstation: (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(Workstation, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must run before any other local is created so the map
        # holds exactly `self`, the named parameters and `kwargs`.
        local_var_params = locals()
        all_params = ['id', 'workstation']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Fold **kwargs into the parameter map, rejecting unknown keywords.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_workstation" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'id' is set
        if self.api_client.client_side_validation and ('id' not in local_var_params or  # noqa: E501
                                                        local_var_params['id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `id` when calling `update_workstation`")  # noqa: E501
        # verify the required parameter 'workstation' is set
        if self.api_client.client_side_validation and ('workstation' not in local_var_params or  # noqa: E501
                                                        local_var_params['workstation'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `workstation` when calling `update_workstation`")  # noqa: E501
        # Assemble the request: `id` goes in the URL path; no query or form
        # parameters are used by this endpoint.
        collection_formats = {}
        path_params = {}
        if 'id' in local_var_params:
            path_params['id'] = local_var_params['id']  # noqa: E501
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The Workstation payload is sent as the JSON request body.
        if 'workstation' in local_var_params:
            body_params = local_var_params['workstation']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        # Issue the PUT via the shared ApiClient; returns a thread if
        # async_req was passed truthy.
        return self.api_client.call_api(
            '/api/2/workstations/{id}', 'PUT',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Workstation',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def upload_chunk(self, upload_chunk_endpoint_request, **kwargs): # noqa: E501
"""upload_chunk # noqa: E501
### Required permissions * Authenticated user # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.upload_chunk(upload_chunk_endpoint_request, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param UploadChunkEndpointRequest upload_chunk_endpoint_request: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.upload_chunk_with_http_info(upload_chunk_endpoint_request, **kwargs) # noqa: E501
    def upload_chunk_with_http_info(self, upload_chunk_endpoint_request, **kwargs):  # noqa: E501
        """upload_chunk  # noqa: E501
        ### Required permissions * Authenticated user  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.upload_chunk_with_http_info(upload_chunk_endpoint_request, async_req=True)
        >>> result = thread.get()
        :param async_req bool: execute request asynchronously
        :param UploadChunkEndpointRequest upload_chunk_endpoint_request: (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # NOTE: locals() must run before any other local is created so the map
        # holds exactly `self`, the named parameters and `kwargs`.
        local_var_params = locals()
        all_params = ['upload_chunk_endpoint_request']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Fold **kwargs into the parameter map, rejecting unknown keywords.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method upload_chunk" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'upload_chunk_endpoint_request' is set
        if self.api_client.client_side_validation and ('upload_chunk_endpoint_request' not in local_var_params or  # noqa: E501
                                                        local_var_params['upload_chunk_endpoint_request'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `upload_chunk_endpoint_request` when calling `upload_chunk`")  # noqa: E501
        # Assemble the request: no path, query, or form parameters are used by
        # this endpoint.
        collection_formats = {}
        path_params = {}
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # The UploadChunkEndpointRequest payload is sent as the JSON request body.
        if 'upload_chunk_endpoint_request' in local_var_params:
            body_params = local_var_params['upload_chunk_endpoint_request']
        # No Accept header is set: this endpoint returns no response body
        # (response_type=None below).
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501
        # Authentication setting
        auth_settings = ['Bearer']  # noqa: E501
        # Issue the POST via the shared ApiClient; returns a thread if
        # async_req was passed truthy.
        return self.api_client.call_api(
            '/api/2/uploads/chunk', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
| 47.747396
| 178
| 0.604071
| 60,179
| 540,978
| 5.171422
| 0.005966
| 0.04424
| 0.061405
| 0.029787
| 0.987089
| 0.983779
| 0.978066
| 0.972257
| 0.964159
| 0.955133
| 0
| 0.014913
| 0.320854
| 540,978
| 11,329
| 179
| 47.751611
| 0.832143
| 0.469522
| 0
| 0.814734
| 1
| 0
| 0.178002
| 0.056471
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041104
| false
| 0.005759
| 0.000993
| 0
| 0.083201
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9ae2bd30ba6041d000e60587f30ef9d966dbf9ae
| 218
|
py
|
Python
|
uninstall.py
|
f00kthisc0de/TermFun
|
b33b49d3ab19b1ae8877eba1c44b3a802420414d
|
[
"Apache-2.0"
] | 1
|
2021-12-06T02:48:24.000Z
|
2021-12-06T02:48:24.000Z
|
uninstall.py
|
f00kthisc0de/TermFun
|
b33b49d3ab19b1ae8877eba1c44b3a802420414d
|
[
"Apache-2.0"
] | 1
|
2021-02-13T11:57:51.000Z
|
2021-02-13T13:07:07.000Z
|
uninstall.py
|
f00kthisc0de/TermFun
|
b33b49d3ab19b1ae8877eba1c44b3a802420414d
|
[
"Apache-2.0"
] | null | null | null |
#!/data/data/com.termux/files/usr/bin/python3
# Uninstaller for TermFun on Termux (Android). Removes the cloned repository
# and the two launcher scripts installed into Termux's bin directory.
# NOTE(review): this shells out with `rm -rf` via os.system; the `cd ..`
# assumes the script is run from inside the TermFun checkout — TODO confirm.
from os import system as command
command("cd ..; rm -rf TermFun; rm /data/data/com.termux/files/usr/bin/TermFun; rm /data/data/com.termux/files/usr/bin/TermFunListener")
| 36.333333
| 136
| 0.752294
| 37
| 218
| 4.432432
| 0.486486
| 0.146341
| 0.20122
| 0.310976
| 0.621951
| 0.621951
| 0.621951
| 0.45122
| 0.45122
| 0
| 0
| 0.005
| 0.082569
| 218
| 5
| 137
| 43.6
| 0.815
| 0.201835
| 0
| 0
| 0
| 0.5
| 0.722543
| 0.549133
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
b1867900667d955371048f4014d7f97a57531675
| 10,557
|
py
|
Python
|
integration/core/test_replica.py
|
MalibuKoKo/longhorn-engine
|
b31b3f177ebaadb222f733073ca6015ad01ae3ef
|
[
"Apache-2.0"
] | null | null | null |
integration/core/test_replica.py
|
MalibuKoKo/longhorn-engine
|
b31b3f177ebaadb222f733073ca6015ad01ae3ef
|
[
"Apache-2.0"
] | null | null | null |
integration/core/test_replica.py
|
MalibuKoKo/longhorn-engine
|
b31b3f177ebaadb222f733073ca6015ad01ae3ef
|
[
"Apache-2.0"
] | null | null | null |
import time
import random
import datetime
import grpc
import pytest
from common import ( # NOQA
grpc_replica_client as grpc_client, # NOQA
SIZE_STR,
)
@pytest.fixture
def random_str():
    """Pytest fixture: a unique-ish identifier built from a random number and the current epoch second."""
    return 'random-{0}-{1}'.format(random_num(), int(time.time()))
def random_num():
    """Return a uniformly random integer in the inclusive range [0, 1000000]."""
    return random.randrange(0, 1000001)
def test_create(grpc_client):  # NOQA
    """A replica starts in 'initial' state; replica_create moves it to 'closed'."""
    r = grpc_client.replica_get()
    # Fresh replica: nothing allocated yet.
    assert r.state == 'initial'
    assert r.size == '0'
    assert r.sectorSize == 0
    assert r.parent == ''
    assert r.head == ''

    r = grpc_client.replica_create(size=SIZE_STR)
    # Created but not opened: closed, sized, with an initial head image.
    assert r.state == 'closed'
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.parent == ''
    assert r.head == 'volume-head-000.img'
def test_open(grpc_client):  # NOQA
    """replica_open transitions a created replica from 'closed' to 'open'
    without touching size, sector size, parent, or head."""
    r = grpc_client.replica_get()
    assert r.state == 'initial'
    assert r.size == '0'
    assert r.sectorSize == 0
    assert r.parent == ''
    assert r.head == ''

    r = grpc_client.replica_create(size=SIZE_STR)
    assert r.state == 'closed'
    assert not r.dirty
    assert not r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.parent == ''
    assert r.head == 'volume-head-000.img'

    r = grpc_client.replica_open()
    # Only the state should change on open.
    assert r.state == 'open'
    assert not r.dirty
    assert not r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.parent == ''
    assert r.head == 'volume-head-000.img'
def test_close(grpc_client):  # NOQA
    """replica_close returns an open replica to 'closed' with all metadata intact."""
    grpc_client.replica_create(size=SIZE_STR)
    r = grpc_client.replica_open()
    assert r.state == 'open'
    assert not r.dirty
    assert not r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.parent == ''
    assert r.head == 'volume-head-000.img'

    r = grpc_client.replica_close()
    # Only the state should change on close.
    assert r.state == 'closed'
    assert not r.dirty
    assert not r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.parent == ''
    assert r.head == 'volume-head-000.img'
def test_snapshot(grpc_client):  # NOQA
    """Taking snapshots marks the replica dirty, stores labels on the snapshot
    disk, and grows the chain newest-first."""
    grpc_client.replica_create(size=SIZE_STR)
    r = grpc_client.replica_open()
    assert r.state == 'open'
    assert not r.dirty
    assert not r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.parent == ''
    assert r.head == 'volume-head-000.img'

    # First snapshot, with user labels attached.
    r = grpc_client.replica_snapshot(
        name='000', created=datetime.datetime.utcnow().isoformat(),
        labels={"name": "000", "key": "value"})
    assert r.state == 'dirty'
    assert r.dirty
    assert not r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.disks["volume-snap-000.img"].labels["name"] == "000"
    assert r.disks["volume-snap-000.img"].labels["key"] == "value"

    # Second snapshot: head advances and the chain lists newest first.
    r = grpc_client.replica_snapshot(
        name='001', created=datetime.datetime.utcnow().isoformat())
    assert r.state == 'dirty'
    assert r.dirty
    assert not r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.head == 'volume-head-002.img'
    assert r.parent == 'volume-snap-001.img'
    assert r.chain == ['volume-head-002.img', 'volume-snap-001.img',
                       'volume-snap-000.img']
def test_remove_disk(grpc_client):  # NOQA
    """Removing a middle snapshot relinks the chain around it; the active
    head can never be marked or prepared for removal."""
    grpc_client.replica_create(size=SIZE_STR)
    grpc_client.replica_open()
    grpc_client.replica_snapshot(
        name='000', created=datetime.datetime.utcnow().isoformat())
    r = grpc_client.replica_snapshot(
        name='001', created=datetime.datetime.utcnow().isoformat())
    assert r.chain == ['volume-head-002.img', 'volume-snap-001.img',
                       'volume-snap-000.img']

    # idempotent: marking/preparing a nonexistent disk is a no-op.
    grpc_client.disk_mark_as_removed(name='003')
    grpc_client.disk_prepare_remove(name='003')

    # The live head must be protected from removal.
    with pytest.raises(grpc.RpcError) as e:
        grpc_client.disk_mark_as_removed(name='volume-head-002.img')
    assert "Can not mark the active" in str(e.value)
    with pytest.raises(grpc.RpcError) as e:
        grpc_client.disk_prepare_remove(name='volume-head-002.img')
    assert "Can not delete the active" in str(e.value)

    # The newest snapshot needs no coalesce operations before removal.
    grpc_client.disk_mark_as_removed(name='001')
    ops = grpc_client.disk_prepare_remove(name='001').operations
    assert len(ops) == 0

    r = grpc_client.disk_remove(name='volume-snap-001.img')
    assert r.state == 'dirty'
    assert not r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.head == 'volume-head-002.img'
    assert r.parent == 'volume-snap-000.img'
    assert r.chain == ['volume-head-002.img', 'volume-snap-000.img']
def test_remove_last_disk(grpc_client):  # NOQA
    """Removing the oldest snapshot requires coalescing it into its child
    and then replacing the child."""
    grpc_client.replica_create(size=SIZE_STR)
    grpc_client.replica_open()
    grpc_client.replica_snapshot(
        name='000', created=datetime.datetime.utcnow().isoformat())
    r = grpc_client.replica_snapshot(
        name='001', created=datetime.datetime.utcnow().isoformat())
    assert r.chain == ['volume-head-002.img', 'volume-snap-001.img',
                       'volume-snap-000.img']

    grpc_client.disk_mark_as_removed(name='volume-snap-000.img')
    ops = grpc_client.disk_prepare_remove(
        name='volume-snap-000.img').operations
    # Oldest snapshot: fold its data into the child, then swap it out.
    assert len(ops) == 2
    assert ops[0].action == "coalesce"
    assert ops[0].source == "volume-snap-000.img"
    assert ops[0].target == "volume-snap-001.img"
    assert ops[1].action == "replace"
    assert ops[1].source == "volume-snap-000.img"
    assert ops[1].target == "volume-snap-001.img"

    r = grpc_client.disk_remove(name='volume-snap-000.img')
    assert r.state == 'dirty'
    assert not r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.head == 'volume-head-002.img'
    assert r.parent == 'volume-snap-001.img'
    assert r.chain == ['volume-head-002.img', 'volume-snap-001.img']
def test_reload(grpc_client):  # NOQA
    """replica_reload and a close/open cycle both preserve state, chain,
    head, and parent after a disk removal."""
    grpc_client.replica_create(size=SIZE_STR)
    grpc_client.replica_open()
    r = grpc_client.replica_get()
    assert r.chain == ['volume-head-000.img']

    r = grpc_client.replica_snapshot(
        name='000', created=datetime.datetime.utcnow().isoformat())
    assert r.chain == ['volume-head-001.img', 'volume-snap-000.img']
    r = grpc_client.replica_snapshot(
        name='001', created=datetime.datetime.utcnow().isoformat())
    assert r.chain == ['volume-head-002.img', 'volume-snap-001.img',
                       'volume-snap-000.img']

    r = grpc_client.disk_remove(name='volume-snap-000.img')
    assert r.state == 'dirty'
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.head == 'volume-head-002.img'
    assert r.parent == 'volume-snap-001.img'
    assert r.chain == ['volume-head-002.img', 'volume-snap-001.img']

    # Reload must see exactly the same layout.
    r = grpc_client.replica_reload()
    assert r.state == 'dirty'
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.chain == ['volume-head-002.img', 'volume-snap-001.img']
    assert r.head == 'volume-head-002.img'
    assert r.parent == 'volume-snap-001.img'

    # So must a full close/open cycle.
    grpc_client.replica_close()
    r = grpc_client.replica_open()
    assert r.state == 'open'
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.chain == ['volume-head-002.img', 'volume-snap-001.img']
    assert r.head == 'volume-head-002.img'
    assert r.parent == 'volume-snap-001.img'
def test_reload_simple(grpc_client):  # NOQA
    """replica_reload on a freshly opened replica is a metadata no-op."""
    grpc_client.replica_create(size=SIZE_STR)
    r = grpc_client.replica_open()
    assert r.state == 'open'
    assert not r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.parent == ''
    assert r.head == 'volume-head-000.img'

    r = grpc_client.replica_reload()
    # Nothing should change on reload.
    assert r.state == 'open'
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.parent == ''
    assert r.head == 'volume-head-000.img'
def test_rebuilding(grpc_client):  # NOQA
    """The rebuilding flag is persisted: it survives close/open and reload."""
    grpc_client.replica_create(size=SIZE_STR)
    grpc_client.replica_open()
    r = grpc_client.replica_snapshot(
        name='001', created=datetime.datetime.utcnow().isoformat())
    assert r.state == 'dirty'
    assert not r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.parent == 'volume-snap-001.img'
    assert r.head == 'volume-head-001.img'
    assert r.chain == ['volume-head-001.img', 'volume-snap-001.img']

    r = grpc_client.rebuilding_set(rebuilding=True)
    assert r.state == 'rebuilding'
    assert r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.parent == 'volume-snap-001.img'
    assert r.head == 'volume-head-001.img'
    assert r.chain == ['volume-head-001.img', 'volume-snap-001.img']

    # The flag survives a close/open cycle...
    grpc_client.replica_close()
    r = grpc_client.replica_open()
    assert r.state == 'rebuilding'
    assert r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.parent == 'volume-snap-001.img'
    assert r.head == 'volume-head-001.img'
    assert r.chain == ['volume-head-001.img', 'volume-snap-001.img']

    # ...and a reload as well.
    r = grpc_client.replica_reload()
    assert r.state == 'rebuilding'
    assert r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.parent == 'volume-snap-001.img'
    assert r.head == 'volume-head-001.img'
    assert r.chain == ['volume-head-001.img', 'volume-snap-001.img']
def test_not_rebuilding(grpc_client):  # NOQA
    """rebuilding_set(False) returns a rebuilding replica to the 'dirty' state."""
    grpc_client.replica_create(size=SIZE_STR)
    grpc_client.replica_open()
    r = grpc_client.replica_snapshot(
        name='001', created=datetime.datetime.utcnow().isoformat())
    assert r.state == 'dirty'
    assert not r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.parent == 'volume-snap-001.img'
    assert r.head == 'volume-head-001.img'
    assert r.chain == ['volume-head-001.img', 'volume-snap-001.img']

    r = grpc_client.rebuilding_set(rebuilding=True)
    assert r.state == 'rebuilding'
    assert r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.parent == 'volume-snap-001.img'
    assert r.head == 'volume-head-001.img'
    assert r.chain == ['volume-head-001.img', 'volume-snap-001.img']

    # Clearing the flag goes back to 'dirty', not 'open'.
    r = grpc_client.rebuilding_set(rebuilding=False)
    assert r.state == 'dirty'
    assert not r.rebuilding
    assert r.size == SIZE_STR
    assert r.sectorSize == 512
    assert r.parent == 'volume-snap-001.img'
    assert r.head == 'volume-head-001.img'
    assert r.chain == ['volume-head-001.img', 'volume-snap-001.img']
| 31.607784
| 68
| 0.655489
| 1,515
| 10,557
| 4.446205
| 0.058086
| 0.149644
| 0.10095
| 0.071259
| 0.932898
| 0.925624
| 0.912708
| 0.87307
| 0.837144
| 0.832987
| 0
| 0.046677
| 0.200436
| 10,557
| 333
| 69
| 31.702703
| 0.751333
| 0.006631
| 0
| 0.798507
| 0
| 0
| 0.183901
| 0
| 0
| 0
| 0
| 0
| 0.641791
| 1
| 0.044776
| false
| 0
| 0.022388
| 0.007463
| 0.074627
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4917427fad711632c53178993c009fd6d632bb55
| 12,840
|
py
|
Python
|
pymc/mc_bin.py
|
cherish-web/pymc
|
9c322abfdcceca0a78b633d85da23e1290c036c8
|
[
"Apache-2.0"
] | 4
|
2021-05-01T12:43:24.000Z
|
2022-01-25T03:44:32.000Z
|
pymc/mc_bin.py
|
cherish-web/pymc
|
9c322abfdcceca0a78b633d85da23e1290c036c8
|
[
"Apache-2.0"
] | null | null | null |
pymc/mc_bin.py
|
cherish-web/pymc
|
9c322abfdcceca0a78b633d85da23e1290c036c8
|
[
"Apache-2.0"
] | 2
|
2021-07-10T03:56:08.000Z
|
2021-09-30T14:59:35.000Z
|
# _*_ coding: utf-8 _*_
# @Time : 2021/3/29 09:21 AM
# @Author : cherish_peng
# @Email : 1058386071@qq.com
# @File : mc.py
# @Software : PyCharm
from .mc_enum import EnumSubTitle, EnumCmd
from .mc_device import MelsecElement
import math

# Network number of the access route (0x00 = own network).
m_nNetNo = 0x00
# PLC number.
m_nPLCNo = 0xFF
# Request-destination module I/O number.
m_nIONo = 0xFF03
# Station number.
m_nStationNo = 0x00
# CPU monitoring timer: allowed time between command output and response.
m_nTimeOut = 0x1000
# (A block of commented-out legacy instance-attribute assignments was removed
# here; the values now live in these module-level constants and in the
# frame-builder function arguments.)
# Fixed request-data length in bytes, excluding the device data payload.
m_nDataLen = 12
def check(melsec_element: MelsecElement, start_addr, data_len) -> bool:
    """Validate a device access against the element's limits.

    Returns True when ``data_len`` is non-negative and within the element's
    capacity, and ``start_addr`` lies inside the element's address window.
    """
    length_ok = 0 <= data_len <= melsec_element.m_nLen
    addr_ok = melsec_element.m_nStartAddr <= start_addr <= melsec_element.m_nEndAddr
    return length_ok and addr_ok
'''*************读协议内容******************************
*副标题(2)50 00|网络编号(1)00|PLC编号(1)FF
*IO编号(2)FF 03|站编号(1)00|请求数据长度(2)_12
*应答超时(2)1000|命令(2)_|子命令(2)_|起始地址(3)_
*请求软元件代码(1)|请求点数长度(2)
****************************************************'''
def get_read_bytes(melsec_element: MelsecElement, start_addr, data_len) -> bytes:
    """Build an MC-protocol 3E-frame batch-read request (binary code).

    :param melsec_element: device (soft element) type descriptor
    :param start_addr: head device address
    :param data_len: number of device points to read
    :return: the request frame as bytes
    :raises Exception: if the address/length fails ``check``
    """
    if not check(melsec_element, start_addr, data_len):
        raise Exception('device addr or data length is error!')
    # (value, byte width, byte order) for each frame field, in wire order.
    fields = [
        (EnumSubTitle.Request.value, 2, 'big'),    # sub-header
        (m_nNetNo, 1, 'little'),                   # network number
        (m_nPLCNo, 1, 'little'),                   # PLC number
        (m_nIONo, 2, 'big'),                       # destination module I/O number
        (m_nStationNo, 1, 'little'),               # station number
        (m_nDataLen, 2, 'little'),                 # request data length
        (m_nTimeOut, 2, 'big'),                    # CPU monitoring timer
        (EnumCmd.ReadBatch.value, 2, 'little'),    # command
        (melsec_element.m_nSub_cmd, 2, 'little'),  # sub-command (bit/word units)
        (start_addr, 3, 'little'),                 # head device address
        (melsec_element.m_nBinCode, 1, 'little'),  # device code
        (data_len, 2, 'little'),                   # device point count
    ]
    return b''.join(
        int.to_bytes(value, width, byteorder=order, signed=False)
        for value, width, order in fields
    )
def get_read_bytes_4e(number, melsec_element: MelsecElement, start_addr, data_len) -> bytes:
    """Build an MC-protocol 4E-frame batch-read request (binary code).

    :param number: serial number echoed back by the PLC in its response
    :param melsec_element: device (soft element) type descriptor
    :param start_addr: head device address
    :param data_len: number of device points to read
    :return: the request frame as bytes
    :raises Exception: if the address/length fails ``check``
    """
    if not check(melsec_element, start_addr, data_len):
        raise Exception('device addr or data length is error!')
    # (value, byte width, byte order) for each frame field, in wire order.
    fields = [
        (EnumSubTitle.Request4e.value, 2, 'big'),  # sub-header (4E)
        (number, 2, 'little'),                     # serial number
        (0, 2, 'little'),                          # fixed 00 00 field
        (m_nNetNo, 1, 'little'),                   # network number
        (m_nPLCNo, 1, 'little'),                   # PLC number
        (m_nIONo, 2, 'big'),                       # destination module I/O number
        (m_nStationNo, 1, 'little'),               # station number
        (m_nDataLen, 2, 'little'),                 # request data length
        (m_nTimeOut, 2, 'big'),                    # CPU monitoring timer
        (EnumCmd.ReadBatch.value, 2, 'little'),    # command
        (melsec_element.m_nSub_cmd, 2, 'little'),  # sub-command (bit/word units)
        (start_addr, 3, 'little'),                 # head device address
        (melsec_element.m_nBinCode, 1, 'little'),  # device code
        (data_len, 2, 'little'),                   # device point count
    ]
    return b''.join(
        int.to_bytes(value, width, byteorder=order, signed=False)
        for value, width, order in fields
    )
'''*************写协议内容******************************
*副标题(2)50 00|网络编号(1)00|PLC编号(1)FF
*IO编号(2)FF 03|站编号(1)00|请求数据长度(2)_12+写入数据长度
*应答超时(2)1000|命令(2)_|子命令(2)_|起始地址(3)_
*请求软元件代码(1)|请求点数长度(2)|写入数据(分按位和按字)
****************************************************'''
def get_write_bytes(melsec_element: MelsecElement, start_addr, data) -> bytes:
    """Build an MC-protocol 3E-frame batch-write request (binary code).

    :param melsec_element: device (soft element) type descriptor
    :param start_addr: head device address
    :param data: value(s) to write — a list of ints, or a single int
    :return: the request frame as bytes
    :raises Exception: if the address/length fails ``check``
    """
    if isinstance(data, int):
        data = [data, ]
    if not check(melsec_element, start_addr, len(data)):
        # Match the read-side builders, which raise instead of silently
        # returning an empty frame that would be sent as a bogus packet.
        raise Exception('device addr or data length is error!')
    m_bytes = bytes()
    m_bytes += int.to_bytes(EnumSubTitle.Request.value, 2, byteorder='big', signed=False)
    m_bytes += int.to_bytes(m_nNetNo, 1, byteorder='little', signed=False)
    m_bytes += int.to_bytes(m_nPLCNo, 1, byteorder='little', signed=False)
    m_bytes += int.to_bytes(m_nIONo, 2, byteorder='big', signed=False)
    m_bytes += int.to_bytes(m_nStationNo, 1, byteorder='little', signed=False)
    # Payload size in bytes: bit devices pack two bit values per byte,
    # word devices use two bytes per point.
    if melsec_element.m_nSub_cmd:
        data_len = math.ceil(len(data) / 2)
    else:
        data_len = len(data) * 2
    total_data_len = m_nDataLen + data_len
    m_bytes += int.to_bytes(total_data_len, 2, byteorder='little', signed=False)
    m_bytes += int.to_bytes(m_nTimeOut, 2, byteorder='big', signed=False)
    m_bytes += int.to_bytes(EnumCmd.WriteBatch.value, 2, byteorder='little', signed=False)
    m_bytes += int.to_bytes(melsec_element.m_nSub_cmd, 2, byteorder='little', signed=False)
    m_bytes += int.to_bytes(start_addr, 3, byteorder='little', signed=False)
    m_bytes += int.to_bytes(melsec_element.m_nBinCode, 1, byteorder='little', signed=False)
    # Device point count: the number of points written (len(data)), NOT the
    # payload byte count.  The 4E builder and the sample frame in the module
    # comment (point-count field 0100 for a single word, length field
    # 0E00 = 12 + 2) both use len(data); the previous code sent data_len
    # here, which is wrong for word writes.
    m_bytes += int.to_bytes(len(data), 2, byteorder='little', signed=False)
    if melsec_element.m_nSub_cmd:
        # Bit device: pack two 4-bit values per byte, high nibble first.
        for i in range(0, len(data), 2):
            if i + 1 < len(data):
                m_bytes += int.to_bytes(data[i] << 4 | data[i + 1], 1, byteorder='little', signed=False)
            else:
                m_bytes += int.to_bytes(data[i] << 4, 1, byteorder='little', signed=False)
    else:
        # Word device: each point is one little-endian 16-bit value.
        for dt in data:
            m_bytes += int.to_bytes(dt, 2, byteorder='little', signed=False)
    return m_bytes
'''*************写协议内容******************************
*副标题(2)54 00|序列号(2)34 12|固定值(2)00 00|网络编号(1)00|PLC编号(1)FF
*IO编号(2)FF 03|站编号(1)00|请求数据长度(2)_12+写入数据长度
*应答超时(2)1000|命令(2)_|子命令(2)_|起始地址(3)_
*请求软元件代码(1)|请求点数长度(2)|写入数据(分按位和按字)
****************************************************'''
def get_write_bytes_4e(number, melsec_element: MelsecElement, start_addr, data) -> bytes:
    """Build an MC-protocol 4E-frame batch-write request (binary code).

    :param number: serial number echoed back by the PLC in its response
    :param melsec_element: device (soft element) type descriptor
    :param start_addr: head device address
    :param data: value(s) to write — a list of ints, or a single int
    :return: the request frame as bytes
    :raises Exception: if the address/length fails ``check``
    """
    if isinstance(data, int):
        data = [data, ]
    if not check(melsec_element, start_addr, len(data)):
        # Consistent with the read builders: fail loudly instead of
        # silently returning an empty frame.
        raise Exception('device addr or data length is error!')
    m_bytes = bytes()
    m_bytes += int.to_bytes(EnumSubTitle.Request4e.value, 2, byteorder='big', signed=False)
    m_bytes += int.to_bytes(number, 2, byteorder='little', signed=False)
    m_bytes += int.to_bytes(0, 2, byteorder='little', signed=False)  # fixed 00 00 field
    m_bytes += int.to_bytes(m_nNetNo, 1, byteorder='little', signed=False)
    m_bytes += int.to_bytes(m_nPLCNo, 1, byteorder='little', signed=False)
    m_bytes += int.to_bytes(m_nIONo, 2, byteorder='big', signed=False)
    m_bytes += int.to_bytes(m_nStationNo, 1, byteorder='little', signed=False)
    # Payload size in bytes: bit devices pack two bit values per byte,
    # word devices use two bytes per point.
    if melsec_element.m_nSub_cmd:
        data_len = math.ceil(len(data) / 2)
    else:
        data_len = len(data) * 2
    total_data_len = m_nDataLen + data_len
    m_bytes += int.to_bytes(total_data_len, 2, byteorder='little', signed=False)
    m_bytes += int.to_bytes(m_nTimeOut, 2, byteorder='big', signed=False)
    m_bytes += int.to_bytes(EnumCmd.WriteBatch.value, 2, byteorder='little', signed=False)
    m_bytes += int.to_bytes(melsec_element.m_nSub_cmd, 2, byteorder='little', signed=False)
    m_bytes += int.to_bytes(start_addr, 3, byteorder='little', signed=False)
    m_bytes += int.to_bytes(melsec_element.m_nBinCode, 1, byteorder='little', signed=False)
    # Device point count (number of points, not payload bytes).
    m_bytes += int.to_bytes(len(data), 2, byteorder='little', signed=False)
    if melsec_element.m_nSub_cmd:
        # Bit device: pack two 4-bit values per byte, high nibble first.
        for i in range(0, len(data), 2):
            if i + 1 < len(data):
                m_bytes += int.to_bytes(data[i] << 4 | data[i + 1], 1, byteorder='little', signed=False)
            else:
                m_bytes += int.to_bytes(data[i] << 4, 1, byteorder='little', signed=False)
    else:
        # Word device: each point is one little-endian 16-bit value.
        for dt in data:
            m_bytes += int.to_bytes(dt, 2, byteorder='little', signed=False)
    return m_bytes
# MCBinSend: 500000FFFF03000E001000011400000000009D0100000F 按字写
# MCBinRece: D00000FFFF030002000000
'''*************读应答正常协议内容******************************
*副标题(2)D0 00|网络编号(1)00|PLC编号(1)FF
*IO编号(2)FF 03|站编号(1)00|应答数据长度(2)_
*结束代码(2)00 00|应答数据部分
**********************************************************'''
def get_read_respond(byte_respond: bytes):
    """Extract the data portion of a 3E-frame read response.

    Layout: 7 header bytes | response length (2, little-endian) |
    end code (2) | data.  The length field counts the end code plus the
    data, so the data length is the field value minus 2.

    NOTE(review): the acceptance test keeps the original ``>=`` comparison
    between the declared length and the bytes actually present — confirm
    the intended direction against the callers.
    """
    payload_len = 0
    if len(byte_respond) > 11:
        payload_len = int.from_bytes(byte_respond[7:9], byteorder='little', signed=False) - 2
    if payload_len < len(byte_respond) - 11:
        return None
    return byte_respond[11:11 + payload_len]
'''*************读应答正常协议内容******************************
*副标题(2)D4 00|序列号(2)34 12|固定值(2)00 00|网络编号(1)00|PLC编号(1)FF
*IO编号(2)FF 03|站编号(1)00|应答数据长度(2)_
*结束代码(2)00 00|应答数据部分
**********************************************************'''
def get_read_respond_4e(byte_respond: bytes):
    """Extract (serial number, data, consumed length) from a 4E-frame read response.

    Layout: sub-header (2) | serial number (2, little-endian) | fixed 00 00 (2) |
    5 routing bytes | response length (2, little-endian) | end code (2) | data.
    Returns None when the declared length exceeds the bytes actually present.
    """
    payload_len = 0
    if len(byte_respond) > 15:
        # The length field counts the 2 end-code bytes plus the data.
        payload_len = int.from_bytes(byte_respond[11:13], byteorder='little', signed=False) - 2
    if payload_len < len(byte_respond) - 15:
        return None
    serial = int.from_bytes(byte_respond[2:4], byteorder='little', signed=False)
    return serial, byte_respond[15:15 + payload_len], payload_len + 15
'''*************写应答正常协议内容******************************
*副标题(2)D0 00|网络编号(1)00|PLC编号(1)FF
*IO编号(2)FF 03|站编号(1)00|应答数据长度(2)02 00
*结束代码(2)00 00
*D0 00 |00 |FF |FF 03| 00| 02 00 |00 00
**********************************************************'''
def get_write_respond(byte_respond: bytes):
    """Return True when *byte_respond* is the normal 3E-frame write acknowledge.

    The expected acknowledge is the fixed 11-byte sequence
    D0 00 00 FF FF 03 00 02 00 00 00 (end code 0000 = success).

    :param byte_respond: raw response frame
    :return: bool
    """
    expected_hex = "D00000FFFF030002000000"
    actual_hex = "".join("%02X" % b for b in byte_respond)
    return actual_hex == expected_hex
'''*************写应答正常协议内容******************************
*副标题(2)D4 00|序列号(2)34 12|固定值(2)00 00|网络编号(1)00
*PLC编号(1)FF|IO编号(2)FF 03|站编号(1)00|应答数据长度(2)02 00
*结束代码(2)00 00
*D0 00 |00 |FF |FF 03| 00| 02 00 |00 00
**********************************************************'''
def get_write_respond_4e(byte_respond: bytes):
    """Parse a 4E-frame write acknowledge.

    :param byte_respond: raw response frame
    :return: ``(serial_number, ok, 15)`` where *ok* is True when the frame,
        with the serial-number and fixed bytes (offsets 2..5) skipped,
        matches the normal-completion acknowledge, and 15 is the number of
        bytes consumed from the stream.
    """
    serial = int.from_bytes(byte_respond[2:4], byteorder='little', signed=False)
    stripped = byte_respond[:2] + byte_respond[6:15]
    actual_hex = "".join("%02X" % b for b in stripped)
    return serial, actual_hex == "D40000FFFF030002000000", 15
'''*************应答异常协议内容******************************
*副标题(2)D0 00|网络编号(1)00|PLC编号(1)FF
*IO编号(2)FF 03|站编号(1)00|应答数据长度(2) 0B 00
*结束代码(2)51 C0
*网络编号(1)00|PLC编号(1)FF
*IO编号(2)FF 03|站编号(1)00|命令(2)|子命令(2)
********************************************************'''
def get_respond_code(byte_respond: bytes):
    """Return the end code of a 3E-frame response (bytes 9..10, little-endian),
    or -1 when the frame is too short to contain one."""
    if len(byte_respond) <= 10:
        return -1
    return int.from_bytes(byte_respond[9:11], byteorder='little', signed=False)
'''*************应答异常协议内容******************************
*副标题(2)D4 00|序列号(2)34 12|固定值(2)00 00|网络编号(1)00|PLC编号(1)FF
*IO编号(2)FF 03|站编号(1)00|应答数据长度(2) 0B 00
*结束代码(2)51 C0
*网络编号(1)00|PLC编号(1)FF
*IO编号(2)FF 03|站编号(1)00|命令(2)|子命令(2)
********************************************************'''
def get_respond_code_4e(byte_respond):
    """Parse a 4E-frame error response.

    Returns ``(serial_number, end_code, 24)`` — serial number from bytes 2..3,
    end code from bytes 13..14 (both little-endian) — or ``(0, -1)`` when the
    frame is too short.

    NOTE(review): the success path returns a 3-tuple while the short-frame
    path returns a 2-tuple, and the meaning of the constant 24 is not
    evident from this file — confirm both against the callers.
    """
    if len(byte_respond) > 14:
        number = int.from_bytes(byte_respond[2:4], byteorder='little', signed=False)
        return number, int.from_bytes(byte_respond[13:15], byteorder='little', signed=False), 24
    return 0, -1
| 38.214286
| 108
| 0.609502
| 1,855
| 12,840
| 4.012399
| 0.102426
| 0.053204
| 0.070133
| 0.085718
| 0.824802
| 0.806664
| 0.803574
| 0.789198
| 0.77845
| 0.762999
| 0
| 0.056943
| 0.188941
| 12,840
| 335
| 109
| 38.328358
| 0.657768
| 0.093614
| 0
| 0.719512
| 0
| 0
| 0.052326
| 0.004817
| 0
| 0
| 0.002627
| 0
| 0
| 1
| 0.067073
| false
| 0
| 0.018293
| 0
| 0.189024
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
499a6a5b511663a1459a9503afe7fe946deed6b0
| 2,840
|
py
|
Python
|
aiokts/web/arguments_params.py
|
ktsstudio/aiokts
|
3bf642c2d316632a9d4665239a74d13a2e2bbf74
|
[
"MIT"
] | 6
|
2017-09-23T15:24:06.000Z
|
2018-08-08T19:31:41.000Z
|
aiokts/web/arguments_params.py
|
ktsstudio/aiokts
|
3bf642c2d316632a9d4665239a74d13a2e2bbf74
|
[
"MIT"
] | 1
|
2018-08-08T19:30:55.000Z
|
2018-08-08T19:30:55.000Z
|
aiokts/web/arguments_params.py
|
ktsstudio/aiokts
|
3bf642c2d316632a9d4665239a74d13a2e2bbf74
|
[
"MIT"
] | null | null | null |
import functools
import json
from aiokts.util.arguments import check_arguments
from aiokts.web.error import ServerError
def arguments_params(arglist=None):
    """Decorator factory: validate request arguments before calling the handler.

    GET handlers read from the query string; other methods read from a JSON
    body (for ``application/json`` content, raising ServerError(BAD_REQUEST)
    on invalid JSON) or from form POST data.  The checked arguments are
    merged into the handler's ``kwargs``.
    """
    arglist = {} if arglist is None else arglist

    def _arguments(func):
        @functools.wraps(func)
        async def inner(self, *args, **kwargs):
            request = self.request
            if request.method == 'GET':
                source = request.url.query
            elif request.content_type.startswith('application/json'):
                try:
                    source = await request.json()
                except json.JSONDecodeError:
                    raise ServerError(ServerError.BAD_REQUEST(
                        message='Body must be a valid json'))
            else:
                source = await request.post()
            kwargs.update(check_arguments(arglist, source, cast_type=True))
            return await func(self, *args, **kwargs)

        inner._has_arguments_ = True
        inner.arglist = arglist
        return inner

    return _arguments
def arguments_params_get(arglist=None):
    """Decorator factory: validate query-string arguments for GET handlers.

    Unlike :func:`arguments_params`, the wrapper is synchronous and does not
    await the wrapped handler's result.
    """
    arglist = {} if arglist is None else arglist

    def _arguments(func):
        @functools.wraps(func)
        def inner(self, *args, **kwargs):
            validated = check_arguments(
                arglist, self.request.url.query, cast_type=True)
            kwargs.update(validated)
            return func(self, *args, **kwargs)

        inner._has_arguments_ = True
        inner.arglist = arglist
        return inner

    return _arguments
def arguments_params_post(arglist=None):
    """Decorator factory: validate form POST data for async handlers."""
    arglist = {} if arglist is None else arglist

    def _arguments(func):
        @functools.wraps(func)
        async def inner(self, *args, **kwargs):
            payload = await self.request.post()
            kwargs.update(check_arguments(arglist, payload, cast_type=True))
            return await func(self, *args, **kwargs)

        inner._has_arguments_ = True
        inner.arglist = arglist
        return inner

    return _arguments
def arguments_params_json(arglist=None):
    """Decorator factory: validate a JSON request body for async handlers.

    Raises ServerError(BAD_REQUEST) when the body is not valid JSON.
    """
    arglist = {} if arglist is None else arglist

    def _arguments(func):
        @functools.wraps(func)
        async def inner(self, *args, **kwargs):
            try:
                payload = await self.request.json()
            except json.JSONDecodeError:
                raise ServerError(ServerError.BAD_REQUEST(
                    message='Body must be a valid json'))
            kwargs.update(check_arguments(arglist, payload, cast_type=True))
            return await func(self, *args, **kwargs)

        inner._has_arguments_ = True
        inner.arglist = arglist
        return inner

    return _arguments
| 29.583333
| 76
| 0.587324
| 301
| 2,840
| 5.385382
| 0.182724
| 0.059223
| 0.069093
| 0.049352
| 0.822949
| 0.789636
| 0.789636
| 0.789636
| 0.764343
| 0.719926
| 0
| 0
| 0.327113
| 2,840
| 95
| 77
| 29.894737
| 0.848247
| 0
| 0
| 0.72973
| 0
| 0
| 0.024296
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.121622
| false
| 0
| 0.054054
| 0
| 0.337838
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b8cf08ab48d80f93ec47c8283936b9ba7c6c492b
| 99
|
py
|
Python
|
integration/tests/error_assert_file.py
|
youhavethewrong/hurl
|
91cc14882a5f1ef7fa86be09a9f5581cef680559
|
[
"Apache-2.0"
] | 1,013
|
2020-08-27T12:38:48.000Z
|
2022-03-31T23:12:23.000Z
|
integration/tests/error_assert_file.py
|
youhavethewrong/hurl
|
91cc14882a5f1ef7fa86be09a9f5581cef680559
|
[
"Apache-2.0"
] | 217
|
2020-08-31T11:18:10.000Z
|
2022-03-30T17:50:30.000Z
|
integration/tests/error_assert_file.py
|
youhavethewrong/hurl
|
91cc14882a5f1ef7fa86be09a9f5581cef680559
|
[
"Apache-2.0"
] | 54
|
2020-09-02T09:41:06.000Z
|
2022-03-19T15:33:05.000Z
|
from tests import app


@app.route("/error-assert-file")
def error_assert_file():
    """Fixture endpoint for the Hurl file-assert error integration test."""
    body = 'Hello'
    return body
| 19.8
| 32
| 0.727273
| 15
| 99
| 4.666667
| 0.733333
| 0.314286
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131313
| 99
| 5
| 33
| 19.8
| 0.813953
| 0
| 0
| 0
| 0
| 0
| 0.23
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
b8ef57c5de1c01548f2d1ad684c0494d4ce45f62
| 99,774
|
py
|
Python
|
huaweicloud-sdk-iotedge/huaweicloudsdkiotedge/v2/iotedge_async_client.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 64
|
2020-06-12T07:05:07.000Z
|
2022-03-30T03:32:50.000Z
|
huaweicloud-sdk-iotedge/huaweicloudsdkiotedge/v2/iotedge_async_client.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 11
|
2020-07-06T07:56:54.000Z
|
2022-01-11T11:14:40.000Z
|
huaweicloud-sdk-iotedge/huaweicloudsdkiotedge/v2/iotedge_async_client.py
|
huaweicloud/huaweicloud-sdk-python-v3
|
7a6270390fcbf192b3882bf763e7016e6026ef78
|
[
"Apache-2.0"
] | 24
|
2020-06-08T11:42:13.000Z
|
2022-03-04T06:44:08.000Z
|
# coding: utf-8
from __future__ import absolute_import
import datetime
import re
import importlib
import six
from huaweicloudsdkcore.client import Client, ClientBuilder
from huaweicloudsdkcore.exceptions import exceptions
from huaweicloudsdkcore.utils import http_utils
from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest
class IoTEdgeAsyncClient(Client):
"""
:param configuration: .Configuration object for this client
:param pool_threads: The number of threads to use for async requests
to the API. More threads means more concurrent API requests.
"""
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
NATIVE_TYPES_MAPPING = {
'int': int,
'long': int if six.PY3 else long,
'float': float,
'str': str,
'bool': bool,
'date': datetime.date,
'datetime': datetime.datetime,
'object': object,
}
def __init__(self):
super(IoTEdgeAsyncClient, self).__init__()
self.model_package = importlib.import_module("huaweicloudsdkiotedge.v2.model")
self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'}
@classmethod
def new_builder(cls, clazz=None):
if clazz is None:
return ClientBuilder(cls)
if clazz.__name__ != "IoTEdgeClient":
raise TypeError("client type error, support client type is IoTEdgeClient")
return ClientBuilder(clazz)
def create_edge_node_async(self, request):
"""创建边缘节点
创建边缘节点
:param CreateEdgeNodeRequest request
:return: CreateEdgeNodeResponse
"""
return self.create_edge_node_with_http_info(request)
def create_edge_node_with_http_info(self, request):
"""创建边缘节点
创建边缘节点
:param CreateEdgeNodeRequest request
:return: CreateEdgeNodeResponse
"""
all_params = ['create_edge_node_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/{project_id}/edge-nodes',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateEdgeNodeResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_install_cmd_async(self, request):
"""生成边缘节点安装命令
生成边缘节点安装命令,命令有效时间30分钟,超过后需要重新生成
:param CreateInstallCmdRequest request
:return: CreateInstallCmdResponse
"""
return self.create_install_cmd_with_http_info(request)
def create_install_cmd_with_http_info(self, request):
"""生成边缘节点安装命令
生成边缘节点安装命令,命令有效时间30分钟,超过后需要重新生成
:param CreateInstallCmdRequest request
:return: CreateInstallCmdResponse
"""
all_params = ['edge_node_id', 'arch']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'edge_node_id' in local_var_params:
path_params['edge_node_id'] = local_var_params['edge_node_id']
query_params = []
if 'arch' in local_var_params:
query_params.append(('arch', local_var_params['arch']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/install',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateInstallCmdResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_edge_node_async(self, request):
"""删除边缘节点
删除指定边缘节点
:param DeleteEdgeNodeRequest request
:return: DeleteEdgeNodeResponse
"""
return self.delete_edge_node_with_http_info(request)
def delete_edge_node_with_http_info(self, request):
    """Delete the specified edge node.

    :param DeleteEdgeNodeRequest request
    :return: DeleteEdgeNodeResponse
    """
    all_params = ['edge_node_id', 'delete_external_node']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = [
        (key, local_var_params[key])
        for key in ('delete_external_node',)
        if key in local_var_params
    ]

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteEdgeNodeResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_edge_nodes_async(self, request):
    """Query the list of edge nodes.

    :param ListEdgeNodesRequest request
    :return: ListEdgeNodesResponse
    """
    response = self.list_edge_nodes_with_http_info(request)
    return response
def list_edge_nodes_with_http_info(self, request):
    """Query the list of edge nodes.

    :param ListEdgeNodesRequest request
    :return: ListEdgeNodesResponse
    """
    all_params = ['name', 'state', 'type', 'instance_id', 'space_id', 'node_ids', 'offset', 'limit']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}

    query_params = [
        (key, local_var_params[key])
        for key in ('name', 'state', 'type', 'instance_id',
                    'space_id', 'node_ids', 'offset', 'limit')
        if key in local_var_params
    ]
    # node_ids is a multi-valued filter serialized as comma-separated values.
    if 'node_ids' in local_var_params:
        collection_formats['node_ids'] = 'csv'

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListEdgeNodesResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_edge_node_async(self, request):
    """Query the details of an edge node.

    :param ShowEdgeNodeRequest request
    :return: ShowEdgeNodeResponse
    """
    response = self.show_edge_node_with_http_info(request)
    return response
def show_edge_node_with_http_info(self, request):
    """Query the details of an edge node.

    :param ShowEdgeNodeRequest request
    :return: ShowEdgeNodeResponse
    """
    all_params = ['edge_node_id']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowEdgeNodeResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def add_device_async(self, request):
    """Add a device.

    :param AddDeviceRequest request
    :return: AddDeviceResponse
    """
    response = self.add_device_with_http_info(request)
    return response
def add_device_with_http_info(self, request):
    """Add a device.

    :param AddDeviceRequest request
    :return: AddDeviceResponse
    """
    all_params = ['edge_node_id', 'add_device_request_body']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json;charset=UTF-8'])
    }
    form_params = {}

    # Use the structured body if present; a stream request overrides it.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/devices',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='AddDeviceResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def batch_update_configs_async(self, request):
    """Batch-update the protocol configuration of child devices.

    Pass product_id to update every device under the product, or a
    device_id list to update by device ID; the two are mutually exclusive.

    :param BatchUpdateConfigsRequest request
    :return: BatchUpdateConfigsResponse
    """
    response = self.batch_update_configs_with_http_info(request)
    return response
def batch_update_configs_with_http_info(self, request):
    """Batch-update the protocol configuration of child devices.

    Pass product_id to update every device under the product, or a
    device_id list to update by device ID; the two are mutually exclusive.

    :param BatchUpdateConfigsRequest request
    :return: BatchUpdateConfigsResponse
    """
    all_params = ['batch_update_configs_request_body']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json;charset=UTF-8'])
    }
    form_params = {}

    # Use the structured body if present; a stream request overrides it.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/devices/batch-configs',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='BatchUpdateConfigsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_access_code_async(self, request):
    """Generate an access code for a Modbus-protocol device.

    :param CreateAccessCodeRequest request
    :return: CreateAccessCodeResponse
    """
    response = self.create_access_code_with_http_info(request)
    return response
def create_access_code_with_http_info(self, request):
    """Generate an access code for a Modbus-protocol device.

    :param CreateAccessCodeRequest request
    :return: CreateAccessCodeResponse
    """
    all_params = ['edge_node_id', 'device_id']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id', 'device_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/devices/{device_id}/access-code',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateAccessCodeResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_device_async(self, request):
    """Delete a device.

    :param DeleteDeviceRequest request
    :return: DeleteDeviceResponse
    """
    response = self.delete_device_with_http_info(request)
    return response
def delete_device_with_http_info(self, request):
    """Delete a device.

    :param DeleteDeviceRequest request
    :return: DeleteDeviceResponse
    """
    all_params = ['edge_node_id', 'device_id']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id', 'device_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/devices/{device_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteDeviceResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_devices_async(self, request):
    """Query the device list.

    :param ListDevicesRequest request
    :return: ListDevicesResponse
    """
    response = self.list_devices_with_http_info(request)
    return response
def list_devices_with_http_info(self, request):
    """Query the device list.

    :param ListDevicesRequest request
    :return: ListDevicesResponse
    """
    all_params = ['edge_node_id', 'gateway_id', 'device_name', 'offset', 'limit']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = [
        (key, local_var_params[key])
        for key in ('gateway_id', 'device_name', 'offset', 'limit')
        if key in local_var_params
    ]

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/devices',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListDevicesResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_product_config_async(self, request):
    """Get the protocol configuration.

    :param ShowProductConfigRequest request
    :return: ShowProductConfigResponse
    """
    response = self.show_product_config_with_http_info(request)
    return response
def show_product_config_with_http_info(self, request):
    """Get the protocol configuration.

    :param ShowProductConfigRequest request
    :return: ShowProductConfigResponse
    """
    all_params = ['protocol_type']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}

    query_params = [
        (key, local_var_params[key])
        for key in ('protocol_type',)
        if key in local_var_params
    ]

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/protocol-configs',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowProductConfigResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_protocol_mappings_async(self, request):
    """Get the protocol mapping file.

    :param ShowProtocolMappingsRequest request
    :return: ShowProtocolMappingsResponse
    """
    response = self.show_protocol_mappings_with_http_info(request)
    return response
def show_protocol_mappings_with_http_info(self, request):
    """Get the protocol mapping file.

    :param ShowProtocolMappingsRequest request
    :return: ShowProtocolMappingsResponse
    """
    all_params = ['product_id']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('product_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/products/{product_id}/protocol-mappings',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowProtocolMappingsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_device_async(self, request):
    """Modify a device.

    :param UpdateDeviceRequest request
    :return: UpdateDeviceResponse
    """
    response = self.update_device_with_http_info(request)
    return response
def update_device_with_http_info(self, request):
    """Modify a device.

    :param UpdateDeviceRequest request
    :return: UpdateDeviceResponse
    """
    all_params = ['edge_node_id', 'device_id', 'update_device_request_body']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id', 'device_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json;charset=UTF-8'])
    }
    form_params = {}

    # Use the structured body if present; a stream request overrides it.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/devices/{device_id}',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='UpdateDeviceResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def upload_protocol_mappings_async(self, request):
    """Upload a protocol mapping file.

    :param UploadProtocolMappingsRequest request
    :return: UploadProtocolMappingsResponse
    """
    response = self.upload_protocol_mappings_with_http_info(request)
    return response
def upload_protocol_mappings_with_http_info(self, request):
    """Upload a protocol mapping file.

    :param UploadProtocolMappingsRequest request
    :return: UploadProtocolMappingsResponse
    """
    all_params = ['product_id', 'file']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('product_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['multipart/form-data'])
    }
    # The uploaded file is sent as a multipart form field.
    form_params = {}
    if 'file' in local_var_params:
        form_params['file'] = local_var_params['file']

    # Use the structured body if present; a stream request overrides it.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/products/{product_id}/protocol-mappings',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='UploadProtocolMappingsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def batch_list_edge_apps_async(self, request):
    """Query the application list.

    :param BatchListEdgeAppsRequest request
    :return: BatchListEdgeAppsResponse
    """
    response = self.batch_list_edge_apps_with_http_info(request)
    return response
def batch_list_edge_apps_with_http_info(self, request):
    """Query the application list.

    :param BatchListEdgeAppsRequest request
    :return: BatchListEdgeAppsResponse
    """
    all_params = ['edge_app_id', 'offset', 'limit', 'app_type', 'function_type']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}

    query_params = [
        (key, local_var_params[key])
        for key in ('edge_app_id', 'offset', 'limit', 'app_type',
                    'function_type')
        if key in local_var_params
    ]

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-apps',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='BatchListEdgeAppsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_edge_app_async(self, request):
    """Create an application.

    :param CreateEdgeAppRequest request
    :return: CreateEdgeAppResponse
    """
    response = self.create_edge_app_with_http_info(request)
    return response
def create_edge_app_with_http_info(self, request):
    """Create an application.

    :param CreateEdgeAppRequest request
    :return: CreateEdgeAppResponse
    """
    all_params = ['create_edge_app_request_body']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    # Use the structured body if present; a stream request overrides it.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-apps',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateEdgeAppResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_edge_app_async(self, request):
    """Delete an application.

    :param DeleteEdgeAppRequest request
    :return: DeleteEdgeAppResponse
    """
    response = self.delete_edge_app_with_http_info(request)
    return response
def delete_edge_app_with_http_info(self, request):
    """Delete an application.

    :param DeleteEdgeAppRequest request
    :return: DeleteEdgeAppResponse
    """
    all_params = ['edge_app_id']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_app_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-apps/{edge_app_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteEdgeAppResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_edge_app_async(self, request):
    """Query an application.

    :param ShowEdgeAppRequest request
    :return: ShowEdgeAppResponse
    """
    response = self.show_edge_app_with_http_info(request)
    return response
def show_edge_app_with_http_info(self, request):
    """Query an application.

    :param ShowEdgeAppRequest request
    :return: ShowEdgeAppResponse
    """
    all_params = ['edge_app_id']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_app_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-apps/{edge_app_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowEdgeAppResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def batch_list_edge_app_versions_async(self, request):
    """Query the application version list.

    :param BatchListEdgeAppVersionsRequest request
    :return: BatchListEdgeAppVersionsResponse
    """
    response = self.batch_list_edge_app_versions_with_http_info(request)
    return response
def batch_list_edge_app_versions_with_http_info(self, request):
    """Query the application version list.

    :param BatchListEdgeAppVersionsRequest request
    :return: BatchListEdgeAppVersionsResponse
    """
    all_params = ['edge_app_id', 'version', 'offset', 'limit', 'ai_card_type', 'arch', 'state']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_app_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = [
        (key, local_var_params[key])
        for key in ('version', 'offset', 'limit', 'ai_card_type',
                    'arch', 'state')
        if key in local_var_params
    ]

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-apps/{edge_app_id}/versions',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='BatchListEdgeAppVersionsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_edge_application_version_async(self, request):
    """Create an application version.

    :param CreateEdgeApplicationVersionRequest request
    :return: CreateEdgeApplicationVersionResponse
    """
    response = self.create_edge_application_version_with_http_info(request)
    return response
def create_edge_application_version_with_http_info(self, request):
    """Create an application version.

    :param CreateEdgeApplicationVersionRequest request
    :return: CreateEdgeApplicationVersionResponse
    """
    all_params = ['edge_app_id', 'create_edge_application_version_request_body']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_app_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    # Use the structured body if present; a stream request overrides it.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-apps/{edge_app_id}/versions',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateEdgeApplicationVersionResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_edge_application_version_async(self, request):
    """Delete an application version.

    :param DeleteEdgeApplicationVersionRequest request
    :return: DeleteEdgeApplicationVersionResponse
    """
    response = self.delete_edge_application_version_with_http_info(request)
    return response
def delete_edge_application_version_with_http_info(self, request):
    """Delete an application version.

    :param DeleteEdgeApplicationVersionRequest request
    :return: DeleteEdgeApplicationVersionResponse
    """
    all_params = ['edge_app_id', 'version']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_app_id', 'version'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-apps/{edge_app_id}/versions/{version}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteEdgeApplicationVersionResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_edge_application_version_async(self, request):
    """Query application version details.

    :param ShowEdgeApplicationVersionRequest request
    :return: ShowEdgeApplicationVersionResponse
    """
    response = self.show_edge_application_version_with_http_info(request)
    return response
def show_edge_application_version_with_http_info(self, request):
    """Query application version details.

    :param ShowEdgeApplicationVersionRequest request
    :return: ShowEdgeApplicationVersionResponse
    """
    all_params = ['edge_app_id', 'version']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_app_id', 'version'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json'])
    }
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-apps/{edge_app_id}/versions/{version}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowEdgeApplicationVersionResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_edge_application_version_async(self, request):
    """Modify an application version.

    :param UpdateEdgeApplicationVersionRequest request
    :return: UpdateEdgeApplicationVersionResponse
    """
    response = self.update_edge_application_version_with_http_info(request)
    return response
def update_edge_application_version_with_http_info(self, request):
    """Modify an application version.

    :param UpdateEdgeApplicationVersionRequest request
    :return: UpdateEdgeApplicationVersionResponse
    """
    all_params = ['edge_app_id', 'version', 'update_edge_application_version_request_body']
    # Collect only the attributes actually set on the request object.
    local_var_params = {
        name: getattr(request, name)
        for name in request.attribute_map
        if hasattr(request, name)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_app_id', 'version'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []

    header_params = {
        'Content-Type': http_utils.select_header_content_type(
            ['application/json;charset=UTF-8'])
    }
    form_params = {}

    # Use the structured body if present; a stream request overrides it.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-apps/{edge_app_id}/versions/{version}',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='UpdateEdgeApplicationVersionResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_edge_application_version_state_async(self, request):
    """Update the state of an edge application version.

    Delegates to update_edge_application_version_state_with_http_info.

    :param UpdateEdgeApplicationVersionStateRequest request
    :return: UpdateEdgeApplicationVersionStateResponse
    """
    return self.update_edge_application_version_state_with_http_info(request)
def update_edge_application_version_state_with_http_info(self, request):
    """Update the state of an edge application version.

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param UpdateEdgeApplicationVersionStateRequest request
    :return: UpdateEdgeApplicationVersionStateResponse
    """
    all_params = ['edge_app_id', 'version', 'update_edge_application_version_state_request_body']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_app_id', 'version'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    header_params = {}
    form_params = {}

    # Plain body if supplied; a stream request overrides it with its file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-apps/{edge_app_id}/versions/{version}/state',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='UpdateEdgeApplicationVersionStateResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_external_entity_async(self, request):
    """Create an external entity on the specified edge node.

    Sets the access information of an external entity on the given node.
    Delegates to create_external_entity_with_http_info.

    :param CreateExternalEntityRequest request
    :return: CreateExternalEntityResponse
    """
    return self.create_external_entity_with_http_info(request)
def create_external_entity_with_http_info(self, request):
    """Create an external entity on the specified edge node.

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param CreateExternalEntityRequest request
    :return: CreateExternalEntityResponse
    """
    all_params = ['edge_node_id', 'create_external_entity_request_body']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    header_params = {}
    form_params = {}

    # Plain body if supplied; a stream request overrides it with its file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/externals',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateExternalEntityResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_external_entity_async(self, request):
    """Delete an external entity under the specified edge node.

    Delegates to delete_external_entity_with_http_info.

    :param DeleteExternalEntityRequest request
    :return: DeleteExternalEntityResponse
    """
    return self.delete_external_entity_with_http_info(request)
def delete_external_entity_with_http_info(self, request):
    """Delete an external entity under the specified edge node.

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param DeleteExternalEntityRequest request
    :return: DeleteExternalEntityResponse
    """
    all_params = ['edge_node_id', 'external_id']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id', 'external_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    header_params = {}
    form_params = {}

    # No request body for DELETE unless the request streams a file.
    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/externals/{external_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteExternalEntityResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_external_entity_async(self, request):
    """List the external entities under the specified edge node.

    Delegates to list_external_entity_with_http_info.

    :param ListExternalEntityRequest request
    :return: ListExternalEntityResponse
    """
    return self.list_external_entity_with_http_info(request)
def list_external_entity_with_http_info(self, request):
    """List the external entities under the specified edge node.

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param ListExternalEntityRequest request
    :return: ListExternalEntityResponse
    """
    all_params = ['edge_node_id', 'offset', 'limit']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    # Pagination parameters are optional and appended in a fixed order.
    query_params = []
    for key in ('offset', 'limit'):
        if key in local_var_params:
            query_params.append((key, local_var_params[key]))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/externals',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListExternalEntityResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_external_entity_async(self, request):
    """Query details of a specified external entity under a node.

    Delegates to show_external_entity_with_http_info.

    :param ShowExternalEntityRequest request
    :return: ShowExternalEntityResponse
    """
    return self.show_external_entity_with_http_info(request)
def show_external_entity_with_http_info(self, request):
    """Query details of a specified external entity under a node.

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param ShowExternalEntityRequest request
    :return: ShowExternalEntityResponse
    """
    all_params = ['edge_node_id', 'external_id']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id', 'external_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/externals/{external_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowExternalEntityResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_external_entity_async(self, request):
    """Update a specified external entity under an edge node.

    Modifies the access information of the external entity on the node.
    Delegates to update_external_entity_with_http_info.

    :param UpdateExternalEntityRequest request
    :return: UpdateExternalEntityResponse
    """
    return self.update_external_entity_with_http_info(request)
def update_external_entity_with_http_info(self, request):
    """Update a specified external entity under an edge node.

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param UpdateExternalEntityRequest request
    :return: UpdateExternalEntityResponse
    """
    all_params = ['edge_node_id', 'external_id', 'update_external_entity_request_body']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id', 'external_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    header_params = {}
    form_params = {}

    # Plain body if supplied; a stream request overrides it with its file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/externals/{external_id}',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='UpdateExternalEntityResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def batch_list_modules_async(self, request):
    """List the edge modules on a specified edge node (Console API).

    Delegates to batch_list_modules_with_http_info.

    :param BatchListModulesRequest request
    :return: BatchListModulesResponse
    """
    return self.batch_list_modules_with_http_info(request)
def batch_list_modules_with_http_info(self, request):
    """List the edge modules on a specified edge node (Console API).

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param BatchListModulesRequest request
    :return: BatchListModulesResponse
    """
    all_params = ['edge_node_id', 'offset', 'limit', 'app_type', 'function_type']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    # Optional filters are appended in a fixed order.
    query_params = []
    for key in ('offset', 'limit', 'app_type', 'function_type'):
        if key in local_var_params:
            query_params.append((key, local_var_params[key]))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/modules',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='BatchListModulesResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def create_module_async(self, request):
    """Create an edge module on a specified edge node (Console API).

    Delegates to create_module_with_http_info.

    :param CreateModuleRequest request
    :return: CreateModuleResponse
    """
    return self.create_module_with_http_info(request)
def create_module_with_http_info(self, request):
    """Create an edge module on a specified edge node (Console API).

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param CreateModuleRequest request
    :return: CreateModuleResponse
    """
    all_params = ['edge_node_id', 'create_module_request_body']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    header_params = {}
    form_params = {}

    # Plain body if supplied; a stream request overrides it with its file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/modules',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='CreateModuleResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_module_async(self, request):
    """Delete an edge module on a specified edge node (Console API).

    Delegates to delete_module_with_http_info.

    :param DeleteModuleRequest request
    :return: DeleteModuleResponse
    """
    return self.delete_module_with_http_info(request)
def delete_module_with_http_info(self, request):
    """Delete an edge module on a specified edge node (Console API).

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param DeleteModuleRequest request
    :return: DeleteModuleResponse
    """
    all_params = ['edge_node_id', 'module_id']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id', 'module_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/modules/{module_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteModuleResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_module_async(self, request):
    """Query a specified edge module on an edge node (Console API).

    Delegates to show_module_with_http_info.

    :param ShowModuleRequest request
    :return: ShowModuleResponse
    """
    return self.show_module_with_http_info(request)
def show_module_with_http_info(self, request):
    """Query a specified edge module on an edge node (Console API).

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param ShowModuleRequest request
    :return: ShowModuleResponse
    """
    all_params = ['edge_node_id', 'module_id']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id', 'module_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/modules/{module_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowModuleResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_module_async(self, request):
    """Update a specified edge module on an edge node (Console API).

    Delegates to update_module_with_http_info.

    :param UpdateModuleRequest request
    :return: UpdateModuleResponse
    """
    return self.update_module_with_http_info(request)
def update_module_with_http_info(self, request):
    """Update a specified edge module on an edge node (Console API).

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param UpdateModuleRequest request
    :return: UpdateModuleResponse
    """
    all_params = ['edge_node_id', 'module_id', 'update_module_request_body']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id', 'module_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    header_params = {}
    form_params = {}

    # Plain body if supplied; a stream request overrides it with its file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/modules/{module_id}',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='UpdateModuleResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_routes_async(self, request):
    """List the edge routes on a specified edge node.

    Delegates to list_routes_with_http_info.

    :param ListRoutesRequest request
    :return: ListRoutesResponse
    """
    return self.list_routes_with_http_info(request)
def list_routes_with_http_info(self, request):
    """List the edge routes on a specified edge node.

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param ListRoutesRequest request
    :return: ListRoutesResponse
    """
    all_params = ['edge_node_id', 'parsed']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        if hasattr(request, attr) else None
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    for key in ('parsed',):
        if key in local_var_params:
            query_params.append((key, local_var_params[key]))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/routes',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListRoutesResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_routes_async(self, request):
    """Set the edge routes on a specified edge node.

    Delegates to update_routes_with_http_info.

    :param UpdateRoutesRequest request
    :return: UpdateRoutesResponse
    """
    return self.update_routes_with_http_info(request)
def update_routes_with_http_info(self, request):
    """Set the edge routes on a specified edge node.

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param UpdateRoutesRequest request
    :return: UpdateRoutesResponse
    """
    all_params = ['edge_node_id', 'update_routes_request_body']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('edge_node_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    header_params = {}
    form_params = {}

    # Plain body if supplied; a stream request overrides it with its file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{edge_node_id}/routes',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='UpdateRoutesResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def batch_confirm_configs_new_async(self, request):
    """Batch-confirm delivered southbound 3rd-IA configuration items.

    Delegates to batch_confirm_configs_new_with_http_info.

    :param BatchConfirmConfigsNewRequest request
    :return: BatchConfirmConfigsNewResponse
    """
    return self.batch_confirm_configs_new_with_http_info(request)
def batch_confirm_configs_new_with_http_info(self, request):
    """Batch-confirm delivered southbound 3rd-IA configuration items.

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param BatchConfirmConfigsNewRequest request
    :return: BatchConfirmConfigsNewResponse
    """
    all_params = ['node_id', 'ia_id', 'batch_confirm_configs_request_body']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('node_id', 'ia_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    header_params = {}
    form_params = {}

    # Plain body if supplied; a stream request overrides it with its file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{node_id}/ias/{ia_id}/configs/batch-confirm',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='BatchConfirmConfigsNewResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def batch_import_configs_async(self, request):
    """Batch-import southbound 3rd-IA configuration items.

    Delegates to batch_import_configs_with_http_info.

    :param BatchImportConfigsRequest request
    :return: BatchImportConfigsResponse
    """
    return self.batch_import_configs_with_http_info(request)
def batch_import_configs_with_http_info(self, request):
    """Batch-import southbound 3rd-IA configuration items.

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param BatchImportConfigsRequest request
    :return: BatchImportConfigsResponse
    """
    all_params = ['node_id', 'ia_id', 'batch_import_configs_request_body']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('node_id', 'ia_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    header_params = {}
    form_params = {}

    # Plain body if supplied; a stream request overrides it with its file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{node_id}/ias/{ia_id}/configs/batch-import',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='BatchImportConfigsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_ia_config_async(self, request):
    """Delete a southbound 3rd-IA configuration item.

    Delegates to delete_ia_config_with_http_info.

    :param DeleteIaConfigRequest request
    :return: DeleteIaConfigResponse
    """
    return self.delete_ia_config_with_http_info(request)
def delete_ia_config_with_http_info(self, request):
    """Delete a southbound 3rd-IA configuration item.

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param DeleteIaConfigRequest request
    :return: DeleteIaConfigResponse
    """
    all_params = ['node_id', 'ia_id', 'config_id']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('node_id', 'ia_id', 'config_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{node_id}/ias/{ia_id}/configs/{config_id}',
        method='DELETE',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='DeleteIaConfigResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def list_ia_configs_async(self, request):
    """List southbound 3rd-IA configuration items.

    Delegates to list_ia_configs_with_http_info.

    :param ListIaConfigsRequest request
    :return: ListIaConfigsResponse
    """
    return self.list_ia_configs_with_http_info(request)
def list_ia_configs_with_http_info(self, request):
    """List southbound 3rd-IA configuration items.

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param ListIaConfigsRequest request
    :return: ListIaConfigsResponse
    """
    all_params = ['node_id', 'ia_id', 'offset', 'limit']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('node_id', 'ia_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    # Pagination parameters are optional and appended in a fixed order.
    query_params = []
    for key in ('offset', 'limit'):
        if key in local_var_params:
            query_params.append((key, local_var_params[key]))

    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{node_id}/ias/{ia_id}/configs',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ListIaConfigsResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def show_ia_config_async(self, request):
    """Query details of a southbound 3rd-IA configuration item.

    Delegates to show_ia_config_with_http_info.

    :param ShowIaConfigRequest request
    :return: ShowIaConfigResponse
    """
    return self.show_ia_config_with_http_info(request)
def show_ia_config_with_http_info(self, request):
    """Query details of a southbound 3rd-IA configuration item.

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param ShowIaConfigRequest request
    :return: ShowIaConfigResponse
    """
    all_params = ['node_id', 'ia_id', 'config_id']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('node_id', 'ia_id', 'config_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    header_params = {}
    form_params = {}

    body_params = None
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{node_id}/ias/{ia_id}/configs/{config_id}',
        method='GET',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='ShowIaConfigResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def update_ia_config_async(self, request):
    """Create or update a southbound 3rd-IA configuration item.

    Delegates to update_ia_config_with_http_info.

    :param UpdateIaConfigRequest request
    :return: UpdateIaConfigResponse
    """
    return self.update_ia_config_with_http_info(request)
def update_ia_config_with_http_info(self, request):
    """Create or update a southbound 3rd-IA configuration item.

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param UpdateIaConfigRequest request
    :return: UpdateIaConfigResponse
    """
    all_params = ['node_id', 'ia_id', 'config_id', 'update_ia_config_request_body']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('node_id', 'ia_id', 'config_id'):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    header_params = {}
    form_params = {}

    # Plain body if supplied; a stream request overrides it with its file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/edge-nodes/{node_id}/ias/{ia_id}/configs/{config_id}',
        method='PUT',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='UpdateIaConfigResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def batch_associate_na_to_nodes_async(self, request):
    """Batch-authorize northbound NA information to edge nodes.

    Southbound IA applications on authorized nodes can reach the NA's
    interfaces through the API gateway deployed on the node.
    Delegates to batch_associate_na_to_nodes_with_http_info.

    :param BatchAssociateNaToNodesRequest request
    :return: BatchAssociateNaToNodesResponse
    """
    return self.batch_associate_na_to_nodes_with_http_info(request)
def batch_associate_na_to_nodes_with_http_info(self, request):
    """Batch-authorize northbound NA information to edge nodes.

    Assembles the path/query/header/body pieces from *request* and
    dispatches the HTTP call through ``self.call_api``.

    :param BatchAssociateNaToNodesRequest request
    :return: BatchAssociateNaToNodesResponse
    """
    all_params = ['na_id', 'action', 'batch_authorize_na_to_nodes_request_body']

    # Copy only the attributes actually present on the request object.
    local_var_params = {
        attr: getattr(request, attr)
        for attr in request.attribute_map
        if hasattr(request, attr)
    }

    collection_formats = {}

    path_params = {}
    for key in ('na_id',):
        if key in local_var_params:
            path_params[key] = local_var_params[key]

    query_params = []
    for key in ('action',):
        if key in local_var_params:
            query_params.append((key, local_var_params[key]))

    header_params = {}
    form_params = {}

    # Plain body if supplied; a stream request overrides it with its file stream.
    body_params = local_var_params.get('body')
    if isinstance(request, SdkStreamRequest):
        body_params = request.get_file_stream()

    response_headers = []
    header_params['Content-Type'] = http_utils.select_header_content_type(
        ['application/json'])
    auth_settings = []

    return self.call_api(
        resource_path='/v2/{project_id}/nas/{na_id}/nodes',
        method='POST',
        path_params=path_params,
        query_params=query_params,
        header_params=header_params,
        body=body_params,
        post_params=form_params,
        response_type='BatchAssociateNaToNodesResponse',
        response_headers=response_headers,
        auth_settings=auth_settings,
        collection_formats=collection_formats,
        request_type=request.__class__.__name__)
def delete_na_async(self, request):
    """Delete northbound NA information.

    Edge nodes already assigned this NA are notified of the deletion.
    Delegates to delete_na_with_http_info.

    :param DeleteNaRequest request
    :return: DeleteNaResponse
    """
    return self.delete_na_with_http_info(request)
def delete_na_with_http_info(self, request):
"""删除北向NA信息
删除北向NA信息,如果有边缘节点已分配该NA信息,会通知到该边缘节点。
:param DeleteNaRequest request
:return: DeleteNaResponse
"""
all_params = ['na_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'na_id' in local_var_params:
path_params['na_id'] = local_var_params['na_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/{project_id}/nas/{na_id}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteNaResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
    def list_na_authorized_nodes_async(self, request):
        """Query the edge nodes this northbound NA is assigned to.

        :param ListNaAuthorizedNodesRequest request
        :return: ListNaAuthorizedNodesResponse
        """
        # Thin delegate; the async behaviour comes from call_api, which
        # always passes async_request=True downstream.
        return self.list_na_authorized_nodes_with_http_info(request)
def list_na_authorized_nodes_with_http_info(self, request):
"""查询该北向NA信息的已分配节点
查询该北向NA信息的已分配节点
:param ListNaAuthorizedNodesRequest request
:return: ListNaAuthorizedNodesResponse
"""
all_params = ['na_id', 'offset', 'limit']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'na_id' in local_var_params:
path_params['na_id'] = local_var_params['na_id']
query_params = []
if 'offset' in local_var_params:
query_params.append(('offset', local_var_params['offset']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/{project_id}/nas/{na_id}/nodes',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListNaAuthorizedNodesResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
    def list_nas_async(self, request):
        """Query the list of northbound NA records.

        :param ListNasRequest request
        :return: ListNasResponse
        """
        # Thin delegate; the async behaviour comes from call_api, which
        # always passes async_request=True downstream.
        return self.list_nas_with_http_info(request)
def list_nas_with_http_info(self, request):
"""查询北向NA信息列表
查询北向NA信息列表
:param ListNasRequest request
:return: ListNasResponse
"""
all_params = ['name', 'offset', 'limit']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
if 'name' in local_var_params:
query_params.append(('name', local_var_params['name']))
if 'offset' in local_var_params:
query_params.append(('offset', local_var_params['offset']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/{project_id}/nas',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListNasResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
    def show_na_async(self, request):
        """Query details of a northbound NA record.

        :param ShowNaRequest request
        :return: ShowNaResponse
        """
        # Thin delegate; the async behaviour comes from call_api, which
        # always passes async_request=True downstream.
        return self.show_na_with_http_info(request)
def show_na_with_http_info(self, request):
"""查询北向NA信息详情
查询北向NA信息详情
:param ShowNaRequest request
:return: ShowNaResponse
"""
all_params = ['na_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'na_id' in local_var_params:
path_params['na_id'] = local_var_params['na_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/{project_id}/nas/{na_id}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowNaResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
    def update_na_async(self, request):
        """Create or update northbound NA info.

        On update, every edge node that has been assigned this northbound
        NA is notified of the change.

        :param UpdateNaRequest request
        :return: UpdateNaResponse
        """
        # Thin delegate; the async behaviour comes from call_api, which
        # always passes async_request=True downstream.
        return self.update_na_with_http_info(request)
def update_na_with_http_info(self, request):
"""创建&更新北向NA信息
创建&更新北向NA信息,当更新北向NA信息时,会通知到已分配该北向NA的所有边缘节点。
:param UpdateNaRequest request
:return: UpdateNaResponse
"""
all_params = ['na_id', 'update_na_request_body']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'na_id' in local_var_params:
path_params['na_id'] = local_var_params['na_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json'])
auth_settings = []
return self.call_api(
resource_path='/v2/{project_id}/nas/{na_id}',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateNaResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
    def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None,
                 post_params=None, response_type=None, response_headers=None, auth_settings=None,
                 collection_formats=None, request_type=None):
        """Make the HTTP request asynchronously and return deserialized data.

        :param resource_path: Path to method endpoint.
        :param method: HTTP method to call.
        :param path_params: Path parameters in the url.
        :param query_params: Query parameters in the url.
        :param header_params: Header parameters to be
            placed in the request header.
        :param body: Request body.
        :param post_params dict: Request post form parameters,
            for `application/x-www-form-urlencoded`, `multipart/form-data`.
        :param auth_settings list: Auth Settings names for the request.
        :param response_type: Response data type.
        :param response_headers: Header should be added to response data.
        :param collection_formats: dict of collection formats for path, query,
            header, and post parameters.
        :param request_type: Request data type.
        :return:
            Return the response directly.
        """
        return self.do_http_request(
            method=method,
            resource_path=resource_path,
            path_params=path_params,
            query_params=query_params,
            header_params=header_params,
            body=body,
            post_params=post_params,
            response_type=response_type,
            response_headers=response_headers,
            collection_formats=collection_formats,
            request_type=request_type,
            # Every request issued through this client runs asynchronously.
            async_request=True)
| 30.568015
| 113
| 0.624972
| 10,150
| 99,774
| 5.720591
| 0.036847
| 0.047396
| 0.082943
| 0.034169
| 0.919107
| 0.90755
| 0.882612
| 0.85342
| 0.839608
| 0.696266
| 0
| 0.001169
| 0.288111
| 99,774
| 3,263
| 114
| 30.577383
| 0.816312
| 0.108866
| 0
| 0.837321
| 0
| 0
| 0.112786
| 0.046821
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0
| 0.008506
| 0
| 0.115364
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7703f48930373df8ef1edf9beea7d24b2b8bccd7
| 19,017
|
bzl
|
Python
|
third_party/boringssl/BUILD.generated_tests.bzl
|
brandonpollack23/bazel-buildfiles-upstream
|
b904fe862de01f481671ce80ce18b88546d8bf68
|
[
"Apache-2.0"
] | 142
|
2017-06-14T11:39:55.000Z
|
2022-03-20T15:08:39.000Z
|
third_party/boringssl/BUILD.generated_tests.bzl
|
brandonpollack23/bazel-buildfiles-upstream
|
b904fe862de01f481671ce80ce18b88546d8bf68
|
[
"Apache-2.0"
] | 17
|
2015-07-13T02:37:55.000Z
|
2017-05-02T07:12:33.000Z
|
third_party/boringssl/BUILD.generated_tests.bzl
|
brandonpollack23/bazel-buildfiles-upstream
|
b904fe862de01f481671ce80ce18b88546d8bf68
|
[
"Apache-2.0"
] | 54
|
2015-04-20T07:27:43.000Z
|
2017-04-27T21:17:32.000Z
|
# This file is created by generate_build_files.py. Do not edit manually.
# Shared test-harness sources linked into every generated test target.
test_support_sources = [
    "src/crypto/test/file_test.cc",
    "src/crypto/test/test_util.cc",
]
def _boringssl_test(name, src, copts, support, args = [], data = [], uses_ssl = False):
    """Declares one small cc_test that links the shared test harness."""
    native.cc_test(
        name = name,
        size = "small",
        srcs = [src] + support,
        args = args,
        copts = copts,
        data = data,
        deps = [":crypto", ":ssl"] if uses_ssl else [":crypto"],
    )

# Plain unit tests: (target name, test source). All link only :crypto.
_SIMPLE_TESTS = [
    ("aes_test", "src/crypto/aes/aes_test.cc"),
    ("base64_test", "src/crypto/base64/base64_test.cc"),
    ("bio_test", "src/crypto/bio/bio_test.cc"),
    ("bn_test", "src/crypto/bn/bn_test.cc"),
    ("bytestring_test", "src/crypto/bytestring/bytestring_test.cc"),
    ("cmac_test", "src/crypto/cmac/cmac_test.cc"),
    ("constant_time_test", "src/crypto/constant_time_test.c"),
    ("x25519_test", "src/crypto/curve25519/x25519_test.cc"),
    ("dh_test", "src/crypto/dh/dh_test.cc"),
    ("digest_test", "src/crypto/digest/digest_test.cc"),
    ("dsa_test", "src/crypto/dsa/dsa_test.c"),
    ("ec_test", "src/crypto/ec/ec_test.cc"),
    ("example_mul", "src/crypto/ec/example_mul.c"),
    ("ecdsa_test", "src/crypto/ecdsa/ecdsa_test.cc"),
    ("err_test", "src/crypto/err/err_test.cc"),
    ("evp_extra_test", "src/crypto/evp/evp_extra_test.cc"),
    ("pbkdf_test", "src/crypto/evp/pbkdf_test.cc"),
    ("hkdf_test", "src/crypto/hkdf/hkdf_test.c"),
    ("lhash_test", "src/crypto/lhash/lhash_test.c"),
    ("gcm_test", "src/crypto/modes/gcm_test.c"),
    ("pkcs8_test", "src/crypto/pkcs8/pkcs8_test.cc"),
    ("pkcs12_test", "src/crypto/pkcs8/pkcs12_test.cc"),
    ("refcount_test", "src/crypto/refcount_test.c"),
    ("rsa_test", "src/crypto/rsa/rsa_test.cc"),
    ("thread_test", "src/crypto/thread_test.c"),
    ("pkcs7_test", "src/crypto/x509/pkcs7_test.c"),
    ("x509_test", "src/crypto/x509/x509_test.cc"),
    ("tab_test", "src/crypto/x509v3/tab_test.c"),
    ("v3name_test", "src/crypto/x509v3/v3name_test.c"),
]

# AEAD tests: (cipher argument, vector file under src/crypto/cipher/test/).
# Target name is "aead_test_" + cipher with dashes mapped to underscores.
_AEAD_TESTS = [
    ("aes-128-gcm", "aes_128_gcm_tests.txt"),
    ("aes-128-key-wrap", "aes_128_key_wrap_tests.txt"),
    ("aes-256-gcm", "aes_256_gcm_tests.txt"),
    ("aes-256-key-wrap", "aes_256_key_wrap_tests.txt"),
    ("chacha20-poly1305", "chacha20_poly1305_tests.txt"),
    ("chacha20-poly1305-old", "chacha20_poly1305_old_tests.txt"),
    ("rc4-md5-tls", "rc4_md5_tls_tests.txt"),
    ("rc4-sha1-tls", "rc4_sha1_tls_tests.txt"),
    ("aes-128-cbc-sha1-tls", "aes_128_cbc_sha1_tls_tests.txt"),
    ("aes-128-cbc-sha1-tls-implicit-iv", "aes_128_cbc_sha1_tls_implicit_iv_tests.txt"),
    ("aes-128-cbc-sha256-tls", "aes_128_cbc_sha256_tls_tests.txt"),
    ("aes-256-cbc-sha1-tls", "aes_256_cbc_sha1_tls_tests.txt"),
    ("aes-256-cbc-sha1-tls-implicit-iv", "aes_256_cbc_sha1_tls_implicit_iv_tests.txt"),
    ("aes-256-cbc-sha256-tls", "aes_256_cbc_sha256_tls_tests.txt"),
    ("aes-256-cbc-sha384-tls", "aes_256_cbc_sha384_tls_tests.txt"),
    ("des-ede3-cbc-sha1-tls", "des_ede3_cbc_sha1_tls_tests.txt"),
    ("des-ede3-cbc-sha1-tls-implicit-iv", "des_ede3_cbc_sha1_tls_implicit_iv_tests.txt"),
    ("rc4-md5-ssl3", "rc4_md5_ssl3_tests.txt"),
    ("rc4-sha1-ssl3", "rc4_sha1_ssl3_tests.txt"),
    ("aes-128-cbc-sha1-ssl3", "aes_128_cbc_sha1_ssl3_tests.txt"),
    ("aes-256-cbc-sha1-ssl3", "aes_256_cbc_sha1_ssl3_tests.txt"),
    ("des-ede3-cbc-sha1-ssl3", "des_ede3_cbc_sha1_ssl3_tests.txt"),
    ("aes-128-ctr-hmac-sha256", "aes_128_ctr_hmac_sha256.txt"),
    ("aes-256-ctr-hmac-sha256", "aes_256_ctr_hmac_sha256.txt"),
]

# Tests driven by a single vector file: (target name, source, data file).
_FILE_TESTS = [
    ("cipher_test", "src/crypto/cipher/cipher_test.cc", "src/crypto/cipher/test/cipher_test.txt"),
    ("ed25519_test", "src/crypto/curve25519/ed25519_test.cc", "src/crypto/curve25519/ed25519_tests.txt"),
    ("evp_test", "src/crypto/evp/evp_test.cc", "src/crypto/evp/evp_tests.txt"),
    ("hmac_test", "src/crypto/hmac/hmac_test.cc", "src/crypto/hmac/hmac_tests.txt"),
    ("poly1305_test", "src/crypto/poly1305/poly1305_test.cc", "src/crypto/poly1305/poly1305_test.txt"),
]

# SSL-layer tests: link both :crypto and :ssl.
_SSL_TESTS = [
    ("pqueue_test", "src/ssl/pqueue/pqueue_test.c"),
    ("ssl_test", "src/ssl/ssl_test.cc"),
]

def create_tests(copts):
    """Declares every BoringSSL test target, one cc_test per table entry.

    Declares the same set of targets (names, srcs, args, data, deps) as the
    previous fully unrolled version; declaration order inside a macro has no
    effect on Bazel's target graph.
    """
    support = test_support_sources + native.glob(["src/crypto/test/*.h"])

    for name, src in _SIMPLE_TESTS:
        _boringssl_test(name, src, copts, support)

    for cipher, vectors in _AEAD_TESTS:
        path = "src/crypto/cipher/test/" + vectors
        _boringssl_test(
            "aead_test_" + cipher.replace("-", "_"),
            "src/crypto/cipher/aead_test.cc",
            copts,
            support,
            args = [cipher, "$(location %s)" % path],
            data = [path],
        )

    for name, src, path in _FILE_TESTS:
        _boringssl_test(
            name,
            src,
            copts,
            support,
            args = ["$(location %s)" % path],
            data = [path],
        )

    for name, src in _SSL_TESTS:
        _boringssl_test(name, src, copts, support, uses_ssl = True)
| 27.402017
| 91
| 0.577536
| 2,288
| 19,017
| 4.493007
| 0.043269
| 0.104183
| 0.109436
| 0.15428
| 0.934922
| 0.932782
| 0.859825
| 0.809241
| 0.776459
| 0.746304
| 0
| 0.036474
| 0.273387
| 19,017
| 693
| 92
| 27.441558
| 0.707483
| 0.003681
| 0
| 0.611727
| 1
| 0
| 0.385643
| 0.300818
| 0
| 0
| 0
| 0
| 0
| 1
| 0.001585
| false
| 0
| 0
| 0
| 0.001585
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
771c05104910fc9bc0f80052a40fba6b9ec61286
| 4,236
|
py
|
Python
|
tests/008-test-request.py
|
arnaudsj/restkit
|
cf035d9245115ad6fa25e623bcf1bec5eb85cfff
|
[
"MIT"
] | 1
|
2015-11-05T13:59:20.000Z
|
2015-11-05T13:59:20.000Z
|
tests/008-test-request.py
|
arnaudsj/restkit
|
cf035d9245115ad6fa25e623bcf1bec5eb85cfff
|
[
"MIT"
] | null | null | null |
tests/008-test-request.py
|
arnaudsj/restkit
|
cf035d9245115ad6fa25e623bcf1bec5eb85cfff
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -
#
# This file is part of restkit released under the MIT license.
# See the NOTICE for more information.
import t
from restkit import request
from _server_test import HOST, PORT
# A multi-line payload large enough to exercise chunked request bodies:
# the same sentence repeated on 55 newline-separated lines.
LONG_BODY_PART = "\n".join(
    ["This is a relatively long body, that we send to the client..."] * 55
)
def test_001():
    # A plain GET on the test server root succeeds with the static
    # "welcome" body.
    url = "http://%s:%s" % (HOST, PORT)
    resp = request(url)
    t.eq(resp.status_int, 200)
    t.eq(resp.body_string(), "welcome")
def test_002():
    # POST a large payload and verify the server echoes it back intact.
    url = "http://%s:%s" % (HOST, PORT)
    resp = request(url, 'POST', body=LONG_BODY_PART)
    t.eq(resp.status_int, 200)
    echoed = resp.body_string()
    t.eq(len(echoed), len(LONG_BODY_PART))
    t.eq(echoed, LONG_BODY_PART)
def test_003():
    # Basic-auth credentials embedded in the URL: the correct password is
    # accepted, a wrong one is rejected with 403.
    good_url = "http://test:test@%s:%s/auth" % (HOST, PORT)
    t.eq(request(good_url).status_int, 200)
    bad_url = "http://test:test2@%s:%s/auth" % (HOST, PORT)
    t.eq(request(bad_url).status_int, 403)
| 48.689655
| 81
| 0.713881
| 799
| 4,236
| 3.760951
| 0.068836
| 0.157072
| 0.12812
| 0.311148
| 0.902496
| 0.893511
| 0.888186
| 0.888186
| 0.888186
| 0.87787
| 0
| 0.006779
| 0.199009
| 4,236
| 87
| 82
| 48.689655
| 0.878868
| 0.027856
| 0
| 0.792208
| 0
| 0
| 0.85051
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038961
| false
| 0
| 0.038961
| 0
| 0.077922
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
772d2f175d640d0711913f509c67e4ad3c192d67
| 5,722
|
py
|
Python
|
python/util/evaluate_dbg.py
|
mega002/DANN-MNLI
|
bd27c5ec70d2b68453dd16f90a3b8d2f28f7a945
|
[
"Apache-2.0"
] | null | null | null |
python/util/evaluate_dbg.py
|
mega002/DANN-MNLI
|
bd27c5ec70d2b68453dd16f90a3b8d2f28f7a945
|
[
"Apache-2.0"
] | null | null | null |
python/util/evaluate_dbg.py
|
mega002/DANN-MNLI
|
bd27c5ec70d2b68453dd16f90a3b8d2f28f7a945
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
def evaluate_classifier(classifier, eval_set, batch_size):
    """
    Compute accuracy and normalized cost of the model on a chosen dataset.

    classifier: callable returning (genres, predicted labels, cost) for the
        whole evaluation set
    eval_set: evaluation examples (dicts with a 'label' key)
    batch_size: minibatch size, used to normalize the returned cost
    """
    genres, hypotheses, cost = classifier(eval_set)
    cost = cost / batch_size
    # Only examples that fit into complete minibatches are scored.
    full_batch = int(len(eval_set) / batch_size) * batch_size
    # NOTE(review): accuracy divides by len(eval_set), not full_batch, so any
    # leftover examples count as wrong — presumably intentional; confirm.
    num_correct = sum(
        1 for i in range(full_batch) if hypotheses[i] == eval_set[i]['label']
    )
    return num_correct / float(len(eval_set)), cost
def evaluate_classifiers(classifier, eval_set, batch_size):
    """
    Compute label accuracy/cost and domain accuracy/cost of the model on a
    chosen dataset.

    classifier: the model's classifier; with dbg=True it returns genres, label
        predictions, label cost, domain predictions and domain cost
    eval_set: the chosen evaluation set, e.g. the dev-set; each example is a
        dict with at least 'label' and 'domain' keys
    batch_size: the minibatch size, used to normalise both costs
    """
    genres, hypotheses, cost, dhypotheses, dcost = classifier(eval_set, dbg=True)
    label_cost = cost / batch_size
    domain_cost = dcost / batch_size
    # Only examples that fit into complete minibatches are scored.
    full_batch = int(len(eval_set) / batch_size) * batch_size
    label_hits = 0
    domain_hits = 0
    for idx in range(full_batch):
        example = eval_set[idx]
        if hypotheses[idx] == example['label']:
            label_hits += 1
        if dhypotheses[idx] == example['domain']:
            domain_hits += 1
    total = float(len(eval_set))
    return label_hits / total, label_cost, domain_hits / total, domain_cost
def evaluate_classifier_genre(classifier, eval_set, batch_size):
    """
    Compute per-genre accuracy and cost of the model on a chosen dataset.

    Returns a dict mapping genre -> accuracy, plus the cost normalised by
    batch_size for the full evaluation dataset.

    classifier: the model's classifier; returns (genres, predicted labels, cost)
        for the evaluation dataset
    eval_set: the chosen evaluation set, e.g. the dev-set; each example is a
        dict with at least 'label' and 'genre' keys
    batch_size: the minibatch size, used to normalise the cost
    """
    genres, hypotheses, cost = classifier(eval_set)
    correct = dict((genre, 0) for genre in set(genres))
    count = dict((genre, 0) for genre in set(genres))
    cost = cost / batch_size
    # Only examples that fit into complete minibatches are scored.
    full_batch = int(len(eval_set) / batch_size) * batch_size
    for i in range(full_batch):
        hypothesis = hypotheses[i]
        genre = genres[i]
        if hypothesis == eval_set[i]['label']:
            correct[genre] += 1.
        count[genre] += 1.
        # Sanity check: the classifier's genre order must match the dataset's.
        if genre != eval_set[i]['genre']:
            # Parenthesised (was a Python 2 print statement) so the module
            # also parses under Python 3.
            print('welp!')
    accuracy = {k: correct[k] / count[k] for k in correct}
    return accuracy, cost
def evaluate_classifiers_genre(classifier, eval_set, batch_size):
    """
    Compute per-genre label accuracy and per-genre domain accuracy of the
    model on a chosen dataset, plus the normalised label and domain costs.

    Returns (accuracy_by_genre, cost, domain_accuracy_by_genre, domain_cost).

    classifier: the model's classifier; with dbg=True it returns genres, label
        predictions, label cost, domain predictions and domain cost
    eval_set: the chosen evaluation set, e.g. the dev-set; each example is a
        dict with at least 'label', 'domain' and 'genre' keys
    batch_size: the minibatch size, used to normalise both costs
    """
    genres, hypotheses, cost, dhypotheses, dcost = classifier(eval_set, dbg=True)
    correct = dict((genre, 0) for genre in set(genres))
    dcorrect = dict((genre, 0) for genre in set(genres))
    count = dict((genre, 0) for genre in set(genres))
    cost = cost / batch_size
    dcost = dcost / batch_size
    # Only examples that fit into complete minibatches are scored.
    full_batch = int(len(eval_set) / batch_size) * batch_size
    for i in range(full_batch):
        hypothesis = hypotheses[i]
        dhypothesis = dhypotheses[i]
        genre = genres[i]
        if hypothesis == eval_set[i]['label']:
            correct[genre] += 1.
        if dhypothesis == eval_set[i]['domain']:
            dcorrect[genre] += 1.
        count[genre] += 1.
        # Sanity check: the classifier's genre order must match the dataset's.
        if genre != eval_set[i]['genre']:
            # Parenthesised (was a Python 2 print statement) so the module
            # also parses under Python 3.
            print('welp!')
    accuracy = {k: correct[k] / count[k] for k in correct}
    daccuracy = {k: dcorrect[k] / count[k] for k in dcorrect}
    return accuracy, cost, daccuracy, dcost
def evaluate_classifier_dbg(classifier, eval_set, batch_size, snlidbg):
    """
    Compute accuracy and per-batch cost of the model on a chosen dataset,
    writing one tab-separated debug row per scored example to `snlidbg`.

    Each row holds pairID, both sentences, the gold label, the three logit
    values, and a trailing 'y'/'n' flag marking whether argmax(logits) matched
    the gold label.

    classifier: the model's classifier; with dbg=True it returns genres, logit
        rows, and cost for the evaluation dataset
    eval_set: the chosen evaluation set, e.g. the dev-set; each example is a
        dict with 'pairID', 'sentence1', 'sentence2' and 'label' keys
    batch_size: the minibatch size, used to normalise the cost
    snlidbg: an open, writable text stream for the debug rows
    """
    correct = 0
    genres, logits, cost = classifier(eval_set, dbg=True)
    cost = cost / batch_size
    # Only examples that fit into complete minibatches are scored.
    full_batch = int(len(eval_set) / batch_size) * batch_size
    for i in range(full_batch):
        logit = logits[i]
        hit = np.argmax(logit) == eval_set[i]['label']
        if hit:
            correct += 1
        # Single write call; the two original branches differed only in the
        # trailing 'y'/'n' flag.
        snlidbg.write('{}\t"{}"\t"{}"\t{}\t{}\t{}\t{}\t{}\n'.format(
            eval_set[i]['pairID'], eval_set[i]['sentence1'],
            eval_set[i]['sentence2'], eval_set[i]['label'],
            logit[0], logit[1], logit[2], 'y' if hit else 'n'))
    return correct / float(len(eval_set)), cost
| 42.073529
| 180
| 0.627753
| 787
| 5,722
| 4.454892
| 0.109276
| 0.08186
| 0.038791
| 0.045636
| 0.915573
| 0.908157
| 0.884769
| 0.875642
| 0.837136
| 0.799201
| 0
| 0.006643
| 0.263369
| 5,722
| 136
| 181
| 42.073529
| 0.825148
| 0
| 0
| 0.753086
| 0
| 0
| 0.048926
| 0.018638
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.012346
| null | null | 0.024691
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6224baff69b2332c67fd4373e3234a0b6b81a596
| 63
|
py
|
Python
|
p029.py
|
piohhmy/euler
|
f1b548a28a503fb8a0878fda75c32e1dcfb33243
|
[
"MIT"
] | null | null | null |
p029.py
|
piohhmy/euler
|
f1b548a28a503fb8a0878fda75c32e1dcfb33243
|
[
"MIT"
] | null | null | null |
p029.py
|
piohhmy/euler
|
f1b548a28a503fb8a0878fda75c32e1dcfb33243
|
[
"MIT"
] | null | null | null |
# Project Euler 29: count the distinct values of a**b for 2 <= a, b <= 100.
distinct_powers = set()
for base in range(2, 101):
    for exponent in range(2, 101):
        distinct_powers.add(base ** exponent)
print(len(distinct_powers))
| 31.5
| 62
| 0.634921
| 16
| 63
| 2.5
| 0.5625
| 0.35
| 0.4
| 0.55
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 0.142857
| 63
| 1
| 63
| 63
| 0.592593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
6271d76d5bdeada92511b50b997519e6035036cd
| 7,432
|
py
|
Python
|
src/support/base_models.py
|
pierpaolo28/Epidemics-Modelling
|
4fb18091f5f2f29cf0981841bb4902a7f36c1469
|
[
"MIT"
] | 11
|
2021-01-12T21:34:17.000Z
|
2022-03-22T08:01:06.000Z
|
src/support/base_models.py
|
pierpaolo28/Epidemics-Modelling
|
4fb18091f5f2f29cf0981841bb4902a7f36c1469
|
[
"MIT"
] | null | null | null |
src/support/base_models.py
|
pierpaolo28/Epidemics-Modelling
|
4fb18091f5f2f29cf0981841bb4902a7f36c1469
|
[
"MIT"
] | null | null | null |
import streamlit as st
from scipy.integrate import odeint
import numpy as np
import matplotlib.pyplot as plt
import plotly.graph_objects as go
from plotly.subplots import make_subplots
def sir_step_ahead(y, t, N, beta, gamma):
    """
    Right-hand side of the SIR ODE system, in the form odeint expects.

    y: current state (S, I, R); t: time, unused but required by odeint;
    N: total population; beta: infection rate; gamma: recovery rate.
    Returns the derivatives (dS/dt, dI/dt, dR/dt).
    """
    susceptible, infected, recovered = y
    new_infections = beta * infected * (susceptible / N)
    new_recoveries = gamma * infected
    return -new_infections, new_infections - new_recoveries, new_recoveries
def SIR_sim(N, sim_days, orig_infected, prob_infect, contact_with_people, days):
    """
    Integrate the SIR ODEs over `sim_days` days with a daily time grid.

    N: total population; sim_days: number of simulated days;
    orig_infected: initially infected count; prob_infect: per-contact infection
    probability; contact_with_people: daily contacts; days: infectious period.
    Returns the S, I, R trajectories plus the basic reproduction number R0.
    """
    infection_rate = prob_infect * contact_with_people   # beta
    recovery_rate = 1.0 / days                           # gamma
    reproduction_number = infection_rate / recovery_rate
    initial_state = N - orig_infected, orig_infected, 0
    timeline = np.linspace(0, sim_days - 1, sim_days)
    trajectory = odeint(sir_step_ahead, initial_state, timeline,
                        args=(N, infection_rate, recovery_rate))
    S, I, R = trajectory.T
    return S, I, R, reproduction_number
def SIR_plot(negatives, positives, survivors, R0):
    """
    Render an animated Plotly chart of a SIR trajectory via Streamlit.

    negatives: susceptible counts per day (S)
    positives: infected counts per day (I)
    survivors: recovered counts per day (R)
    R0: basic reproduction number, shown rounded in the chart title
    """
    fig = go.Figure(
        # Static traces: the full S/I/R curves shown before animation starts.
        data=[go.Scatter(x=[i for i in range(len(negatives))], y=negatives,
                         mode="lines", name='Susceptible',
                         line=dict(width=2, color="blue")),
              go.Scatter(x=[i for i in range(len(negatives))], y=positives,
                         mode="lines", name='Infected',
                         line=dict(width=2, color="green")),
              go.Scatter(x=[i for i in range(len(negatives))], y=survivors,
                         mode="lines", name='Recovered',
                         line=dict(width=2, color="orange"))],
        layout=go.Layout(
            title_text="Standard SIR Model (R<sub>0</sub>=" + str(round(R0, 2))+')', hovermode="closest",
            # Play/Pause buttons that drive the frame animation defined below.
            updatemenus=[
                {
                    "buttons": [
                        {
                            "args": [None, {"frame": {"duration": 100, "redraw": False},
                                            "fromcurrent": True,
                                            "transition": {"duration": 10,
                                                           "easing": "quadratic-in-out"}}],
                            "label": "Play",
                            "method": "animate"
                        },
                        {
                            "args": [[None], {"frame": {"duration": 0, "redraw": False},
                                              "mode": "immediate",
                                              "transition": {"duration": 0}}],
                            "label": "Pause",
                            "method": "animate"
                        }
                    ],
                    "direction": "left",
                    "pad": {"r": 10, "t": 87},
                    "showactive": False,
                    "type": "buttons",
                    "x": 0.14,
                    "xanchor": "right",
                    "y": 1.65,
                    "yanchor": "top"
                }
            ]),
        # One frame per day k: x is truncated to 0..k-1 so the curves sweep
        # left to right while animating (y keeps the full series each frame).
        frames=[go.Frame(
            data=[go.Scatter(
                x=[i for i in range(k)],
                y=negatives,
                mode="lines",
                line=dict(width=2, color="blue")),
                go.Scatter(
                    x=[i for i in range(k)],
                    y=positives,
                    mode="lines",
                    line=dict(width=2, color="green")),
                go.Scatter(
                    x=[i for i in range(k)],
                    y=survivors,
                    mode="lines",
                    line=dict(width=2, color="orange"))])
            for k in range(len(negatives))],
    )
    fig.update_xaxes(title_text="Number of Days")
    fig.update_yaxes(title_text="Number of Cases")
    # Hand the finished figure to Streamlit for display.
    st.plotly_chart(fig)
def seir_step_ahead(y, t, N, beta, gamma, delta):
    """
    Right-hand side of the SEIR ODE system, in the form odeint expects.

    y: current state (S, E, I, R); t: time, unused but required by odeint;
    N: total population; beta: infection rate; gamma: recovery rate;
    delta: rate at which exposed individuals become infectious.
    Returns the derivatives (dS/dt, dE/dt, dI/dt, dR/dt).
    """
    susceptible, exposed, infected, recovered = y
    exposure = beta * infected * (susceptible / N)
    onset = delta * exposed
    recovery = gamma * infected
    return -exposure, exposure - onset, onset - recovery, recovery
def SEIR_sim(N, sim_days, orig_esposed, prob_infect, contact_with_people, days, inc_days):
    """
    Integrate the SEIR ODEs over `sim_days` days with a daily time grid.

    N: total population; sim_days: number of simulated days;
    orig_esposed: initially exposed count; prob_infect: per-contact infection
    probability; contact_with_people: daily contacts; days: infectious period;
    inc_days: incubation period.
    Returns the S, E, I, R trajectories plus the basic reproduction number R0.
    """
    infection_rate = prob_infect * contact_with_people   # beta
    recovery_rate = 1.0 / days                           # gamma
    onset_rate = 1.0 / inc_days                          # delta
    reproduction_number = infection_rate / recovery_rate
    initial_state = N - orig_esposed, orig_esposed, 0, 0
    timeline = np.linspace(0, sim_days - 1, sim_days)
    trajectory = odeint(seir_step_ahead, initial_state, timeline,
                        args=(N, infection_rate, recovery_rate, onset_rate))
    S, E, I, R = trajectory.T
    return S, E, I, R, reproduction_number
def SEIR_plot(negatives, esposed, positives, survivors, R0):
    """
    Render an animated Plotly chart of a SEIR trajectory via Streamlit.

    negatives: susceptible counts per day (S)
    esposed: exposed counts per day (E; 'esposed' spelling kept from callers)
    positives: infected counts per day (I)
    survivors: recovered counts per day (R)
    R0: basic reproduction number, shown rounded in the chart title
    """
    fig = go.Figure(
        # Static traces: the full S/E/I/R curves shown before animation starts.
        data=[go.Scatter(x=[i for i in range(len(negatives))], y=negatives,
                         mode="lines", name='Susceptible',
                         line=dict(width=2, color="blue")),
              go.Scatter(x=[i for i in range(len(negatives))], y=esposed,
                         mode="lines", name='Esposed',
                         line=dict(width=2, color="red")),
              go.Scatter(x=[i for i in range(len(negatives))], y=positives,
                         mode="lines", name='Infected',
                         line=dict(width=2, color="green")),
              go.Scatter(x=[i for i in range(len(negatives))], y=survivors,
                         mode="lines", name='Recovered',
                         line=dict(width=2, color="orange"))],
        layout=go.Layout(
            title_text="Standard SEIR Model (R<sub>0</sub>=" + str(round(R0, 2))+')', hovermode="closest",
            # Play/Pause buttons that drive the frame animation defined below.
            updatemenus=[
                {
                    "buttons": [
                        {
                            "args": [None, {"frame": {"duration": 100, "redraw": False},
                                            "fromcurrent": True,
                                            "transition": {"duration": 10,
                                                           "easing": "quadratic-in-out"}}],
                            "label": "Play",
                            "method": "animate"
                        },
                        {
                            "args": [[None], {"frame": {"duration": 0, "redraw": False},
                                              "mode": "immediate",
                                              "transition": {"duration": 0}}],
                            "label": "Pause",
                            "method": "animate"
                        }
                    ],
                    "direction": "left",
                    "pad": {"r": 10, "t": 87},
                    "showactive": False,
                    "type": "buttons",
                    "x": 0.14,
                    "xanchor": "right",
                    "y": 1.65,
                    "yanchor": "top"
                }
            ]),
        # One frame per day k: x is truncated to 0..k-1 so the curves sweep
        # left to right while animating (y keeps the full series each frame).
        frames=[go.Frame(
            data=[go.Scatter(
                x=[i for i in range(k)],
                y=negatives,
                mode="lines",
                line=dict(width=2, color="blue")),
                go.Scatter(
                    x=[i for i in range(k)],
                    y=esposed,
                    mode="lines",
                    line=dict(width=2, color="red")),
                go.Scatter(
                    x=[i for i in range(k)],
                    y=positives,
                    mode="lines",
                    line=dict(width=2, color="green")),
                go.Scatter(
                    x=[i for i in range(k)],
                    y=survivors,
                    mode="lines",
                    line=dict(width=2, color="orange"))])
            for k in range(len(negatives))],
    )
    fig.update_xaxes(title_text="Number of Days")
    fig.update_yaxes(title_text="Number of Cases")
    # Hand the finished figure to Streamlit for display.
    st.plotly_chart(fig)
| 38.910995
| 106
| 0.416442
| 769
| 7,432
| 3.951886
| 0.176853
| 0.036854
| 0.046068
| 0.050675
| 0.859493
| 0.849622
| 0.828562
| 0.788088
| 0.753208
| 0.753208
| 0
| 0.018703
| 0.446044
| 7,432
| 190
| 107
| 39.115789
| 0.719456
| 0
| 0
| 0.726744
| 0
| 0
| 0.11141
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.034884
| false
| 0
| 0.034884
| 0
| 0.093023
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.