| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
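The rows below follow this schema, one record per source file. As a minimal sketch of how such a table might be inspected, assuming the rows are stored in a Parquet file readable by pandas (the file name `code_quality_signals.parquet` is hypothetical, not part of this dump), one could load it and filter on a few of the quality signals:

```python
# Minimal sketch: load the table and keep small Python files with a high
# fraction of unique words. The file name is a placeholder; point it at
# wherever the rows above are actually stored (requires pandas + pyarrow).
import pandas as pd

df = pd.read_parquet("code_quality_signals.parquet")

# Column names follow the schema listed above.
small_unique = df[
    (df["lang"] == "Python")
    & (df["size"] < 1000)
    & (df["qsc_code_frac_words_unique_quality_signal"] > 0.5)
]
print(small_unique[["max_stars_repo_name", "max_stars_repo_path", "size"]].head())
```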
hexsha: baa129495652e11f247baadde4f8c9ce3a98ac65 | size: 49 | ext: py | lang: Python
max_stars_repo: examples/get_hwid.py @ AndyOnTop/AndyOnTop (head a1007efe2bc69945fb6ce50e5ea786a3ad7afdce) | licenses: ["MIT"] | max_stars_count: null | stars event min/max: null / null
max_issues_repo: examples/get_hwid.py @ AndyOnTop/AndyOnTop (head a1007efe2bc69945fb6ce50e5ea786a3ad7afdce) | licenses: ["MIT"] | max_issues_count: null | issues event min/max: null / null
max_forks_repo: examples/get_hwid.py @ AndyOnTop/AndyOnTop (head a1007efe2bc69945fb6ce50e5ea786a3ad7afdce) | licenses: ["MIT"] | max_forks_count: null | forks event min/max: null / null
content:
from andyontop import get_hwid
print(get_hwid())
avg_line_length: 24.5 | max_line_length: 31 | alphanum_fraction: 0.816327
qsc_* quality signals (82 values, schema order): 8 | 49 | 4.75 | 0.75 | 0.368421 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.102041 | 49 | 2 | 32 | 24.5 | 0.863636 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.5 | 0 | 0.5 | 0.5 | 1 | 1 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1
effective: 0 | hits: 7
hexsha: baab4876b9fdf246f6fd742a57b608ed2e4491b5 | size: 254 | ext: py | lang: Python
max_stars_repo: tacotron2/tokenizers/__init__.py @ DashaSerdyuk/tacotron2 (head 1a88669670750f8b0e1aff76abc8b1b15300e1dc) | licenses: ["BSD-3-Clause"] | max_stars_count: 1 | stars event min/max: 2020-03-03T23:04:05.000Z / 2020-03-03T23:04:05.000Z
max_issues_repo: tacotron2/tokenizers/__init__.py @ DashaSerdyuk/tacotron2 (head 1a88669670750f8b0e1aff76abc8b1b15300e1dc) | licenses: ["BSD-3-Clause"] | max_issues_count: null | issues event min/max: null / null
max_forks_repo: tacotron2/tokenizers/__init__.py @ DashaSerdyuk/tacotron2 (head 1a88669670750f8b0e1aff76abc8b1b15300e1dc) | licenses: ["BSD-3-Clause"] | max_forks_count: 1 | forks event min/max: 2020-03-26T19:37:46.000Z / 2020-03-26T19:37:46.000Z
content:
from tacotron2.tokenizers.english_grapheme_tokenizers import EnglishGraphemeTokenizer
from tacotron2.tokenizers.russian_grapheme_tokenizer import RussianGraphemeTokenizer
from tacotron2.tokenizers.russian_phoneme_tokenizer import RussianPhonemeTokenizer
avg_line_length: 63.5 | max_line_length: 85 | alphanum_fraction: 0.929134
qsc_* quality signals (82 values, schema order): 24 | 254 | 9.583333 | 0.5 | 0.169565 | 0.3 | 0.26087 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.012397 | 0.047244 | 254 | 3 | 86 | 84.666667 | 0.938017 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
effective: 0 | hits: 7
hexsha: babad909a0270ad1d78b9d68bce2cd168b8ac288 | size: 23,049 | ext: py | lang: Python
max_stars_repo: tests/unit/operations/test_statusops.py @ senstb/aws-elastic-beanstalk-cli (head ef27ae50e8be34ccbe29bc6dc421323bddc3f485) | licenses: ["Apache-2.0"] | max_stars_count: null | stars event min/max: null / null
max_issues_repo: tests/unit/operations/test_statusops.py @ senstb/aws-elastic-beanstalk-cli (head ef27ae50e8be34ccbe29bc6dc421323bddc3f485) | licenses: ["Apache-2.0"] | max_issues_count: null | issues event min/max: null / null
max_forks_repo: tests/unit/operations/test_statusops.py @ senstb/aws-elastic-beanstalk-cli (head ef27ae50e8be34ccbe29bc6dc421323bddc3f485) | licenses: ["Apache-2.0"] | max_forks_count: null | forks event min/max: null / null
content:
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import datetime
from dateutil import tz
import mock
import unittest
from ebcli.operations import statusops
from ebcli.objects.environment import Environment
from ebcli.objects.platform import PlatformVersion
from ebcli.objects.tier import Tier
from .. import mock_responses
class TestStatusOps(unittest.TestCase):
@mock.patch('ebcli.operations.statusops.elasticbeanstalk.get_environment')
@mock.patch('ebcli.operations.statusops.solution_stack_ops.find_solution_stack_from_string')
@mock.patch('ebcli.operations.statusops.io.log_alert')
@mock.patch('ebcli.operations.statusops.io.echo')
@mock.patch('ebcli.operations.statusops.gitops.get_default_branch')
@mock.patch('ebcli.operations.statusops.gitops.get_default_repository')
def test_status__non_verbose_mode__codecommit_setup__using_non_latest_platform(
self,
get_default_repository_mock,
get_default_branch_mock,
echo_mock,
log_alert_mock,
find_solution_stack_from_string_mock,
get_environment_mock
):
environment_object = Environment.json_to_environment_object(
mock_responses.DESCRIBE_ENVIRONMENTS_RESPONSE['Environments'][0]
)
environment_object.platform = PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.5')
get_environment_mock.return_value = environment_object
find_solution_stack_from_string_mock.return_value = PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')
get_default_branch_mock.return_value = 'branch'
get_default_repository_mock.return_value = 'repository'
statusops.status('my-application', 'environment-1', False)
log_alert_mock.assert_called_once_with(
'There is a newer version of the platform used by your environment. '
'You can upgrade your environment to the most recent platform version by typing "eb upgrade".'
)
echo_mock.assert_has_calls(
[
mock.call('Environment details for:', 'environment-1'),
mock.call(' Application name:', 'my-application'),
mock.call(' Region:', 'us-west-2'),
mock.call(' Deployed Version:', 'Sample Application'),
mock.call(' Environment ID:', 'e-sfsdfsfasdads'),
mock.call(' Platform:', PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.5')),
mock.call(' Tier:', Tier.from_raw_string('webserver')),
mock.call(' CNAME:', 'environment-1.us-west-2.elasticbeanstalk.com'),
mock.call(' Updated:', datetime.datetime(2018, 3, 27, 23, 47, 41, 830000, tzinfo=tz.tzutc())),
mock.call(' Status:', 'Ready'),
mock.call(' Health:', 'Green'),
mock.call('Current CodeCommit settings:'),
mock.call(' Repository: repository'),
mock.call(' Branch: branch')
]
)
@mock.patch('ebcli.operations.statusops.elasticbeanstalk.get_environment')
@mock.patch('ebcli.operations.statusops.solution_stack_ops.find_solution_stack_from_string')
@mock.patch('ebcli.operations.statusops.io.log_alert')
@mock.patch('ebcli.operations.statusops.io.echo')
@mock.patch('ebcli.operations.statusops.gitops.get_default_branch')
@mock.patch('ebcli.operations.statusops.gitops.get_default_repository')
def test_status__non_verbose_mode__codecommit_not_setup__using_non_latest_platform(
self,
get_default_repository_mock,
get_default_branch_mock,
echo_mock,
log_alert_mock,
find_solution_stack_from_string_mock,
get_environment_mock
):
environment_object = Environment.json_to_environment_object(
mock_responses.DESCRIBE_ENVIRONMENTS_RESPONSE['Environments'][0]
)
environment_object.platform = PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.5')
get_environment_mock.return_value = environment_object
get_environment_mock.platform = PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.5')
find_solution_stack_from_string_mock.return_value = PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')
get_default_branch_mock.return_value = None
get_default_repository_mock.return_value = None
statusops.status('my-application', 'environment-1', False)
log_alert_mock.assert_called_once_with(
'There is a newer version of the platform used by your environment. '
'You can upgrade your environment to the most recent platform version by typing "eb upgrade".'
)
echo_mock.assert_has_calls(
[
mock.call('Environment details for:', 'environment-1'),
mock.call(' Application name:', 'my-application'),
mock.call(' Region:', 'us-west-2'),
mock.call(' Deployed Version:', 'Sample Application'),
mock.call(' Environment ID:', 'e-sfsdfsfasdads'),
mock.call(' Platform:', PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.5')),
mock.call(' Tier:', Tier.from_raw_string('webserver')),
mock.call(' CNAME:', 'environment-1.us-west-2.elasticbeanstalk.com'),
mock.call(' Updated:', datetime.datetime(2018, 3, 27, 23, 47, 41, 830000, tzinfo=tz.tzutc())),
mock.call(' Status:', 'Ready'),
mock.call(' Health:', 'Green')
]
)
@mock.patch('ebcli.operations.statusops.elasticbeanstalk.get_environment')
@mock.patch('ebcli.operations.statusops.solution_stack_ops.find_solution_stack_from_string')
@mock.patch('ebcli.operations.statusops.io.log_alert')
@mock.patch('ebcli.operations.statusops.io.echo')
@mock.patch('ebcli.operations.statusops.gitops.get_default_branch')
@mock.patch('ebcli.operations.statusops.gitops.get_default_repository')
def test_status__non_verbose_mode__codecommit_setup__using_latest_platform(
self,
get_default_repository_mock,
get_default_branch_mock,
echo_mock,
log_alert_mock,
find_solution_stack_from_string_mock,
get_environment_mock
):
environment_object = Environment.json_to_environment_object(
mock_responses.DESCRIBE_ENVIRONMENTS_RESPONSE['Environments'][0]
)
environment_object.platform = PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')
get_environment_mock.return_value = environment_object
find_solution_stack_from_string_mock.return_value = PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')
get_default_branch_mock.return_value = 'branch'
get_default_repository_mock.return_value = 'repository'
statusops.status('my-application', 'environment-1', False)
log_alert_mock.assert_not_called()
echo_mock.assert_has_calls(
[
mock.call('Environment details for:', 'environment-1'),
mock.call(' Application name:', 'my-application'),
mock.call(' Region:', 'us-west-2'),
mock.call(' Deployed Version:', 'Sample Application'),
mock.call(' Environment ID:', 'e-sfsdfsfasdads'),
mock.call(' Platform:', PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')),
mock.call(' Tier:', Tier.from_raw_string('webserver')),
mock.call(' CNAME:', 'environment-1.us-west-2.elasticbeanstalk.com'),
mock.call(' Updated:', datetime.datetime(2018, 3, 27, 23, 47, 41, 830000, tzinfo=tz.tzutc())),
mock.call(' Status:', 'Ready'),
mock.call(' Health:', 'Green'),
mock.call('Current CodeCommit settings:'),
mock.call(' Repository: repository'),
mock.call(' Branch: branch')
]
)
@mock.patch('ebcli.operations.statusops.elasticbeanstalk.get_environment')
@mock.patch('ebcli.operations.statusops.solution_stack_ops.find_solution_stack_from_string')
@mock.patch('ebcli.operations.statusops.io.log_alert')
@mock.patch('ebcli.operations.statusops.io.echo')
@mock.patch('ebcli.operations.statusops.gitops.get_default_branch')
@mock.patch('ebcli.operations.statusops.gitops.get_default_repository')
@mock.patch('ebcli.operations.statusops.elasticbeanstalk.get_environment_resources')
@mock.patch('ebcli.operations.statusops.elbv2.get_target_groups_for_load_balancer')
@mock.patch('ebcli.operations.statusops.elbv2.get_target_group_healths')
def test_status__verbose_mode__elbv2(
self,
get_target_group_healths_mock,
get_target_groups_for_load_balancer_mock,
get_environment_resources_mock,
get_default_repository_mock,
get_default_branch_mock,
echo_mock,
log_alert_mock,
find_solution_stack_from_string_mock,
get_environment_mock
):
environment_object = Environment.json_to_environment_object(
mock_responses.DESCRIBE_ENVIRONMENTS_RESPONSE['Environments'][0]
)
environment_object.platform = PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')
get_environment_mock.return_value = environment_object
find_solution_stack_from_string_mock.return_value = PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')
get_default_branch_mock.return_value = 'branch'
get_default_repository_mock.return_value = 'repository'
get_environment_resources_mock.return_value = mock_responses.DESCRIBE_ENVIRONMENT_RESOURCES_RESPONSE__ELBV2_ENVIRONMENT
get_target_groups_for_load_balancer_mock.return_value = mock_responses.DESCRIBE_TARGET_GROUPS_RESPONSE['TargetGroups']
get_target_group_healths_mock.return_value = {
"arn:aws:elasticloadbalancing:us-west-2:123123123123:targetgroup/awseb-AWSEB-179V6JWWL9HI5/e57decc4139bfdd2": mock_responses.DESCRIBE_TARGET_HEALTH_RESPONSE
}
statusops.status('my-application', 'environment-1', True)
log_alert_mock.assert_not_called()
echo_mock.assert_has_calls(
[
mock.call('Environment details for:', 'environment-1'),
mock.call(' Application name:', 'my-application'),
mock.call(' Region:', 'us-west-2'),
mock.call(' Deployed Version:', 'Sample Application'),
mock.call(' Environment ID:', 'e-sfsdfsfasdads'),
mock.call(' Platform:', PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')),
mock.call(' Tier:', Tier.from_raw_string('webserver')),
mock.call(' CNAME:', 'environment-1.us-west-2.elasticbeanstalk.com'),
mock.call(' Updated:', datetime.datetime(2018, 3, 27, 23, 47, 41, 830000, tzinfo=tz.tzutc())),
mock.call(' Status:', 'Ready'),
mock.call(' Health:', 'Green'),
mock.call(' Running instances:', 1),
mock.call(' ', 'i-01641763db1c0cb47: healthy'),
mock.call('Current CodeCommit settings:'),
mock.call(' Repository: repository'),
mock.call(' Branch: branch')
]
)
@mock.patch('ebcli.operations.statusops.elasticbeanstalk.get_environment')
@mock.patch('ebcli.operations.statusops.solution_stack_ops.find_solution_stack_from_string')
@mock.patch('ebcli.operations.statusops.io.log_alert')
@mock.patch('ebcli.operations.statusops.io.echo')
@mock.patch('ebcli.operations.statusops.gitops.get_default_branch')
@mock.patch('ebcli.operations.statusops.gitops.get_default_repository')
@mock.patch('ebcli.operations.statusops.elasticbeanstalk.get_environment_resources')
@mock.patch('ebcli.operations.statusops.elbv2.get_target_groups_for_load_balancer')
@mock.patch('ebcli.operations.statusops.elbv2.get_target_group_healths')
def test_status__verbose_mode__elbv2__elb_registration_in_progress__some_instances_are_not_registered_with_target_groups(
self,
get_target_group_healths_mock,
get_target_groups_for_load_balancer_mock,
get_environment_resources_mock,
get_default_repository_mock,
get_default_branch_mock,
echo_mock,
log_alert_mock,
find_solution_stack_from_string_mock,
get_environment_mock
):
environment_object = Environment.json_to_environment_object(
mock_responses.DESCRIBE_ENVIRONMENTS_RESPONSE['Environments'][0]
)
environment_object.platform = PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')
get_environment_mock.return_value = environment_object
find_solution_stack_from_string_mock.return_value = PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')
get_default_branch_mock.return_value = 'branch'
get_default_repository_mock.return_value = 'repository'
get_environment_resources_response = mock_responses.DESCRIBE_ENVIRONMENT_RESOURCES_RESPONSE__ELBV2_ENVIRONMENT
get_environment_resources_response['EnvironmentResources']['Instances'].append(
{
"Id": "i-12141763d121c0cb47"
}
)
get_environment_resources_mock.return_value = get_environment_resources_response
get_target_groups_for_load_balancer_mock.return_value = mock_responses.DESCRIBE_TARGET_GROUPS_RESPONSE['TargetGroups']
get_target_group_healths_mock.return_value = {
"arn:aws:elasticloadbalancing:us-west-2:123123123123:targetgroup/awseb-AWSEB-179V6JWWL9HI5/e57decc4139bfdd2": mock_responses.DESCRIBE_TARGET_HEALTH_RESPONSE__REGISTRATION_IN_PROGRESS
}
statusops.status('my-application', 'environment-1', True)
log_alert_mock.assert_not_called()
echo_mock.assert_has_calls(
[
mock.call('Environment details for:', 'environment-1'),
mock.call(' Application name:', 'my-application'),
mock.call(' Region:', 'us-west-2'),
mock.call(' Deployed Version:', 'Sample Application'),
mock.call(' Environment ID:', 'e-sfsdfsfasdads'),
mock.call(' Platform:', PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')),
mock.call(' Tier:', Tier.from_raw_string('webserver')),
mock.call(' CNAME:', 'environment-1.us-west-2.elasticbeanstalk.com'),
mock.call(' Updated:', datetime.datetime(2018, 3, 27, 23, 47, 41, 830000, tzinfo=tz.tzutc())),
mock.call(' Status:', 'Ready'),
mock.call(' Health:', 'Green'),
mock.call(' Running instances:', 2),
mock.call(' ', 'i-01641763db1c0cb47: initial'),
mock.call(' ', 'Description: Target registration is in progress'),
mock.call(' ', 'Reason: Elb.RegistrationInProgress'),
mock.call(' ', 'i-12141763d121c0cb47:', 'N/A (Not registered with Target Group)'),
mock.call('Current CodeCommit settings:'),
mock.call(' Repository: repository'),
mock.call(' Branch: branch')
]
)
@mock.patch('ebcli.operations.statusops.elasticbeanstalk.get_environment')
@mock.patch('ebcli.operations.statusops.solution_stack_ops.find_solution_stack_from_string')
@mock.patch('ebcli.operations.statusops.io.log_alert')
@mock.patch('ebcli.operations.statusops.io.echo')
@mock.patch('ebcli.operations.statusops.gitops.get_default_branch')
@mock.patch('ebcli.operations.statusops.gitops.get_default_repository')
@mock.patch('ebcli.operations.statusops.elasticbeanstalk.get_environment_resources')
@mock.patch('ebcli.operations.statusops.elb.get_health_of_instances')
def test_status__verbose_mode__elb(
self,
get_health_of_instances_mock,
get_environment_resources_mock,
get_default_repository_mock,
get_default_branch_mock,
echo_mock,
log_alert_mock,
find_solution_stack_from_string_mock,
get_environment_mock
):
environment_object = Environment.json_to_environment_object(
mock_responses.DESCRIBE_ENVIRONMENTS_RESPONSE['Environments'][0]
)
environment_object.platform = PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')
get_environment_mock.return_value = environment_object
find_solution_stack_from_string_mock.return_value = PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')
get_default_branch_mock.return_value = 'branch'
get_default_repository_mock.return_value = 'repository'
get_environment_resources_mock.return_value = mock_responses.DESCRIBE_ENVIRONMENT_RESOURCES_RESPONSE
get_health_of_instances_mock.return_value = mock_responses.DESCRIBE_INSTANCE_HEALTH['InstanceStates']
statusops.status('my-application', 'environment-1', True)
log_alert_mock.assert_not_called()
echo_mock.assert_has_calls(
[
mock.call('Environment details for:', 'environment-1'),
mock.call(' Application name:', 'my-application'),
mock.call(' Region:', 'us-west-2'),
mock.call(' Deployed Version:', 'Sample Application'),
mock.call(' Environment ID:', 'e-sfsdfsfasdads'),
mock.call(' Platform:', PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')),
mock.call(' Tier:', Tier.from_raw_string('webserver')),
mock.call(' CNAME:', 'environment-1.us-west-2.elasticbeanstalk.com'),
mock.call(' Updated:', datetime.datetime(2018, 3, 27, 23, 47, 41, 830000, tzinfo=tz.tzutc())),
mock.call(' Status:', 'Ready'),
mock.call(' Health:', 'Green'),
mock.call(' Running instances:', 2),
mock.call(' ', 'i-23452345346456566:', 'InService'),
mock.call(' ', 'i-21312312312312312:', 'OutOfService'),
mock.call('Current CodeCommit settings:'),
mock.call(' Repository: repository'),
mock.call(' Branch: branch')
]
)
@mock.patch('ebcli.operations.statusops.elasticbeanstalk.get_environment')
@mock.patch('ebcli.operations.statusops.solution_stack_ops.find_solution_stack_from_string')
@mock.patch('ebcli.operations.statusops.io.log_alert')
@mock.patch('ebcli.operations.statusops.io.echo')
@mock.patch('ebcli.operations.statusops.gitops.get_default_branch')
@mock.patch('ebcli.operations.statusops.gitops.get_default_repository')
@mock.patch('ebcli.operations.statusops.elasticbeanstalk.get_environment_resources')
def test_status__verbose_mode__non_load_balanced(
self,
get_environment_resources_mock,
get_default_repository_mock,
get_default_branch_mock,
echo_mock,
log_alert_mock,
find_solution_stack_from_string_mock,
get_environment_mock
):
environment_object = Environment.json_to_environment_object(
mock_responses.DESCRIBE_ENVIRONMENTS_RESPONSE['Environments'][0]
)
environment_object.platform = PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')
get_environment_mock.return_value = environment_object
find_solution_stack_from_string_mock.return_value = PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')
get_default_branch_mock.return_value = 'branch'
get_default_repository_mock.return_value = 'repository'
get_environment_resources_mock.return_value = mock_responses.DESCRIBE_ENVIRONMENT_RESOURCES_RESPONSE__SINGLE_INSTANCE_ENVIRONMENT
statusops.status('my-application', 'environment-1', True)
log_alert_mock.assert_not_called()
echo_mock.assert_has_calls(
[
mock.call('Environment details for:', 'environment-1'),
mock.call(' Application name:', 'my-application'),
mock.call(' Region:', 'us-west-2'),
mock.call(' Deployed Version:', 'Sample Application'),
mock.call(' Environment ID:', 'e-sfsdfsfasdads'),
mock.call(' Platform:', PlatformVersion('arn:aws:elasticbeanstalk:us-west-2::platform/PHP 7.1 running on 64bit Amazon Linux/2.6.6')),
mock.call(' Tier:', Tier.from_raw_string('webserver')),
mock.call(' CNAME:', 'environment-1.us-west-2.elasticbeanstalk.com'),
mock.call(' Updated:', datetime.datetime(2018, 3, 27, 23, 47, 41, 830000, tzinfo=tz.tzutc())),
mock.call(' Status:', 'Ready'),
mock.call(' Health:', 'Green'),
mock.call(' Running instances:', 1),
mock.call('Current CodeCommit settings:'),
mock.call(' Repository: repository'),
mock.call(' Branch: branch')
]
)
avg_line_length: 57.478803 | max_line_length: 194 | alphanum_fraction: 0.668793
qsc_* quality signals (82 values, schema order): 2,642 | 23,049 | 5.573051 | 0.080621 | 0.057593 | 0.048492 | 0.08313 | 0.911913 | 0.905936 | 0.898533 | 0.898533 | 0.898533 | 0.892828 | 0 | 0.028287 | 0.219315 | 23,049 | 400 | 195 | 57.6225 | 0.789986 | 0.023255 | 0 | 0.793388 | 0 | 0.066116 | 0.375822 | 0.196533 | 0 | 0 | 0 | 0 | 0.038567 | 1 | 0.019284 | false | 0 | 0.024793 | 0 | 0.046832 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 7
hexsha: bac57ee560523b7e56931258d1a01d4ff5237251 | size: 85 | ext: py | lang: Python
max_stars_repo: make-your-own-text-adventure-with-python/homework/calculator.py @ learning-game-development/learning-python-game-development (head 326b72eadab0bfb14f70f295b492f76d139dde33) | licenses: ["Unlicense"] | max_stars_count: null | stars event min/max: null / null
max_issues_repo: make-your-own-text-adventure-with-python/homework/calculator.py @ learning-game-development/learning-python-game-development (head 326b72eadab0bfb14f70f295b492f76d139dde33) | licenses: ["Unlicense"] | max_issues_count: null | issues event min/max: null / null
max_forks_repo: make-your-own-text-adventure-with-python/homework/calculator.py @ learning-game-development/learning-python-game-development (head 326b72eadab0bfb14f70f295b492f76d139dde33) | licenses: ["Unlicense"] | max_forks_count: null | forks event min/max: null / null
content:
def add(value_one, value_two):
return value_one + value_two
print(add(45, 55))
avg_line_length: 14.166667 | max_line_length: 32 | alphanum_fraction: 0.705882
qsc_* quality signals (82 values, schema order): 15 | 85 | 3.733333 | 0.6 | 0.285714 | 0.464286 | 0.571429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.057143 | 0.176471 | 85 | 5 | 33 | 17 | 0.742857 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.333333 | 0.666667 | 0.333333 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0
effective: 0 | hits: 7
hexsha: 79fbe489a2124bdb4c4a57d2b2dd43dcb5efb3d7 | size: 208 | ext: py | lang: Python
max_stars_repo: pkg/__init__.py @ sveatlo/detektilo (head d4a2f4abb90be5238ab537e648f35a2e4dc703a5) | licenses: ["MIT"] | max_stars_count: null | stars event min/max: null / null
max_issues_repo: pkg/__init__.py @ sveatlo/detektilo (head d4a2f4abb90be5238ab537e648f35a2e4dc703a5) | licenses: ["MIT"] | max_issues_count: null | issues event min/max: null / null
max_forks_repo: pkg/__init__.py @ sveatlo/detektilo (head d4a2f4abb90be5238ab537e648f35a2e4dc703a5) | licenses: ["MIT"] | max_forks_count: null | forks event min/max: null / null
content:
from .detector import *
from .extractor import *
from .image import *
from .job import *
from .matcher import *
from .presentation import *
from .video import *
from .video_frame import *
from .misc import *
avg_line_length: 20.8 | max_line_length: 27 | alphanum_fraction: 0.740385
qsc_* quality signals (82 values, schema order): 28 | 208 | 5.464286 | 0.392857 | 0.522876 | 0.196078 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.173077 | 208 | 9 | 28 | 23.111111 | 0.889535 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
effective: 0 | hits: 7
hexsha: 03163aeea571bcd5c7cedd8a3c8eb60714414269 | size: 14,542 | ext: py | lang: Python
max_stars_repo: src/providerhub/azext_providerhub/generated/custom.py @ Mannan2812/azure-cli-extensions (head e2b34efe23795f6db9c59100534a40f0813c3d95) | licenses: ["MIT"] | max_stars_count: 2 | stars event min/max: 2021-06-05T17:51:26.000Z / 2021-11-17T11:17:56.000Z
max_issues_repo: src/providerhub/azext_providerhub/generated/custom.py @ Mannan2812/azure-cli-extensions (head e2b34efe23795f6db9c59100534a40f0813c3d95) | licenses: ["MIT"] | max_issues_count: 3 | issues event min/max: 2020-05-27T20:16:26.000Z / 2020-07-23T19:46:49.000Z
max_forks_repo: src/providerhub/azext_providerhub/generated/custom.py @ Mannan2812/azure-cli-extensions (head e2b34efe23795f6db9c59100534a40f0813c3d95) | licenses: ["MIT"] | max_forks_count: 5 | forks event min/max: 2020-09-08T22:46:48.000Z / 2020-11-08T14:54:35.000Z
content:
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=line-too-long
# pylint: disable=too-many-lines
# pylint: disable=unused-argument
# pylint: disable=too-many-locals
from azure.cli.core.util import sdk_no_wait
def providerhub_custom_rollout_list(client, provider_namespace):
return client.list_by_provider_registration(provider_namespace=provider_namespace)
def providerhub_custom_rollout_show(client, provider_namespace, rollout_name):
return client.get(provider_namespace=provider_namespace, rollout_name=rollout_name)
def providerhub_custom_rollout_create(client, provider_namespace, rollout_name, canary):
properties = {"specification": {"Canary": canary}}
return client.create_or_update(
provider_namespace=provider_namespace,
rollout_name=rollout_name,
properties=properties,
)
def providerhub_custom_rollout_update(client, provider_namespace, rollout_name, canary):
properties = {"specification": {"Canary": canary}}
return client.create_or_update(
provider_namespace=provider_namespace,
rollout_name=rollout_name,
properties=properties,
)
def providerhub_default_rollout_list(client, provider_namespace):
return client.list_by_provider_registration(provider_namespace=provider_namespace)
def providerhub_default_rollout_show(client, provider_namespace, rollout_name):
return client.get(provider_namespace=provider_namespace, rollout_name=rollout_name)
def providerhub_default_rollout_create(
client,
provider_namespace,
rollout_name,
row2_wait_duration,
skip_regions=None,
no_wait=False,
):
return sdk_no_wait(
no_wait,
client.begin_create_or_update,
provider_namespace=provider_namespace,
rollout_name=rollout_name,
row2_wait_duration=row2_wait_duration,
skip_regions=skip_regions,
)
def providerhub_default_rollout_update(
client,
provider_namespace,
rollout_name,
row2_wait_duration,
skip_regions=None,
no_wait=False,
):
return sdk_no_wait(
no_wait,
client.begin_create_or_update,
provider_namespace=provider_namespace,
rollout_name=rollout_name,
row2_wait_duration=row2_wait_duration,
skip_regions=skip_regions,
)
def providerhub_default_rollout_delete(client, provider_namespace, rollout_name):
return client.delete(
provider_namespace=provider_namespace, rollout_name=rollout_name
)
def providerhub_default_rollout_stop(client, provider_namespace, rollout_name):
return client.stop(provider_namespace=provider_namespace, rollout_name=rollout_name)
def providerhub_manifest_checkin(
client, provider_namespace, environment, arm_manifest_location
):
checkin_manifest_params = {}
checkin_manifest_params["environment"] = environment
checkin_manifest_params[
"baseline_arm_manifest_location"
] = arm_manifest_location
return client.checkin_manifest(
provider_namespace=provider_namespace,
checkin_manifest_params=checkin_manifest_params,
)
def providerhub_manifest_generate(client, provider_namespace):
return client.generate_manifest(provider_namespace=provider_namespace)
def providerhub_operation_list(client, provider_namespace):
return client.list_by_provider_registration(provider_namespace=provider_namespace)
def providerhub_operation_create(client, provider_namespace):
return client.create_or_update(provider_namespace=provider_namespace)
def providerhub_operation_update(client, provider_namespace):
return client.create_or_update(provider_namespace=provider_namespace)
def providerhub_operation_delete(client, provider_namespace):
return client.delete(provider_namespace=provider_namespace)
def providerhub_provider_registration_list(client, resource_group_name=None):
if resource_group_name:
return client.list_by_resource_group(resource_group_name=resource_group_name)
return client.list()
def providerhub_provider_registration_show(client, provider_namespace):
return client.get(provider_namespace=provider_namespace)
def providerhub_provider_registration_create(
client,
provider_namespace,
provider_version="2.0",
namespace=None,
provider_type=None,
provider_authentication=None,
provider_authorizations=None,
capabilities=None,
metadata=None,
template_deployment_options=None,
schema_owners=None,
manifest_owners=None,
incident_routing_service=None,
incident_routing_team=None,
incident_contact_email=None,
service_tree_infos=None,
resource_access_policy=None,
opt_in_headers=None,
required_features_policy=None,
managed_by_tenant_id=None,
providerhub_metadata_provider_authorizations=None,
providerhub_metadata_rp_authentication=None,
lighthouse_authorizations=None,
no_wait=False,
):
return sdk_no_wait(
no_wait,
client.begin_create_or_update,
provider_namespace=provider_namespace,
provider_authentication=provider_authentication,
provider_authorizations=provider_authorizations,
namespace=namespace,
provider_version=provider_version,
provider_type=provider_type,
capabilities=capabilities,
metadata=metadata,
template_deployment_options=template_deployment_options,
schema_owners=schema_owners,
manifest_owners=manifest_owners,
incident_routing_service=incident_routing_service,
incident_routing_team=incident_routing_team,
incident_contact_email=incident_contact_email,
service_tree_infos=service_tree_infos,
resource_access_policy=resource_access_policy,
opt_in_headers=opt_in_headers,
required_features_policy=required_features_policy,
managed_by_tenant_id=managed_by_tenant_id,
providerhub_metadata_provider_authorizations=providerhub_metadata_provider_authorizations,
providerhub_metadata_rp_authentication=providerhub_metadata_rp_authentication,
lighthouse_authorizations=lighthouse_authorizations
)
def providerhub_provider_registration_update(
client,
provider_namespace,
provider_version="2.0",
provider_type=None,
provider_authentication=None,
provider_authorizations=None,
capabilities=None,
metadata=None,
template_deployment_options=None,
schema_owners=None,
manifest_owners=None,
incident_routing_service=None,
incident_routing_team=None,
incident_contact_email=None,
service_tree_infos=None,
resource_access_policy=None,
opt_in_headers=None,
required_features_policy=None,
managed_by_tenant_id=None,
providerhub_metadata_provider_authorizations=None,
providerhub_metadata_rp_authentication=None,
lighthouse_authorizations=None,
no_wait=False,
):
return sdk_no_wait(
no_wait,
client.begin_create_or_update,
provider_namespace=provider_namespace,
provider_authentication=provider_authentication,
provider_authorizations=provider_authorizations,
provider_version=provider_version,
provider_type=provider_type,
capabilities=capabilities,
metadata=metadata,
template_deployment_options=template_deployment_options,
schema_owners=schema_owners,
manifest_owners=manifest_owners,
incident_routing_service=incident_routing_service,
incident_routing_team=incident_routing_team,
incident_contact_email=incident_contact_email,
service_tree_infos=service_tree_infos,
resource_access_policy=resource_access_policy,
opt_in_headers=opt_in_headers,
required_features_policy=required_features_policy,
managed_by_tenant_id=managed_by_tenant_id,
providerhub_metadata_provider_authorizations=providerhub_metadata_provider_authorizations,
providerhub_metadata_rp_authentication=providerhub_metadata_rp_authentication,
lighthouse_authorizations=lighthouse_authorizations
)
def providerhub_provider_registration_delete(client, provider_namespace):
return client.delete(provider_namespace=provider_namespace)
def providerhub_provider_registration_generate_operation(client, provider_namespace):
return client.generate_operations(provider_namespace=provider_namespace)
def providerhub_resource_type_registration_list(client, provider_namespace):
return client.list_by_provider_registration(provider_namespace=provider_namespace)
def providerhub_resource_type_registration_show(
client, provider_namespace, resource_type
):
return client.get(
provider_namespace=provider_namespace, resource_type=resource_type
)
def providerhub_resource_type_registration_create( # pylint: disable=too-many-locals
client,
provider_namespace,
resource_type,
routing_type=None,
regionality=None,
endpoints=None,
marketplace_type=None,
resource_creation_begin=None,
resource_patch_begin=None,
swagger_specifications=None,
allowed_unauthorized_actions=None,
authorization_action_mappings=None,
linked_access_checks=None,
default_api_version=None,
logging_rules=None,
throttling_rules=None,
required_features=None,
enable_async_operation=None,
enable_third_party_s2s=None,
is_pure_proxy=None,
identity_management=None,
check_name_availability_specifications=None,
disallowed_action_verbs=None,
service_tree_infos=None,
subscription_state_rules=None,
template_deployment_options=None,
extended_locations=None,
resource_move_policy=None,
resource_deletion_policy=None,
opt_in_headers=None,
required_features_policy=None,
):
return client.begin_create_or_update(
provider_namespace=provider_namespace,
resource_type=resource_type,
routing_type=routing_type,
regionality=regionality,
endpoints=endpoints,
resource_creation_begin=resource_creation_begin,
resource_patch_begin=resource_patch_begin,
marketplace_type=marketplace_type,
swagger_specifications=swagger_specifications,
allowed_unauthorized_actions=allowed_unauthorized_actions,
authorization_action_mappings=authorization_action_mappings,
linked_access_checks=linked_access_checks,
default_api_version=default_api_version,
logging_rules=logging_rules,
throttling_rules=throttling_rules,
required_features=required_features,
enable_async_operation=enable_async_operation,
enable_third_party_s2s=enable_third_party_s2s,
is_pure_proxy=is_pure_proxy,
identity_management=identity_management,
check_name_availability_specifications=check_name_availability_specifications,
disallowed_action_verbs=disallowed_action_verbs,
service_tree_infos=service_tree_infos,
subscription_state_rules=subscription_state_rules,
template_deployment_options=template_deployment_options,
extended_locations=extended_locations,
resource_move_policy=resource_move_policy,
resource_deletion_policy=resource_deletion_policy,
opt_in_headers=opt_in_headers,
required_features_policy=required_features_policy,
)
def providerhub_resource_type_registration_update( # pylint: disable=too-many-locals
client,
provider_namespace,
resource_type,
routing_type=None,
regionality=None,
endpoints=None,
marketplace_type=None,
resource_creation_begin=None,
resource_patch_begin=None,
swagger_specifications=None,
allowed_unauthorized_actions=None,
authorization_action_mappings=None,
linked_access_checks=None,
default_api_version=None,
logging_rules=None,
throttling_rules=None,
required_features=None,
enable_async_operation=None,
enable_third_party_s2s=None,
is_pure_proxy=None,
identity_management=None,
check_name_availability_specifications=None,
disallowed_action_verbs=None,
service_tree_infos=None,
subscription_state_rules=None,
template_deployment_options=None,
extended_locations=None,
resource_move_policy=None,
resource_deletion_policy=None,
opt_in_headers=None,
required_features_policy=None,
):
return client.begin_create_or_update(
provider_namespace=provider_namespace,
resource_type=resource_type,
routing_type=routing_type,
regionality=regionality,
endpoints=endpoints,
resource_creation_begin=resource_creation_begin,
resource_patch_begin=resource_patch_begin,
marketplace_type=marketplace_type,
swagger_specifications=swagger_specifications,
allowed_unauthorized_actions=allowed_unauthorized_actions,
authorization_action_mappings=authorization_action_mappings,
linked_access_checks=linked_access_checks,
default_api_version=default_api_version,
logging_rules=logging_rules,
throttling_rules=throttling_rules,
required_features=required_features,
enable_async_operation=enable_async_operation,
enable_third_party_s2s=enable_third_party_s2s,
is_pure_proxy=is_pure_proxy,
identity_management=identity_management,
check_name_availability_specifications=check_name_availability_specifications,
disallowed_action_verbs=disallowed_action_verbs,
service_tree_infos=service_tree_infos,
subscription_state_rules=subscription_state_rules,
template_deployment_options=template_deployment_options,
extended_locations=extended_locations,
resource_move_policy=resource_move_policy,
resource_deletion_policy=resource_deletion_policy,
opt_in_headers=opt_in_headers,
required_features_policy=required_features_policy,
)
def providerhub_resource_type_registration_delete(
client, provider_namespace, resource_type
):
return client.delete(
provider_namespace=provider_namespace, resource_type=resource_type
)
avg_line_length: 35.642157 | max_line_length: 98 | alphanum_fraction: 0.776166
qsc_* quality signals (82 values, schema order): 1,578 | 14,542 | 6.666033 | 0.10583 | 0.126058 | 0.0713 | 0.084038 | 0.923947 | 0.908451 | 0.886016 | 0.847989 | 0.830497 | 0.821371 | 0 | 0.001309 | 0.159263 | 14,542 | 407 | 99 | 35.72973 | 0.859071 | 0.043185 | 0 | 0.825959 | 0 | 0 | 0.006116 | 0.002159 | 0 | 0 | 0 | 0 | 0 | 1 | 0.079646 | false | 0 | 0.00295 | 0.067847 | 0.165192 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 7
hexsha: 0325b1727da450f1e97250259a3be3df6e732119 | size: 1,221 | ext: py | lang: Python
max_stars_repo: src/AWSLibrary/base/exceptions.py @ hfcr2017/robotframework-aws (head aad38bffcd7fecb13ded086dd77cc1d3ec6a9bf7) | licenses: ["MIT"] | max_stars_count: 11 | stars event min/max: 2020-01-20T02:07:47.000Z / 2022-02-01T14:11:58.000Z
max_issues_repo: src/AWSLibrary/base/exceptions.py @ hfcr2017/robotframework-aws (head aad38bffcd7fecb13ded086dd77cc1d3ec6a9bf7) | licenses: ["MIT"] | max_issues_count: 22 | issues event min/max: 2019-12-15T17:11:20.000Z / 2021-12-09T06:50:35.000Z
max_forks_repo: src/AWSLibrary/base/exceptions.py @ hfcr2017/robotframework-aws (head aad38bffcd7fecb13ded086dd77cc1d3ec6a9bf7) | licenses: ["MIT"] | max_forks_count: 13 | forks event min/max: 2020-01-02T20:08:59.000Z / 2022-03-27T04:07:20.000Z
content:
import logging
from robot.api import logger
class FatalError(RuntimeError):
def __init__(self, message):
ROBOT_EXIT_ON_FAILURE = True
self.logger = logging.getLogger(__name__)
self.rb_logger = logger
logger.error(f'Error: {RuntimeError}')
self.logger.critical(f'Error: {RuntimeError} | Message: {message}')
self.rb_logger.error(f'Error: {RuntimeError} | Message: {message}')
class KeywordError(RuntimeError):
def __init__(self, message):
ROBOT_SUPPRESS_NAME = True
self.logger = logging.getLogger(__name__)
self.rb_logger = logger
logger.error(f'Error: {RuntimeError}')
self.logger.critical(f'Error: {RuntimeError} | Message: {message}')
self.rb_logger.error(f'Error: {RuntimeError} | Message: {message}')
class ContinuableError(RuntimeError):
def __init__(self, message):
ROBOT_CONTINUE_ON_FAILURE = True
self.logger = logging.getLogger(__name__)
self.rb_logger = logger
logger.error(f'Error: {RuntimeError}')
self.logger.critical(f'Error: {RuntimeError} | Message: {message}')
self.rb_logger.error(f'Error: {RuntimeError} | Message: {message}')
avg_line_length: 38.15625 | max_line_length: 75 | alphanum_fraction: 0.667486
qsc_* quality signals (82 values, schema order): 135 | 1,221 | 5.755556 | 0.2 | 0.069498 | 0.208494 | 0.131274 | 0.866152 | 0.866152 | 0.731017 | 0.731017 | 0.731017 | 0.731017 | 0 | 0 | 0.211302 | 1,221 | 32 | 76 | 38.15625 | 0.806854 | 0 | 0 | 0.692308 | 0 | 0 | 0.257774 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.115385 | false | 0 | 0.076923 | 0 | 0.307692 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
effective: 0 | hits: 7
hexsha: 0349680124e6f08231e179d851fe1b1b49f4b936 | size: 19,161 | ext: py | lang: Python
max_stars_repo: tests/test_cli.py @ 0xflotus/cryptosteganography (head b0a31ba1426d790209b3572e079600762f01446a) | licenses: ["MIT"] | max_stars_count: 82 | stars event min/max: 2018-02-06T03:54:11.000Z / 2022-03-29T13:49:47.000Z
max_issues_repo: tests/test_cli.py @ blackhatethicalhacking/cryptosteganography (head b0a31ba1426d790209b3572e079600762f01446a) | licenses: ["MIT"] | max_issues_count: 6 | issues event min/max: 2018-03-25T11:36:01.000Z / 2022-01-29T17:32:30.000Z
max_forks_repo: tests/test_cli.py @ blackhatethicalhacking/cryptosteganography (head b0a31ba1426d790209b3572e079600762f01446a) | licenses: ["MIT"] | max_forks_count: 20 | forks event min/max: 2018-03-23T08:49:56.000Z / 2022-02-10T20:25:18.000Z
content:
import argparse
import builtins
import io
import os
import sys
from unittest import mock
from exitstatus import ExitStatus
import pytest
from cryptosteganography import cli
from cryptosteganography.utils import get_output_image_filename
INPUT_IMAGE = 'tests/assets/test_image.jpg'
INPUT_MESSAGE_TEXT_FILE = 'tests/assets/test_file1.txt'
INPUT_MESSAGE_TEXT_EMPTY_FILE = 'tests/assets/test_file2.txt'
INPUT_MESSAGE_AUDIO_FILE = 'tests/assets/test_file.mp3'
OUTPUT_IMAGE = 'tests/output_files/image_file_cli.png'
OUTPUT_IMAGE_JPG_EXPECTED = 'tests/output_files/image_file_cli_other.jpg'
OUTPUT_MESSAGE_FILE = 'tests/output_files/message_file_cli.txt'
OUTPUT_MESSAGE_AUDIO_FILE = 'tests/output_files/test_file_cli.mp3'
# The cli change any change output format to PNG
OUTPUT_IMAGE_JPG_REAL = get_output_image_filename(OUTPUT_IMAGE_JPG_EXPECTED)
def patch_open(open_func, files):
def open_patched(
path,
mode='r',
buffering=-1,
encoding=None,
errors=None,
newline=None,
closefd=True,
opener=None
):
if 'w' in mode and not os.path.isfile(path):
files.append(path)
return open_func(
path,
mode=mode,
buffering=buffering,
encoding=encoding,
errors=errors,
newline=newline,
closefd=closefd,
opener=opener
)
return open_patched
@pytest.fixture()
def cleanup_files(monkeypatch):
"""
Delete files created by the tests.
"""
files = [
'output.png',
OUTPUT_IMAGE,
OUTPUT_IMAGE_JPG_REAL,
OUTPUT_MESSAGE_FILE,
OUTPUT_MESSAGE_AUDIO_FILE
]
monkeypatch.setattr(builtins, 'open', patch_open(builtins.open, files))
monkeypatch.setattr(io, 'open', patch_open(io.open, files))
yield
for file in files:
try:
os.remove(file)
except FileNotFoundError:
pass
def test_init():
with mock.patch.object(cli, 'main', return_value=42):
with mock.patch.object(cli, '__name__', '__main__'):
with mock.patch.object(cli.sys, 'exit') as mock_exit:
cli.init()
assert mock_exit.call_args[0][0] == 42
def test_argparse_input_empty():
# calling with no arguments goes to look at sys.argv, which is our arguments to py.test.
with pytest.raises((SystemExit, NotImplementedError)):
cli.main()
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command=''
)
)
def test_empty_command(mock_args, monkeypatch, capsys) -> None:
"""
Test if show help when command is empty
"""
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.failure
output = str(capsys.readouterr().out)
assert output == """usage: cryptosteganography [-h] [-v] {save,retrieve} ...
Cryptosteganography is an application to save or retrieve an encrypted message
or encrypted file concealed inside an image.
positional arguments:
{save,retrieve} sub-command help
save save help
retrieve retrieve help
optional arguments:
-h, --help show this help message and exit
-v, --version show program's version number and exit\n"""
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='cdsdcs'
)
)
def test_invalid_command(mock_args, monkeypatch, capsys) -> None:
"""
Test if show help when command is invalid
"""
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.failure
output = str(capsys.readouterr().out)
assert output == """usage: cryptosteganography [-h] [-v] {save,retrieve} ...
Cryptosteganography is an application to save or retrieve an encrypted message
or encrypted file concealed inside an image.
positional arguments:
{save,retrieve} sub-command help
save save help
retrieve retrieve help
optional arguments:
-h, --help show this help message and exit
-v, --version show program's version number and exit\n"""
###############################
# Save - Message String Tests #
###############################
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='save',
input_image_file=INPUT_IMAGE,
output_image_file=OUTPUT_IMAGE,
message='Hello World. 你好,世界!!!'
)
)
def test_save_message_success(mock_args, monkeypatch, capsys) -> None:
# Pasword prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: '48dj_你好,世界')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.success
output = str(capsys.readouterr().out)
assert output == u'Output image {} saved with success\n'.format(OUTPUT_IMAGE)
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='save',
input_image_file=INPUT_IMAGE,
output_image_file='',
message='Hello World. 你好,世界!!!'
)
)
def test_save_message_empty_output_success(mock_args, monkeypatch, capsys) -> None:
# Pasword prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: '48dj_你好,世界')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.success
output = str(capsys.readouterr().out)
assert output == u'Output image output.png saved with success\n'
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='save',
input_image_file='bablakjbla.png',
output_image_file=OUTPUT_IMAGE,
message='Hello. 你好,世界!!!'
)
)
def test_save_message_input_image_file_not_found_error(mock_args, monkeypatch, capsys) -> None:
# Pasword prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: '48dj_你好,世界')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.failure
output = str(capsys.readouterr().out)
assert output == 'Failed: Input file bablakjbla.png not found.\n'
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='save',
input_image_file=INPUT_MESSAGE_TEXT_FILE,
output_image_file=OUTPUT_IMAGE,
message='Hello. 你好,世界!!!'
)
)
def test_save_message_invalid_input_image_error(mock_args, monkeypatch, capsys) -> None:
# Pasword prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: '48dj_你好,世界')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.failure
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='save',
input_image_file=INPUT_IMAGE,
output_image_file=OUTPUT_IMAGE,
message='',
message_file=''
)
)
def test_save_message_empty_message_error(mock_args, monkeypatch, capsys) -> None:
# Pasword prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: 'uhf8hf838fuh')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.failure
output = str(capsys.readouterr().out)
assert output == "Failed: Message can't be empty\n"
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='save',
input_image_file=INPUT_IMAGE,
output_image_file=OUTPUT_IMAGE,
message='Hello World. 你好,世界!!!'
)
)
def test_save_message_empty_password_error(mock_args, monkeypatch, capsys) -> None:
# Pasword prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: ' ')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.failure
output = str(capsys.readouterr().out)
assert output == "Failed: Password can't be empty\n"
###################################
# Retrieve - Message String Tests #
###################################
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='retrieve',
input_image_file=OUTPUT_IMAGE,
retrieved_file=None
)
)
def test_retrieve_message_success(mock_args, monkeypatch, capsys) -> None:
# Pasword prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: '48dj_你好,世界')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.success
output = str(capsys.readouterr().out)
assert output == 'Hello World. 你好,世界!!!\n'
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='retrieve',
input_image_file=OUTPUT_IMAGE,
retrieved_file=OUTPUT_MESSAGE_FILE
)
)
def test_retrieve_message_as_file_success(mock_args, monkeypatch, capsys) -> None:
# Pasword prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: '48dj_你好,世界')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.success
output = str(capsys.readouterr().out)
assert output == '{} saved with success\n'.format(OUTPUT_MESSAGE_FILE)
with open(OUTPUT_MESSAGE_FILE, 'rb') as f:
message = f.read()
assert message.decode('utf-8') == 'Hello World. 你好,世界!!!'
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='retrieve',
input_image_file=OUTPUT_IMAGE,
retrieved_file=None
)
)
def test_retrieve_message_invalid_password(mock_args, monkeypatch, capsys) -> None:
# Pasword prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: 'Wrong Password')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.failure
output = str(capsys.readouterr().out)
assert output == 'No valid data found\n'
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='retrieve',
input_image_file='bablakjbla.png',
retrieved_file=None
)
)
def test_retrieve_message_input_image_file_not_found_error(mock_args, monkeypatch, capsys) -> None:
# Pasword prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: '48dj_你好,世界')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.failure
output = str(capsys.readouterr().out)
assert output == 'Failed: Input file bablakjbla.png not found.\n'
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='retrieve',
input_image_file=INPUT_MESSAGE_TEXT_FILE,
retrieved_file=None
)
)
def test_retrieve_message_invalid_input_image_error(mock_args, monkeypatch, capsys) -> None:
# Password prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: '48dj_你好,世界')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.failure
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='retrieve',
input_image_file=OUTPUT_IMAGE,
retrieved_file=None
)
)
def test_retrieve_message_empty_password_error(mock_args, monkeypatch, capsys) -> None:
# Password prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: ' ')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.failure
output = str(capsys.readouterr().out)
assert output == "Failed: Password can't be empty\n"
#############################
# Save - Message File Tests #
#############################
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='save',
input_image_file=INPUT_IMAGE,
output_image_file=OUTPUT_IMAGE,
message='',
message_file=INPUT_MESSAGE_TEXT_FILE
)
)
def test_save_message_file_success(mock_args, monkeypatch, capsys) -> None:
# Password prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: '7348hffbsd_33222_你好,世界')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.success
output = str(capsys.readouterr().out)
assert output == u'Output image {} saved with success\n'.format(OUTPUT_IMAGE)
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='save',
input_image_file=INPUT_IMAGE,
output_image_file=OUTPUT_IMAGE,
message='',
message_file=INPUT_MESSAGE_AUDIO_FILE
)
)
def test_save_message_audio_file_success(mock_args, monkeypatch, capsys) -> None:
# Password prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: '7348hffbsd_33222_你好,世界')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.success
output = str(capsys.readouterr().out)
assert output == u'Output image {} saved with success\n'.format(OUTPUT_IMAGE)
assert os.path.isfile(INPUT_MESSAGE_AUDIO_FILE)
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='save',
input_image_file=INPUT_IMAGE,
output_image_file=OUTPUT_IMAGE,
message='',
message_file='invalid file'
)
)
def test_save_message_file_not_found_error(mock_args, monkeypatch, capsys) -> None:
# Password prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: '7348hffbsd_33222_你好,世界')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.failure
output = str(capsys.readouterr().out)
assert output == 'Failed: File invalid file not found.\n'
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='save',
input_image_file=INPUT_IMAGE,
output_image_file=OUTPUT_IMAGE,
message='',
message_file=INPUT_MESSAGE_TEXT_EMPTY_FILE
)
)
def test_save_message_file_empty_error(mock_args, monkeypatch, capsys) -> None:
# Password prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: '7348hffbsd_33222_你好,世界')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.failure
output = str(capsys.readouterr().out)
assert output == "Failed: Message file content can't be empty\n"
#################################
# Retrieve - Message File Tests #
#################################
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='retrieve',
input_image_file=OUTPUT_IMAGE,
retrieved_file=OUTPUT_MESSAGE_AUDIO_FILE
)
)
def test_retrieve_message_audio_file_success(mock_args, monkeypatch, capsys) -> None:
# Password prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: '7348hffbsd_33222_你好,世界')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.success
output = str(capsys.readouterr().out)
assert output == '{} saved with success\n'.format(OUTPUT_MESSAGE_AUDIO_FILE)
# Compare if original file is equal to retrieved file
with open(OUTPUT_MESSAGE_AUDIO_FILE, 'rb') as audio_file:
output_audio = audio_file.read()
with open(INPUT_MESSAGE_AUDIO_FILE, 'rb') as original_audio_file:
original_audio = original_audio_file.read()
assert original_audio == output_audio
#######################################
# Save/Retrieve - JPG output expected #
#######################################
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='save',
input_image_file=INPUT_IMAGE,
output_image_file=OUTPUT_IMAGE_JPG_EXPECTED,
message='Hello World. 你好,世界!!!'
)
)
def test_save_message_jpg_output_success(mock_args, monkeypatch, capsys) -> None:
# Password prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: 'Test,世界')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.success
output = str(capsys.readouterr().out)
# Note: the output is always written as PNG, so the success message reports the real .png file name
assert output == u'Output image {} saved with success\n'.format(OUTPUT_IMAGE_JPG_REAL)
@mock.patch(
'argparse.ArgumentParser.parse_args',
return_value=argparse.Namespace(
command='retrieve',
input_image_file=OUTPUT_IMAGE_JPG_REAL,
retrieved_file=None
)
)
def test_retrieve_message_jpg_success(mock_args, monkeypatch, capsys) -> None:
# Password prompt
monkeypatch.setattr('getpass.getpass', lambda prompt: 'Test,世界')
# Call CLI
with pytest.raises(SystemExit) as pytest_wrapped_e:
sys.exit(cli.main())
assert pytest_wrapped_e.type == SystemExit
assert pytest_wrapped_e.value.code == ExitStatus.success
output = str(capsys.readouterr().out)
assert output == 'Hello World. 你好,世界!!!\n'
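# Illustrative refactoring sketch (not part of the original suite): every test above repeats
# the same prompt/exit-code pattern, so it could be factored into a helper. The helper name
# and signature below are made up.
def _run_cli_and_assert_exit(monkeypatch, password, expected_code):
    # Answer the password prompt without blocking on user input
    monkeypatch.setattr('getpass.getpass', lambda prompt: password)
    # The CLI terminates via SystemExit; capture it and check the exit status
    with pytest.raises(SystemExit) as pytest_wrapped_e:
        sys.exit(cli.main())
    assert pytest_wrapped_e.value.code == expected_code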
0 | 7 | cee21853bcedb5dadc703f34cd463807b7448c15 | 48,036 | py | Python | nftx/python/antchain_sdk_nftx/models.py | alipay/antchain-openapi-prod-sdk | f78549e5135d91756093bd88d191ca260b28e083 | ["MIT"] | 6 | 2020-06-28T06:40:50.000Z | 2022-02-25T11:02:18.000Z | nftx/python/antchain_sdk_nftx/models.py | alipay/antchain-openapi-prod-sdk | f78549e5135d91756093bd88d191ca260b28e083 | ["MIT"] | null | null | null | nftx/python/antchain_sdk_nftx/models.py | alipay/antchain-openapi-prod-sdk | f78549e5135d91756093bd88d191ca260b28e083 | ["MIT"] | 6 | 2020-06-30T09:29:03.000Z | 2022-01-07T10:42:22.000Z |
# -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
from typing import List
class Config(TeaModel):
"""
Model for initializing the client
"""
def __init__(
self,
access_key_id: str = None,
access_key_secret: str = None,
security_token: str = None,
protocol: str = None,
read_timeout: int = None,
connect_timeout: int = None,
http_proxy: str = None,
https_proxy: str = None,
endpoint: str = None,
no_proxy: str = None,
max_idle_conns: int = None,
user_agent: str = None,
socks_5proxy: str = None,
socks_5net_work: str = None,
max_idle_time_millis: int = None,
keep_alive_duration_millis: int = None,
max_requests: int = None,
max_requests_per_host: int = None,
):
# accesskey id
self.access_key_id = access_key_id
# accesskey secret
self.access_key_secret = access_key_secret
# security token
self.security_token = security_token
# http protocol
self.protocol = protocol
# read timeout
self.read_timeout = read_timeout
# connect timeout
self.connect_timeout = connect_timeout
# http proxy
self.http_proxy = http_proxy
# https proxy
self.https_proxy = https_proxy
# endpoint
self.endpoint = endpoint
# proxy white list
self.no_proxy = no_proxy
# max idle conns
self.max_idle_conns = max_idle_conns
# user agent
self.user_agent = user_agent
# socks5 proxy
self.socks_5proxy = socks_5proxy
# socks5 network
self.socks_5net_work = socks_5net_work
# max idle time of persistent connections, in milliseconds
self.max_idle_time_millis = max_idle_time_millis
# max keep-alive duration of persistent connections, in milliseconds
self.keep_alive_duration_millis = keep_alive_duration_millis
# max number of connections (total cap on persistent connections)
self.max_requests = max_requests
# max number of connections per target host (per-hostname cap on persistent connections)
self.max_requests_per_host = max_requests_per_host
def validate(self):
pass
def to_map(self):
result = dict()
if self.access_key_id is not None:
result['accessKeyId'] = self.access_key_id
if self.access_key_secret is not None:
result['accessKeySecret'] = self.access_key_secret
if self.security_token is not None:
result['securityToken'] = self.security_token
if self.protocol is not None:
result['protocol'] = self.protocol
if self.read_timeout is not None:
result['readTimeout'] = self.read_timeout
if self.connect_timeout is not None:
result['connectTimeout'] = self.connect_timeout
if self.http_proxy is not None:
result['httpProxy'] = self.http_proxy
if self.https_proxy is not None:
result['httpsProxy'] = self.https_proxy
if self.endpoint is not None:
result['endpoint'] = self.endpoint
if self.no_proxy is not None:
result['noProxy'] = self.no_proxy
if self.max_idle_conns is not None:
result['maxIdleConns'] = self.max_idle_conns
if self.user_agent is not None:
result['userAgent'] = self.user_agent
if self.socks_5proxy is not None:
result['socks5Proxy'] = self.socks_5proxy
if self.socks_5net_work is not None:
result['socks5NetWork'] = self.socks_5net_work
if self.max_idle_time_millis is not None:
result['maxIdleTimeMillis'] = self.max_idle_time_millis
if self.keep_alive_duration_millis is not None:
result['keepAliveDurationMillis'] = self.keep_alive_duration_millis
if self.max_requests is not None:
result['maxRequests'] = self.max_requests
if self.max_requests_per_host is not None:
result['maxRequestsPerHost'] = self.max_requests_per_host
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('accessKeyId') is not None:
self.access_key_id = m.get('accessKeyId')
if m.get('accessKeySecret') is not None:
self.access_key_secret = m.get('accessKeySecret')
if m.get('securityToken') is not None:
self.security_token = m.get('securityToken')
if m.get('protocol') is not None:
self.protocol = m.get('protocol')
if m.get('readTimeout') is not None:
self.read_timeout = m.get('readTimeout')
if m.get('connectTimeout') is not None:
self.connect_timeout = m.get('connectTimeout')
if m.get('httpProxy') is not None:
self.http_proxy = m.get('httpProxy')
if m.get('httpsProxy') is not None:
self.https_proxy = m.get('httpsProxy')
if m.get('endpoint') is not None:
self.endpoint = m.get('endpoint')
if m.get('noProxy') is not None:
self.no_proxy = m.get('noProxy')
if m.get('maxIdleConns') is not None:
self.max_idle_conns = m.get('maxIdleConns')
if m.get('userAgent') is not None:
self.user_agent = m.get('userAgent')
if m.get('socks5Proxy') is not None:
self.socks_5proxy = m.get('socks5Proxy')
if m.get('socks5NetWork') is not None:
self.socks_5net_work = m.get('socks5NetWork')
if m.get('maxIdleTimeMillis') is not None:
self.max_idle_time_millis = m.get('maxIdleTimeMillis')
if m.get('keepAliveDurationMillis') is not None:
self.keep_alive_duration_millis = m.get('keepAliveDurationMillis')
if m.get('maxRequests') is not None:
self.max_requests = m.get('maxRequests')
if m.get('maxRequestsPerHost') is not None:
self.max_requests_per_host = m.get('maxRequestsPerHost')
return self
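# Usage sketch (illustrative only; this file is generated and the helper below is not part of it):
# a Config serializes to camelCase keys with to_map() and is rebuilt from such a dict with from_map().
# All field values here are placeholders.
def _example_config_roundtrip():
    config = Config(access_key_id='demo-key-id', access_key_secret='demo-secret', read_timeout=3000)
    wire = config.to_map()                # e.g. {'accessKeyId': 'demo-key-id', ..., 'readTimeout': 3000}
    restored = Config().from_map(wire)    # rebuild an equivalent Config from the dict
    assert restored.read_timeout == 3000
    return restored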
class File(TeaModel):
def __init__(
self,
original_file_path: str = None,
mini_image_path: str = None,
name: str = None,
type: str = None,
):
# accessible path of the original file
self.original_file_path = original_file_path
# accessible path of the thumbnail image
self.mini_image_path = mini_image_path
# file name
self.name = name
# IMAGE("IMAGE", "image"),
# VIDEO("VIDEO", "video"),
# ;
self.type = type
def validate(self):
self.validate_required(self.original_file_path, 'original_file_path')
self.validate_required(self.mini_image_path, 'mini_image_path')
self.validate_required(self.name, 'name')
self.validate_required(self.type, 'type')
def to_map(self):
result = dict()
if self.original_file_path is not None:
result['original_file_path'] = self.original_file_path
if self.mini_image_path is not None:
result['mini_image_path'] = self.mini_image_path
if self.name is not None:
result['name'] = self.name
if self.type is not None:
result['type'] = self.type
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('original_file_path') is not None:
self.original_file_path = m.get('original_file_path')
if m.get('mini_image_path') is not None:
self.mini_image_path = m.get('mini_image_path')
if m.get('name') is not None:
self.name = m.get('name')
if m.get('type') is not None:
self.type = m.get('type')
return self
class UserAsset(TeaModel):
def __init__(
self,
sku_id: int = None,
nft_id: str = None,
sku_name: str = None,
author_name: str = None,
issuer_name: str = None,
mini_image_path: str = None,
):
# SKU code of the NFT product
self.sku_id = sku_id
# unique code of the NFT asset
self.nft_id = nft_id
# name of the NFT product
self.sku_name = sku_name
# name of the NFT creator
self.author_name = author_name
# name of the NFT issuer
self.issuer_name = issuer_name
# thumbnail URL, with 5-minute authentication
self.mini_image_path = mini_image_path
def validate(self):
self.validate_required(self.sku_id, 'sku_id')
self.validate_required(self.nft_id, 'nft_id')
self.validate_required(self.sku_name, 'sku_name')
self.validate_required(self.author_name, 'author_name')
self.validate_required(self.issuer_name, 'issuer_name')
self.validate_required(self.mini_image_path, 'mini_image_path')
def to_map(self):
result = dict()
if self.sku_id is not None:
result['sku_id'] = self.sku_id
if self.nft_id is not None:
result['nft_id'] = self.nft_id
if self.sku_name is not None:
result['sku_name'] = self.sku_name
if self.author_name is not None:
result['author_name'] = self.author_name
if self.issuer_name is not None:
result['issuer_name'] = self.issuer_name
if self.mini_image_path is not None:
result['mini_image_path'] = self.mini_image_path
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('sku_id') is not None:
self.sku_id = m.get('sku_id')
if m.get('nft_id') is not None:
self.nft_id = m.get('nft_id')
if m.get('sku_name') is not None:
self.sku_name = m.get('sku_name')
if m.get('author_name') is not None:
self.author_name = m.get('author_name')
if m.get('issuer_name') is not None:
self.issuer_name = m.get('issuer_name')
if m.get('mini_image_path') is not None:
self.mini_image_path = m.get('mini_image_path')
return self
class ImportNftCreateRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
product_instance_id: str = None,
project_id: str = None,
sku_name: str = None,
sku_type: str = None,
quantity: int = None,
author: str = None,
owner: str = None,
creation_time: str = None,
biz_type: str = None,
description: str = None,
jump_url: str = None,
files: List[File] = None,
):
# authorization token under OAuth mode
self.auth_token = auth_token
self.product_instance_id = product_instance_id
# the caller's own project id; used as an idempotency key
self.project_id = project_id
# name of the NFT artwork
self.sku_name = sku_name
# IMAGE("IMAGE", "image"),
# VIDEO("VIDEO", "video"),
# ;
self.sku_type = sku_type
# issuance quantity
self.quantity = quantity
# author of the artwork
self.author = author
# owner of the artwork
self.owner = owner
# creation time of the artwork
self.creation_time = creation_time
# marks that the NFT asset comes from a specific product, for special interaction/redirect needs; optional field
self.biz_type = biz_type
# short description of the NFT
self.description = description
# redirect link
self.jump_url = jump_url
# artwork files issued with the NFT
self.files = files
def validate(self):
self.validate_required(self.project_id, 'project_id')
self.validate_required(self.sku_name, 'sku_name')
self.validate_required(self.sku_type, 'sku_type')
self.validate_required(self.quantity, 'quantity')
self.validate_required(self.author, 'author')
self.validate_required(self.owner, 'owner')
self.validate_required(self.creation_time, 'creation_time')
if self.creation_time is not None:
self.validate_pattern(self.creation_time, 'creation_time', '\\d{4}[-]\\d{1,2}[-]\\d{1,2}[T]\\d{2}:\\d{2}:\\d{2}([Z]|([\\.]\\d{1,9})?[\\+]\\d{2}[\\:]?\\d{2})')
self.validate_required(self.description, 'description')
self.validate_required(self.files, 'files')
if self.files:
for k in self.files:
if k:
k.validate()
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.product_instance_id is not None:
result['product_instance_id'] = self.product_instance_id
if self.project_id is not None:
result['project_id'] = self.project_id
if self.sku_name is not None:
result['sku_name'] = self.sku_name
if self.sku_type is not None:
result['sku_type'] = self.sku_type
if self.quantity is not None:
result['quantity'] = self.quantity
if self.author is not None:
result['author'] = self.author
if self.owner is not None:
result['owner'] = self.owner
if self.creation_time is not None:
result['creation_time'] = self.creation_time
if self.biz_type is not None:
result['biz_type'] = self.biz_type
if self.description is not None:
result['description'] = self.description
if self.jump_url is not None:
result['jump_url'] = self.jump_url
result['files'] = []
if self.files is not None:
for k in self.files:
result['files'].append(k.to_map() if k else None)
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('product_instance_id') is not None:
self.product_instance_id = m.get('product_instance_id')
if m.get('project_id') is not None:
self.project_id = m.get('project_id')
if m.get('sku_name') is not None:
self.sku_name = m.get('sku_name')
if m.get('sku_type') is not None:
self.sku_type = m.get('sku_type')
if m.get('quantity') is not None:
self.quantity = m.get('quantity')
if m.get('author') is not None:
self.author = m.get('author')
if m.get('owner') is not None:
self.owner = m.get('owner')
if m.get('creation_time') is not None:
self.creation_time = m.get('creation_time')
if m.get('biz_type') is not None:
self.biz_type = m.get('biz_type')
if m.get('description') is not None:
self.description = m.get('description')
if m.get('jump_url') is not None:
self.jump_url = m.get('jump_url')
self.files = []
if m.get('files') is not None:
for k in m.get('files'):
temp_model = File()
self.files.append(temp_model.from_map(k))
return self
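# Usage sketch (illustrative only, not emitted by the code generator): building a request whose
# creation_time satisfies the ISO-8601 pattern enforced in validate(). All values are placeholders.
def _example_import_nft_create_request():
    request = ImportNftCreateRequest(
        project_id='demo-project-001',    # caller-side idempotency key
        sku_name='Demo Artwork',
        sku_type='IMAGE',
        quantity=100,
        author='Demo Author',
        owner='Demo Owner',
        creation_time='2021-06-15T19:42:22Z',    # matches the validate_pattern regex above
        description='demo description',
        files=[File(original_file_path='https://example.com/a.png',
                    mini_image_path='https://example.com/a_mini.png',
                    name='a.png', type='IMAGE')],
    )
    request.validate()                    # raises if a required field is missing or malformed
    return request.to_map()               # nested File models are flattened to plain dicts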
class ImportNftCreateResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
sku_id: int = None,
):
# unique request id, used for tracing and troubleshooting
self.req_msg_id = req_msg_id
# result code; OK generally means the call succeeded
self.result_code = result_code
# text description of the error message
self.result_msg = result_msg
# product id of the successfully issued NFT
self.sku_id = sku_id
def validate(self):
pass
def to_map(self):
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.sku_id is not None:
result['sku_id'] = self.sku_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('sku_id') is not None:
self.sku_id = m.get('sku_id')
return self
class QueryNftCreateRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
product_instance_id: str = None,
project_id: str = None,
sku_id: int = None,
):
# authorization token under OAuth mode
self.auth_token = auth_token
self.product_instance_id = product_instance_id
# the caller's own project id; used as an idempotency key when issuing the NFT
self.project_id = project_id
# product id of the successfully issued NFT; when sku_id is passed in, the query uses it as the key
self.sku_id = sku_id
def validate(self):
self.validate_required(self.project_id, 'project_id')
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.product_instance_id is not None:
result['product_instance_id'] = self.product_instance_id
if self.project_id is not None:
result['project_id'] = self.project_id
if self.sku_id is not None:
result['sku_id'] = self.sku_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('product_instance_id') is not None:
self.product_instance_id = m.get('product_instance_id')
if m.get('project_id') is not None:
self.project_id = m.get('project_id')
if m.get('sku_id') is not None:
self.sku_id = m.get('sku_id')
return self
class QueryNftCreateResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
sku_id: int = None,
sku_status: str = None,
sku_name: str = None,
author_name: str = None,
issuer_name: str = None,
mini_image_path: str = None,
):
# unique request id, used for tracing and troubleshooting
self.req_msg_id = req_msg_id
# result code; OK generally means the call succeeded
self.result_code = result_code
# text description of the error message
self.result_msg = result_msg
# product id of the successfully issued NFT
self.sku_id = sku_id
# INIT("INIT", "initialized"),
# PROCESSING("PROCESSING", "asset being created"),
# FINISH("FINISH", "asset initialization finished"),
# ;
self.sku_status = sku_status
# name of the NFT product
self.sku_name = sku_name
# name of the author
self.author_name = author_name
# name of the issuer
self.issuer_name = issuer_name
# display path of the thumbnail
self.mini_image_path = mini_image_path
def validate(self):
pass
def to_map(self):
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.sku_id is not None:
result['sku_id'] = self.sku_id
if self.sku_status is not None:
result['sku_status'] = self.sku_status
if self.sku_name is not None:
result['sku_name'] = self.sku_name
if self.author_name is not None:
result['author_name'] = self.author_name
if self.issuer_name is not None:
result['issuer_name'] = self.issuer_name
if self.mini_image_path is not None:
result['mini_image_path'] = self.mini_image_path
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('sku_id') is not None:
self.sku_id = m.get('sku_id')
if m.get('sku_status') is not None:
self.sku_status = m.get('sku_status')
if m.get('sku_name') is not None:
self.sku_name = m.get('sku_name')
if m.get('author_name') is not None:
self.author_name = m.get('author_name')
if m.get('issuer_name') is not None:
self.issuer_name = m.get('issuer_name')
if m.get('mini_image_path') is not None:
self.mini_image_path = m.get('mini_image_path')
return self
class ExecNftTransferRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
product_instance_id: str = None,
sku_id: int = None,
to_id_no: str = None,
to_id_type: str = None,
order_no: str = None,
order_time: str = None,
price_cent: int = None,
):
# authorization token under OAuth mode
self.auth_token = auth_token
self.product_instance_id = product_instance_id
# product id of the successfully issued NFT
self.sku_id = sku_id
# Alipay uid of the end (C-side) user
self.to_id_no = to_id_no
# Alipay account type of the end (C-side) user
self.to_id_type = to_id_type
# order number of the user's purchase at the merchant; used as an idempotency key
self.order_no = order_no
# time of the user's purchase order
self.order_time = order_time
# price of the user's purchase order; may be 0; shown in the user's purchase history
self.price_cent = price_cent
def validate(self):
self.validate_required(self.sku_id, 'sku_id')
self.validate_required(self.to_id_no, 'to_id_no')
self.validate_required(self.to_id_type, 'to_id_type')
self.validate_required(self.order_no, 'order_no')
self.validate_required(self.order_time, 'order_time')
if self.order_time is not None:
self.validate_pattern(self.order_time, 'order_time', '\\d{4}[-]\\d{1,2}[-]\\d{1,2}[T]\\d{2}:\\d{2}:\\d{2}([Z]|([\\.]\\d{1,9})?[\\+]\\d{2}[\\:]?\\d{2})')
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.product_instance_id is not None:
result['product_instance_id'] = self.product_instance_id
if self.sku_id is not None:
result['sku_id'] = self.sku_id
if self.to_id_no is not None:
result['to_id_no'] = self.to_id_no
if self.to_id_type is not None:
result['to_id_type'] = self.to_id_type
if self.order_no is not None:
result['order_no'] = self.order_no
if self.order_time is not None:
result['order_time'] = self.order_time
if self.price_cent is not None:
result['price_cent'] = self.price_cent
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('product_instance_id') is not None:
self.product_instance_id = m.get('product_instance_id')
if m.get('sku_id') is not None:
self.sku_id = m.get('sku_id')
if m.get('to_id_no') is not None:
self.to_id_no = m.get('to_id_no')
if m.get('to_id_type') is not None:
self.to_id_type = m.get('to_id_type')
if m.get('order_no') is not None:
self.order_no = m.get('order_no')
if m.get('order_time') is not None:
self.order_time = m.get('order_time')
if m.get('price_cent') is not None:
self.price_cent = m.get('price_cent')
return self
class ExecNftTransferResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
sku_id: int = None,
nft_id: str = None,
):
# unique request id, used for tracing and troubleshooting
self.req_msg_id = req_msg_id
# result code; OK generally means the call succeeded
self.result_code = result_code
# text description of the error message
self.result_msg = result_msg
# product id of the successfully issued NFT
self.sku_id = sku_id
# the specific token of the issued NFT product that was transferred to the end user
self.nft_id = nft_id
def validate(self):
pass
def to_map(self):
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.sku_id is not None:
result['sku_id'] = self.sku_id
if self.nft_id is not None:
result['nft_id'] = self.nft_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('sku_id') is not None:
self.sku_id = m.get('sku_id')
if m.get('nft_id') is not None:
self.nft_id = m.get('nft_id')
return self
class SendNftTransferRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
product_instance_id: str = None,
project_id: str = None,
to_id_no: str = None,
to_id_type: str = None,
order_no: str = None,
order_time: str = None,
price_cent: int = None,
):
# authorization token under OAuth mode
self.auth_token = auth_token
self.product_instance_id = product_instance_id
# project number unique under the NFT tenant
self.project_id = project_id
# Alipay account starting with 2088
self.to_id_no = to_id_no
# account type; currently only Alipay accounts are supported
self.to_id_type = to_id_type
# the tenant's unique order number for the NFT transaction
self.order_no = order_no
# time of the user's purchase order
self.order_time = order_time
# amount paid for the NFT, in cents
self.price_cent = price_cent
def validate(self):
self.validate_required(self.project_id, 'project_id')
self.validate_required(self.to_id_no, 'to_id_no')
self.validate_required(self.to_id_type, 'to_id_type')
self.validate_required(self.order_no, 'order_no')
self.validate_required(self.order_time, 'order_time')
if self.order_time is not None:
self.validate_pattern(self.order_time, 'order_time', '\\d{4}[-]\\d{1,2}[-]\\d{1,2}[T]\\d{2}:\\d{2}:\\d{2}([Z]|([\\.]\\d{1,9})?[\\+]\\d{2}[\\:]?\\d{2})')
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.product_instance_id is not None:
result['product_instance_id'] = self.product_instance_id
if self.project_id is not None:
result['project_id'] = self.project_id
if self.to_id_no is not None:
result['to_id_no'] = self.to_id_no
if self.to_id_type is not None:
result['to_id_type'] = self.to_id_type
if self.order_no is not None:
result['order_no'] = self.order_no
if self.order_time is not None:
result['order_time'] = self.order_time
if self.price_cent is not None:
result['price_cent'] = self.price_cent
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('product_instance_id') is not None:
self.product_instance_id = m.get('product_instance_id')
if m.get('project_id') is not None:
self.project_id = m.get('project_id')
if m.get('to_id_no') is not None:
self.to_id_no = m.get('to_id_no')
if m.get('to_id_type') is not None:
self.to_id_type = m.get('to_id_type')
if m.get('order_no') is not None:
self.order_no = m.get('order_no')
if m.get('order_time') is not None:
self.order_time = m.get('order_time')
if m.get('price_cent') is not None:
self.price_cent = m.get('price_cent')
return self
class SendNftTransferResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
sku_id: str = None,
nft_id: str = None,
):
# unique request id, used for tracing and troubleshooting
self.req_msg_id = req_msg_id
# result code; OK generally means the call succeeded
self.result_code = result_code
# text description of the error message
self.result_msg = result_msg
# NFT product code
self.sku_id = sku_id
# code of the issued NFT
self.nft_id = nft_id
def validate(self):
pass
def to_map(self):
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.sku_id is not None:
result['sku_id'] = self.sku_id
if self.nft_id is not None:
result['nft_id'] = self.nft_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('sku_id') is not None:
self.sku_id = m.get('sku_id')
if m.get('nft_id') is not None:
self.nft_id = m.get('nft_id')
return self
class QueryNftCustomerRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
product_instance_id: str = None,
id_no: str = None,
id_type: str = None,
nft_id: str = None,
):
# authorization token under OAuth mode
self.auth_token = auth_token
self.product_instance_id = product_instance_id
# the user's Alipay 2088 account code, 16 digits
self.id_no = id_no
# the user's account type
self.id_type = id_type
# unique code of the NFT asset
self.nft_id = nft_id
def validate(self):
self.validate_required(self.id_no, 'id_no')
self.validate_required(self.id_type, 'id_type')
self.validate_required(self.nft_id, 'nft_id')
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.product_instance_id is not None:
result['product_instance_id'] = self.product_instance_id
if self.id_no is not None:
result['id_no'] = self.id_no
if self.id_type is not None:
result['id_type'] = self.id_type
if self.nft_id is not None:
result['nft_id'] = self.nft_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('product_instance_id') is not None:
self.product_instance_id = m.get('product_instance_id')
if m.get('id_no') is not None:
self.id_no = m.get('id_no')
if m.get('id_type') is not None:
self.id_type = m.get('id_type')
if m.get('nft_id') is not None:
self.nft_id = m.get('nft_id')
return self
class QueryNftCustomerResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
sku_id: int = None,
nft_id: str = None,
sku_name: str = None,
author_name: str = None,
issuer_name: str = None,
mini_image_path: str = None,
):
# unique request id, used for tracing and troubleshooting
self.req_msg_id = req_msg_id
# result code; OK generally means the call succeeded
self.result_code = result_code
# text description of the error message
self.result_msg = result_msg
# SKU code of the NFT product
self.sku_id = sku_id
# unique code of the NFT asset
self.nft_id = nft_id
# name of the NFT product
self.sku_name = sku_name
# name of the NFT creator
self.author_name = author_name
# name of the NFT issuer
self.issuer_name = issuer_name
# thumbnail URL, with 5-minute authentication
self.mini_image_path = mini_image_path
def validate(self):
pass
def to_map(self):
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.sku_id is not None:
result['sku_id'] = self.sku_id
if self.nft_id is not None:
result['nft_id'] = self.nft_id
if self.sku_name is not None:
result['sku_name'] = self.sku_name
if self.author_name is not None:
result['author_name'] = self.author_name
if self.issuer_name is not None:
result['issuer_name'] = self.issuer_name
if self.mini_image_path is not None:
result['mini_image_path'] = self.mini_image_path
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('sku_id') is not None:
self.sku_id = m.get('sku_id')
if m.get('nft_id') is not None:
self.nft_id = m.get('nft_id')
if m.get('sku_name') is not None:
self.sku_name = m.get('sku_name')
if m.get('author_name') is not None:
self.author_name = m.get('author_name')
if m.get('issuer_name') is not None:
self.issuer_name = m.get('issuer_name')
if m.get('mini_image_path') is not None:
self.mini_image_path = m.get('mini_image_path')
return self
class QueryNftOrderRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
product_instance_id: str = None,
id_no: str = None,
id_type: str = None,
order_no: str = None,
):
# authorization token under OAuth mode
self.auth_token = auth_token
self.product_instance_id = product_instance_id
# the Alipay 2088 account
self.id_no = id_no
# payment account type; this version only supports Alipay accounts
self.id_type = id_type
# the tenant's unique mapped order number
self.order_no = order_no
def validate(self):
self.validate_required(self.id_no, 'id_no')
self.validate_required(self.id_type, 'id_type')
self.validate_required(self.order_no, 'order_no')
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.product_instance_id is not None:
result['product_instance_id'] = self.product_instance_id
if self.id_no is not None:
result['id_no'] = self.id_no
if self.id_type is not None:
result['id_type'] = self.id_type
if self.order_no is not None:
result['order_no'] = self.order_no
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('product_instance_id') is not None:
self.product_instance_id = m.get('product_instance_id')
if m.get('id_no') is not None:
self.id_no = m.get('id_no')
if m.get('id_type') is not None:
self.id_type = m.get('id_type')
if m.get('order_no') is not None:
self.order_no = m.get('order_no')
return self
class QueryNftOrderResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
sku_id: int = None,
nft_id: str = None,
):
# unique request id, used for tracing and troubleshooting
self.req_msg_id = req_msg_id
# result code; OK generally means the call succeeded
self.result_code = result_code
# text description of the error message
self.result_msg = result_msg
# NFT product code
self.sku_id = sku_id
# unique id of the NFT asset
self.nft_id = nft_id
def validate(self):
pass
def to_map(self):
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.sku_id is not None:
result['sku_id'] = self.sku_id
if self.nft_id is not None:
result['nft_id'] = self.nft_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('sku_id') is not None:
self.sku_id = m.get('sku_id')
if m.get('nft_id') is not None:
self.nft_id = m.get('nft_id')
return self
class CreateNftIssuerRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
product_instance_id: str = None,
project_id: str = None,
sku_name: str = None,
sku_type: str = None,
quantity: int = None,
author: str = None,
issuer_name: str = None,
sku_descrption: str = None,
jump_url: str = None,
biz_type: str = None,
files: List[File] = None,
):
# authorization token under OAuth mode
self.auth_token = auth_token
self.product_instance_id = product_instance_id
# the tenant's unique number for issuing the NFT
self.project_id = project_id
# name of the NFT product
self.sku_name = sku_name
# NFT asset type, including IMAGE, VIDEO, FILE, AUDIO, THREE_DIMENSIONS, etc.
self.sku_type = sku_type
# minting quantity, starting from 100
self.quantity = quantity
# NFT creator
self.author = author
# issuer name
self.issuer_name = issuer_name
# description of the NFT product
self.sku_descrption = sku_descrption
# redirect link
self.jump_url = jump_url
# business type: PAY_CODE, DEFAULT, EUROPEAN_CUP, TAOBAO_AUCTION, NFT_CUSTOMIZE (customized NFT)
self.biz_type = biz_type
# structure of the material files
self.files = files
def validate(self):
self.validate_required(self.project_id, 'project_id')
self.validate_required(self.sku_name, 'sku_name')
self.validate_required(self.sku_type, 'sku_type')
self.validate_required(self.quantity, 'quantity')
self.validate_required(self.author, 'author')
self.validate_required(self.issuer_name, 'issuer_name')
self.validate_required(self.sku_descrption, 'sku_descrption')
self.validate_required(self.files, 'files')
if self.files:
for k in self.files:
if k:
k.validate()
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.product_instance_id is not None:
result['product_instance_id'] = self.product_instance_id
if self.project_id is not None:
result['project_id'] = self.project_id
if self.sku_name is not None:
result['sku_name'] = self.sku_name
if self.sku_type is not None:
result['sku_type'] = self.sku_type
if self.quantity is not None:
result['quantity'] = self.quantity
if self.author is not None:
result['author'] = self.author
if self.issuer_name is not None:
result['issuer_name'] = self.issuer_name
if self.sku_descrption is not None:
result['sku_descrption'] = self.sku_descrption
if self.jump_url is not None:
result['jump_url'] = self.jump_url
if self.biz_type is not None:
result['biz_type'] = self.biz_type
result['files'] = []
if self.files is not None:
for k in self.files:
result['files'].append(k.to_map() if k else None)
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('product_instance_id') is not None:
self.product_instance_id = m.get('product_instance_id')
if m.get('project_id') is not None:
self.project_id = m.get('project_id')
if m.get('sku_name') is not None:
self.sku_name = m.get('sku_name')
if m.get('sku_type') is not None:
self.sku_type = m.get('sku_type')
if m.get('quantity') is not None:
self.quantity = m.get('quantity')
if m.get('author') is not None:
self.author = m.get('author')
if m.get('issuer_name') is not None:
self.issuer_name = m.get('issuer_name')
if m.get('sku_descrption') is not None:
self.sku_descrption = m.get('sku_descrption')
if m.get('jump_url') is not None:
self.jump_url = m.get('jump_url')
if m.get('biz_type') is not None:
self.biz_type = m.get('biz_type')
self.files = []
if m.get('files') is not None:
for k in m.get('files'):
temp_model = File()
self.files.append(temp_model.from_map(k))
return self
class CreateNftIssuerResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
task_id: str = None,
project_id: str = None,
):
# unique request id, used for tracing and troubleshooting
self.req_msg_id = req_msg_id
# result code; OK generally means the call succeeded
self.result_code = result_code
# text description of the error message
self.result_msg = result_msg
# issuance task number
self.task_id = task_id
# the unique project number corresponding to the business side's NFT issuance
self.project_id = project_id
def validate(self):
pass
def to_map(self):
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.task_id is not None:
result['task_id'] = self.task_id
if self.project_id is not None:
result['project_id'] = self.project_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('task_id') is not None:
self.task_id = m.get('task_id')
if m.get('project_id') is not None:
self.project_id = m.get('project_id')
return self
class PagequeryNftCustomerRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
product_instance_id: str = None,
page: int = None,
page_size: int = None,
id_no: str = None,
id_type: str = None,
):
# authorization token under OAuth mode
self.auth_token = auth_token
self.product_instance_id = product_instance_id
# current page number
self.page = page
# page size
self.page_size = page_size
# the user's phone number or Alipay UID
self.id_no = id_no
# the user's id type, corresponding to id_no
self.id_type = id_type
def validate(self):
self.validate_required(self.page, 'page')
self.validate_required(self.page_size, 'page_size')
self.validate_required(self.id_no, 'id_no')
self.validate_required(self.id_type, 'id_type')
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.product_instance_id is not None:
result['product_instance_id'] = self.product_instance_id
if self.page is not None:
result['page'] = self.page
if self.page_size is not None:
result['page_size'] = self.page_size
if self.id_no is not None:
result['id_no'] = self.id_no
if self.id_type is not None:
result['id_type'] = self.id_type
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('product_instance_id') is not None:
self.product_instance_id = m.get('product_instance_id')
if m.get('page') is not None:
self.page = m.get('page')
if m.get('page_size') is not None:
self.page_size = m.get('page_size')
if m.get('id_no') is not None:
self.id_no = m.get('id_no')
if m.get('id_type') is not None:
self.id_type = m.get('id_type')
return self
class PagequeryNftCustomerResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
page: int = None,
page_size: int = None,
total_count: int = None,
asset_list: List[UserAsset] = None,
):
# unique request id, used for tracing and troubleshooting
self.req_msg_id = req_msg_id
# result code; OK generally means the call succeeded
self.result_code = result_code
# text description of the error message
self.result_msg = result_msg
# page number, passed in on the query
self.page = page
# page size, passed in on the query
self.page_size = page_size
# total number of items in the list
self.total_count = total_count
# list of the user's assets
self.asset_list = asset_list
def validate(self):
if self.asset_list:
for k in self.asset_list:
if k:
k.validate()
def to_map(self):
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.page is not None:
result['page'] = self.page
if self.page_size is not None:
result['page_size'] = self.page_size
if self.total_count is not None:
result['total_count'] = self.total_count
result['asset_list'] = []
if self.asset_list is not None:
for k in self.asset_list:
result['asset_list'].append(k.to_map() if k else None)
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('page') is not None:
self.page = m.get('page')
if m.get('page_size') is not None:
self.page_size = m.get('page_size')
if m.get('total_count') is not None:
self.total_count = m.get('total_count')
self.asset_list = []
if m.get('asset_list') is not None:
for k in m.get('asset_list'):
temp_model = UserAsset()
self.asset_list.append(temp_model.from_map(k))
return self
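# Usage sketch (illustrative only): from_map() also rebuilds nested models, e.g. the UserAsset
# entries inside a PagequeryNftCustomerResponse. The wire payload below is fabricated.
def _example_parse_page_response():
    wire = {
        'req_msg_id': 'demo-req-id',
        'result_code': 'OK',
        'page': 1,
        'page_size': 10,
        'total_count': 1,
        'asset_list': [{'sku_id': 123, 'nft_id': 'demo-nft-id', 'sku_name': 'Demo Artwork'}],
    }
    response = PagequeryNftCustomerResponse().from_map(wire)
    assert isinstance(response.asset_list[0], UserAsset)
    return response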
0 | 8 | cee309d81ac58f957e8333509009c3a149f636cb | 10,616 | py | Python | spam_v1/spam_encrypted.py | Alpha-Demon404/RE-14 | b5b46a9f0eee218f2a642b615c77135c33c6f4ad | ["MIT"] | 39 | 2020-02-26T09:44:36.000Z | 2022-03-23T00:18:25.000Z | spam_v1/spam_encrypted.py | B4BY-DG/reverse-enginnering | b5b46a9f0eee218f2a642b615c77135c33c6f4ad | ["MIT"] | 15 | 2020-05-14T10:07:26.000Z | 2022-01-06T02:55:32.000Z | spam_v1/spam_encrypted.py | B4BY-DG/reverse-enginnering | b5b46a9f0eee218f2a642b615c77135c33c6f4ad | ["MIT"] | 41 | 2020-03-16T22:36:38.000Z | 2022-03-17T14:47:19.000Z |
# Time Success Parser : Mon Jun 15 19:42:22 2020
# Auto Parser Dis Version : 1.1.0
# Source : https://www.github.com/Datez-Kun
import hashlib, base64, os, sys  # sys is required by the sys.exit() calls below
#from s import *
def hasher(text, length, key):
if length > 64:
raise ValueError('hash length should be lower than 64')
result = hashlib.sha256(text + key + text).hexdigest()[:length][::-1]
return result
def separator(text, length):
return [ text[i:i + length] for i in range(0, len(text), int(length)) ]
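# Illustration only (not part of the original script, which targets Python 2 where sha256 accepts str):
def _demo_primitives():
    chunks = separator('abcdefgh', 3)    # ['abc', 'def', 'gh']
    piece = hasher('abc', 8, 'key')      # first 8 hex chars of sha256('abc' + 'key' + 'abc'), reversed
    return chunks, piece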
def decrypt(text, key):
textsplit = text.split('!-!')
encrypted, shuffled, hash_length, separate_length = textsplit[0].split('|')
encrypted = separator(encrypted, int(hash_length))
encrypted2 = separator(('').join(encrypted), int(hash_length))
shuffled = separator(shuffled, int(separate_length))
primary_key_is_true = True
for i in shuffled:
hashed = hasher(i, int(hash_length), key)
if hashed in encrypted:
encrypted[encrypted.index(hashed)] = i
for i in encrypted:
if i in encrypted2 and len(textsplit) == 1:
os.remove(__file__)
sys.exit()
elif i in encrypted2:
primary_key_is_true = False
break
if primary_key_is_true: # key is true
result = base64.b64decode(('').join(encrypted)[::-1])
print(result)
if len(textsplit) >= 2 and primary_key_is_true == False: # Key Is False
master_key = separator(textsplit[1], int(hash_length))
master_key2 = separator(('').join(master_key), int(hash_length))
for i in shuffled:
hashed = hasher(i, int(hash_length), key)
if hashed in master_key:
master_key[master_key.index(hashed)] = i
for i in master_key:
if i in master_key2:
os.remove(__file__)
sys.exit()
result = base64.b64decode(('').join(master_key)[::-1])
return result
def unlock(key):
# exec
decrypt('2c89e676b33325c0fffe657007a7f04527e734aaf2f94349cf9332ceb2f244cd8683b66d1cefc1e79e5198b382bb1c62f02cc234192546384b6f7278a2a9db1f81263384bcc1d1bcfdaaad773efbbbc6de52d144d3a99ea9cb9d43023b3b0cb1a2489f4620150efbf3939af85eb3a3f727b55fc3ca732b1a968b6ca245b5d5f830f4da7dd99df915f32699f2d6c384d15eed138e6b6d20e198d65999cd485a1b5888826ec73fcab1f3939af85eb3a3f776af43aac999c579140c5e71abca06214c0e5a42ecbd088b3c9313eabe46f47b71cd8c4012eea355df6829523159b74615ebf4867f042e28fc4bf2284c48a08c9ef3e32222fd9e0794e03b209e94075f4172e7ea447d3cf9a210fab7c18f19474126ab7bc5bb588cd694d468e545e646d12695fcda107bda752a2edbce55b2e57676b056db213fb7369d14a47177da8df93be1f321e97d38072e93cba32b092e89b66ec9594ac4e7493d584400a0ea8124b245f1cfcc47daea795cf1f5076fb921050b0397bbfaab3eb0cb89aa2883aaf056c14aac3fc29a38f0ebbb7f78121a1184cf05e52bfa8e760b7f71cd413514878477967eeea0bbc45353db52e4d1a4a51087678523838701969ae7fcaddd44cd7b32f2dcc91158f29227d9ccb6d77bd2dc34db49d623541067ed7c01e593a1df2d51083818711be1b1541fd0b09c71ac7c16e4679989301067ed7c01e593a1ade19b67c372b9b949f739355a2970acd5ed8f6345c41f11493d584400a0ea81c49f7b9dee743cb4fc4bf2284c48a08cded0e374d21026dae76f8acddaab4c2c8b74d94b5cbc3aae3e59dbb3f646e6d12fd4bb752db26add3f2ce01a5ff7039c54322cadd286fc4ad87b7040f9cac998d09c736f10ec208a8b97cbef9e532f80dd82175a2212f0a3f32699f2d6c384d1516a6f0421843fb2331824e1f5bae5717f47de06b634f966b79e7f1aa8132930c58fe5c220383467140c5e71abca0621c63b198bbda42a98a3df49a645ba2448261c3a3aa612e059dbfe74fcbdf1e1a259111e643f7836b714ffc9031c29a51b1e692810ca044973930bd6d09f77c0c8440dd2908a9552cd37b594eb81a824b35f2336cd381aca03d89818a0b39b473fade19b67c372b9b94edeb7d321c2f11e3f53e36bdfd61cb39ea3ea5c8c2cd8314e73dccc9bfc32f6238ed07e7864dbdf7aca8385c17d6e00eb7f33c5d5e2b39b18f54df8d28ace8470a9359c377f4595d2dc34db49d62354968b6ca245b5d5f8e563735fdaa0b13af0b0aa182477415ed4f598e63cc535b116c13c19d888f1d4c9e8bc2958e4a1169ea3ea5c8c2cd831bafb3e140db040c6f96482640669ebc2190aa61b9f15af2e5dc8b72d77fb458ff617fcbb96fb99a393ee982decf3c8dd875e0b90344bd3d5955fd85544ad1a818d7c55a74d49028f94e03b209e94075fa637fcca2549b310c80ea270917a1a8eec6af458686118be2b3636ecdd3e0d474afc2309e6d2e5eceb20fc5ca9fe8476336f90205b0c8a5159d33bfc1762101e15ebf4867f042e28f764c1e2ea6dd8a8de52d144d3a99ea9450074ac89f78f8dddb8692dd51bd8c94e3c4c3475ae45bf27b55fc3ca732b1aa857cb7c52fe406c4a2fae388da6939da46c5da9e418aa9bde52d144d3a99ea9555bb9af5fda01b4f34b8f6ce1bfa195955fd85544ad1a81e6e491a49fec3e27140c5e71abca0621959c5ed0822eb507f32699f2d6c384d1d4f598e63cc535b1cb9d43023b3b0cb1c45353db52e4d1a4b2a19cf7bdb84cce6ffcadfa9509c312e06aa5c7eeb083562c903f1763f69e040e48846a2f13cd71d79fe8914d57c85243971dd7ab6ddf96f227157550f9691fa1bfb7e1ad42b5a0603449ef1ee2f91401969ae7fcaddd44ade19b67c372b9b949f739355a2970ac74c28f00cb25b321a430eeb97b3d164c1290bf1df10c41b1c45353db52e4d1a47a9b83a676068b6b7860af5ff786b8c75ac529f24c6ebbd3a3df49a645ba2448a07e066c6878de616a186703a6163324ca666649e3df3d38ea795cf1f5076fb9fe0e5ef4553cba12968b6ca245b5d5f8028b95bcd9b4b6b2760b7f71cd413514a46c5da9e418aa9bb1bf49311dc3b8465b249d1351cda5c1eb20fc5ca9fe84767002ed5e92f9812c39693cfef8a699570a4019c0a015d66189a3579cc14b1ae49b3b6fa08b5a2b11331824e1f5bae571ce4478284ea845e1eb20fc5ca9fe847651027157b65c1ac21f52b6a5f6d47d57d6b1f43df41a9edad5ed8f6345c41f11b28eba0d1671944116c13c19d888f1d45c555129cd2e0068317bfb9397eefbb6c58fe5c220383467238ed07e7864dbdf2c12347b1dbb4f79b3dbb1525f69c32ff617fcbb96fb99a3a151482295ecb3a6bbd0febd18e41eaf752a2edbce55b2e5bd2f97a3f6ad6440930bd6d09f77c0c803c097e3128fa730cd7b32f2dcc911588dafc51b1358f34011c25eb2717d8ee75540e18817b40845290a31d30b
1360c097230f750b92ea94da94b635bb36c38f4b2cd4656a7e8e6b1602a8375424bca39bcde1711b76eac793ee982decf3c8dd878477967eeea0bb45d26c0aff06d59a04f615a7e134e3c5968b6ca245b5d5f82c903f1763f69e04f49f88d544141ced9bcde1711b76eac7eb3d47c932fd8fa32142cbfe3af4e064955fd85544ad1a813d5129fce2ab5811930bd6d09f77c0c8440dd2908a9552cdfc2279fc958e4ef8e2b3146500e392269ffb02c6394050224eeb7c5fa0749c93aa664aedfd72f86bd5cd72e00de29d8e8112744c5b6974a0d09d5e9f4d15e4adb461a64d9ffc661db28eba0d16719441d08d7afef9e72f077b0c047fb671e1befc4bf2284c48a08c744b4bf82099b1227cbd49988407b8456753cf191d165743df6829523159b74662cc658e8870275a319c5d3f271aef8ffea4fe15193ef5a2fc4bf2284c48a08c9ef3e32222fd9e07c3bc6485a284fcaee5492ca443ebf4f42bc90746ef8058c2f135ce8051877c86b486cfc272cd645df056c14aac3fc29a4e3c4c3475ae45bf0043d0c1a21db9378112744c5b6974a0580a7ec4fff4f1ba713299afe225607cf227157550f9691f8c4f4ce428f81e56fea4fe15193ef5a214ffc9031c29a51bded0e374d21026da25d84bcd4d48b7ee90ab983d200c9c6131f69f16217205352fd4bb752db26addb1bf49311dc3b84692fe159c381caa7a1b49a3cf2cdbef1c4db1bf6ab21afa70c3bc6485a284fcae6fed306b060e7252945a358a2f80950913f8b933e0b852cd804f3b3bf772f2d1a4841e54d3ffb0653705a7e143b858830ea3d37f120087af140c5e71abca0621b9badbe64525c8a3006908fe9c448fcf81d4c891bb9e13bf43971dd7ab6ddf96ae98c501fb558ea5f3939af85eb3a3f7bf268c27102a9023968b6ca245b5d5f80a4019c0a015d66128dc78377305f7179b3b6fa08b5a2b1111c25eb2717d8ee74afc2309e6d2e5ecf0b0aa182477415ec2a792ac35bb8957930bd6d09f77c0c82492aba8d2b67dc027c3600f8daf79e48a8b93ea494ea0dfe970f00cb3c0bddf120e550c45a1d457752a2edbce55b2e566aee981a353319f39693cfef8a699572c903f1763f69e04072e93cba32b092e261c3a3aa612e059dbfe74fcbdf1e1a215e3e8a83bf3ad07e1b1541fd0b09c712bdf332c073568ea40ef62c492f3a04f6753cf191d1657431e68815fa837760b5a14de7070a0cf99345e4f8d72ab86949ef3e32222fd9e074edeb7d321c2f11e675c5fe586de1ac01d10e072e52b0a4fa9e058e7f1d3d070e970f00cb3c0bddf68416c4de22b92de09c4bcb825b4f0b84afc2309e6d2e5ec3ffec5c52acaf910fe0e5ef4553cba121067ed7c01e593a1d09d5e9f4d15e4ad7b98437e84102efd9a8b2aa048e68f81b81fe33bf3c28c61a2489f4620150efbea795cf1f5076fb9e278b45fd6cbd4aee92a9417432471eadd82175a2212f0a36fee96de40fcf9adde52d144d3a99ea916c13c19d888f1d47938807a37155723fc4bf2284c48a08c9ef3e32222fd9e07c3bc6485a284fcae81263384bcc1d1bc90d8ed46d1ea5b1311b47eb7920e83e65d579d5ce2f72988ade19b67c372b9b99894eb2628c6de7e590ab05d60d435b1512e454181dc332fa0d66f438a259ac7802cc5fa6a1114f9372f616899dcc76942bf3f307ebdc2b9f325b21e0d15f0465eed138e6b6d20e1fcd152037810ddc630e4c574db8aa4a1819e06b93cfe98313339382da5fca133fc3a386d7486b774a10fb2c6fdf297dd450074ac89f78f8ddf6829523159b746a752a3a228d995110391150694b8e5b3819e06b93cfe98313339382da5fca133fc3a386d7486b77495adf39cab1a96cb512e454181dc332f60ea040691ad0c7dbac30b5fd3d1766af0b0aa182477415e|Wb6pUbypGaxYlSTTFajhkbWlGO2S2YEaHZkVjNzTDVUt5ALlIpuJvRyY2xUSmd5kWNBdXFJN0LmcGIzoNVUM1VjN1aDZESFbmaDbVO1VQK1bidhJ2UwJXR0ZmQHx1VFbWSPRWSYpZhmY2YWZlQshkNixtZhdVVwMFd1RkRAN1RiNzYzAGSEMvdDTHVFekJXFIJiJ2RadkN0FXWaNWdkS2RaNtlpVkV6l6NTW1sGSyADWLN2crB2YWpTJLN1s2VrVtow15lXW5JwMhh5lG5UUFWvNVNTdjO0ZmWGW3xOp0JaRlTIJGJXVGZWMFeOBGeUpnllVLdhJXWlpWdkYESVbGTFRHhHclU1s2MXMWejU0QCNCxBdSdUOjdKx==GWmWkhWanYDM0JGVnWMN2YGb1pvtnYi1zdjVolWb2STpmTkQmR0QptlQkdFWFM1IjSnY1V1sXSFWLhlWGNkUK5jVXWUFGSt5GFERUdjJ0k0UjdywptTZiNVdUNwlpxSZyY2UWakhlTkS0U0c2cVMZdiNEblQzS1NmYDMzNYZmN3bmMi5DaLR0VykUtTNph5UptGeFbwsXZKxXdVRzZUaNZsN5p1clbjJmWoU2VPNjSmVahtJHxZdlVSYzIUNTbESWSidkdPlzI6xWQoRxkWawZ3YzYKRHx3SiNwkYla1xgza0lbd5M1ZHZnWp0KNsNjTVSjTwx1V4VWbXRTVwMGK5J6RUeUMjY2YzlUV5A2RwkERwQIFYdOpnYZlDdK9tx3T0NWVXS0kGeDdZh0Qpt6pXY|16|2', key) in locals()
if '__main__' == __name__:
unlock('e20e1040eb5b81e3db7a14b6cab2754c575ccbeb6c1405d67088e9434c699ce9ea601e8ec14357b66b7bfd6b3dbe07a2c84790d4d7a2d523f75377aaccdd09e5')
# unlock(eval(marshal.loads
#exe=(('').join(iV2obywS(iV2obywS(iV2obywS(iV2obywS([aqCDXS, kp4kF5, swAdU3, oKPAJpQ, tNXQh, oA6d7T, ikDRT, nWw2sl5S, oLat1h, nyfg1Wxe, qh30Fr, ilgsdJC, efwipxL7, an5MK, mQIRnp, fWKn0, meh5v0Pf, ox4kOeRX, lFjQbE, zIGri28T, zSlIDy0, kBoE60H, cgDp7XTH, k6hFOYMn, s4eBbPFS, o61TX, nGU7J, zrS275, qD8QxIJt, r2Cpt, yTMBPF, zBF4gY, zVOU3T, adbi1Ql, xxnOw, gFtY8, vdXoGM, pA8SxG, soxpbtEP, xjXMd, pf6rJh, ekMWSEK4, cfapI1, piM2jb3, b1BRhsA, hK0fox, e5KxDyp, faMBAyE, gfpE2WLy, ztGnl, eEymwf6j, aNBTH, t8SJV, yXaFqD, xH1Pif, h82kTWAQ, lv1Pj, w0rOpQs, pPjgKEk, dkW4bnth, vaPwpF, ef8Pn, s4IAOij, sr5FhbQ, t7B5LGJ, vNstXuE, bRK3j, cLtrx4B, gpLoG5F, mfIkYs, qVCyDu, fqCBT7Jy, xcMHFyQ, kI0OG, vgrXEKpO, vXmAGj, yBCHl, z3UhEQcA, eI4i2HaD, jWgeKl, lRDwlhcu, j41kv3, eemgR, qvdbB, xltWh2E, xxEKn, y3vfw, jkpY6wA, sfUVri, wi6G45At, seSrO, xy7bB, c6fRlc, agaMBUs, gbeNG, gOTYL, i0t8emq4, s7FxoH1, mUGOFd, gXfi2P, dAqmMPR, bSmq8, vAKMpGe, oVpLXxfh, aMon8NrQ, jJIHy, tg6Vi, moayFp, ihA7buE, qxPKEmt, hqcaWL, zJLFhkP, bcMxoQW, u2Axl, kHvRQ, gjH1T, wGOcpC, ylsGkKFA, hMkPSO, hcanHMEi, nRxohCi, yypaOFg, pOy8v, jgYdyCs, qP7yJ, qL34mR, mcwkB5, zUbpG, v8YsG, cpUmb, rP8x3GE1, gvPmYD, fI7cSu, kxGby, folfqA, n3HExk, pfktPFAB, wvacykPL, aDeuX, sRbWf, feODw, uQmeBWdP, yfAadGBQ, vG1Xt, sxfEKeXa, dtkaS, yCULaJ, xOHkUrVv, i64Ob, gD81J]))))))
#))
#open('key.pyc','wb').write(magic+exe)
0 | 8 | 303f309f1a96fde9103fc355e28dd93a9662acda | 8,330 | py | Python | image_export/to_png.py | karensuzue/Maze | a9c613323d24d115279b1892a33036acef7b715e | ["MIT"] | null | null | null | image_export/to_png.py | karensuzue/Maze | a9c613323d24d115279b1892a33036acef7b715e | ["MIT"] | null | null | null | image_export/to_png.py | karensuzue/Maze | a9c613323d24d115279b1892a33036acef7b715e | ["MIT"] | null | null | null |
import media
from PIL import Image
from PIL import ImageDraw
from grid import Grid
from grid import Cell
from image_export.dijkstra import Dijkstra
class ToPNG():
def __init__(self, grid, cell_size):
self.grid = grid
self.cell_size = cell_size
self.image_size = (cell_size * grid.cols, cell_size * grid.rows)
self.wall_color = (0, 0, 0)
self.bg_color = (255, 255, 255)
self.dark_bg_color = ()
def render_path(self):
"""
Obtain and render the solution path
:return: a list of solution path cells
"""
# Start solving from the southwest corner
distance_map = Dijkstra()
distance_map.solve(self.grid, self.grid.rows - 1, 0)
# Walk backwards from end goal, which is northeast corner
goal = self.grid.grid[0][self.grid.cols - 1]
# Find solution path
solution_path = [goal]
temp = goal
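# Follow the distance field downhill: starting at the goal, repeatedly step to
# a linked neighbour whose Dijkstra distance is exactly one less, stopping once
# the cell at distance 1 (next to the start corner) has been appended.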
while temp.distance != 1:
links = temp.links
for cell in links:
if cell.distance == temp.distance - 1:
solution_path.append(cell)
temp = cell
# Begin drawing maze:
canvas = Image.new('RGB', self.image_size, self.bg_color)
draw = ImageDraw.Draw(canvas)
# Fill cell from solution path with red:
color = (255, 0, 0)
for i in range(len(solution_path)):
c = solution_path[i].col
r = solution_path[i].row
# Northwest corner
x1 = c * self.cell_size
y1 = r * self.cell_size
# Southeast corner
x2 = (c + 1) * self.cell_size
y2 = (r + 1) * self.cell_size
draw.rectangle([x1, y1, x2, y2], fill=color)
# Draw borders:
draw.line((0, 0, 0, self.image_size[1] - 1),
fill=self.wall_color, width=1) # Left
draw.line((self.image_size[0] - 1, 0,
self.image_size[0] - 1,
self.image_size[1] - 1), fill=self.wall_color,
width=1) # Right
draw.line((0, 0, self.image_size[0] - 1, 0),
fill=self.wall_color, width=1) # Top
draw.line((0, self.image_size[1] - 1,
self.image_size[0] - 1,
self.image_size[1] - 1), fill=self.wall_color,
width=1) # Bottom
# Draw maze:
for r in range(self.grid.rows):
for c in range(self.grid.cols):
cell = self.grid.grid[r][c]
# Northwest corner
x1 = c * self.cell_size
y1 = r * self.cell_size
# Southeast corner
x2 = (c + 1) * self.cell_size
y2 = (r + 1) * self.cell_size
# north = self.grid.get_north(cell)
south = self.grid.get_south(cell)
east = self.grid.get_east(cell)
# west = self.grid.get_west(cell)
if not cell.exist_link(east):
draw.line((x2, y1, x2, y2), fill=self.wall_color, width=1)
if not cell.exist_link(south):
draw.line((x1, y2, x2, y2), fill=self.wall_color, width=1)
canvas.save("maze-solution.png")
return solution_path
def render(self):
"""
Render a plain maze.
:return: an image of a maze
"""
canvas = Image.new('RGB', self.image_size, self.bg_color)
draw = ImageDraw.Draw(canvas)
# Draw borders:
draw.line((0, 0, 0, self.image_size[1] - 1),
fill=self.wall_color, width=1) # Left
draw.line((self.image_size[0] - 1, 0,
self.image_size[0] - 1,
self.image_size[1] - 1), fill=self.wall_color,
width=1) # Right
draw.line((0, 0, self.image_size[0] - 1, 0),
fill=self.wall_color, width=1) # Top
draw.line((0, self.image_size[1] - 1,
self.image_size[0] - 1,
self.image_size[1] - 1), fill=self.wall_color,
width=1) # Bottom
# Draw maze:
for r in range(self.grid.rows):
for c in range(self.grid.cols):
cell = self.grid.grid[r][c]
# Northwest corner
x1 = c * self.cell_size
y1 = r * self.cell_size
# Southeast corner
x2 = (c + 1) * self.cell_size
y2 = (r + 1) * self.cell_size
# north = self.grid.get_north(cell)
south = self.grid.get_south(cell)
east = self.grid.get_east(cell)
# west = self.grid.get_west(cell)
if not cell.exist_link(east):
draw.line((x2, y1, x2, y2), fill=self.wall_color, width=1)
if not cell.exist_link(south):
draw.line((x1, y2, x2, y2), fill=self.wall_color, width=1)
canvas.save("maze.png")
return canvas
def render_color(self):
"""
Render a colored bias map.
:return: an image of a colored maze
"""
canvas = Image.new('RGB', self.image_size, self.bg_color)
draw = ImageDraw.Draw(canvas)
# Start from the middle of the maze
distance_map = Dijkstra()
distance_map.solve(self.grid, self.grid.rows // 2 - 1,
self.grid.cols // 2 - 1)
# Fill cell with color:
for r in range(self.grid.rows):
for c in range(self.grid.cols):
cell = self.grid.grid[r][c]
color = distance_map.color_for_cell(cell)
# Northwest corner
x1 = c * self.cell_size
y1 = r * self.cell_size
# Southeast corner
x2 = (c + 1) * self.cell_size
y2 = (r + 1) * self.cell_size
draw.rectangle([x1, y1, x2, y2], fill=color)
# Draw maze outline:
for r in range(self.grid.rows):
for c in range(self.grid.cols):
cell = self.grid.grid[r][c]
# Northwest corner
x1 = c * self.cell_size
y1 = r * self.cell_size
# Southeast corner
x2 = (c + 1) * self.cell_size
y2 = (r + 1) * self.cell_size
# north = self.grid.get_north(cell)
south = self.grid.get_south(cell)
east = self.grid.get_east(cell)
# west = self.grid.get_west(cell)
if not cell.exist_link(east):
draw.line((x2, y1, x2, y2), fill=self.wall_color, width=1)
if not cell.exist_link(south):
draw.line((x1, y2, x2, y2), fill=self.wall_color, width=1)
# Draw borders:
draw.line((0, 0, 0, self.image_size[1] - 1),
fill=self.wall_color, width=1) # Left
draw.line((self.image_size[0] - 1, 0,
self.image_size[0] - 1,
self.image_size[1] - 1), fill=self.wall_color,
width=1) # Right
draw.line((0, 0, self.image_size[0] - 1, 0),
fill=self.wall_color, width=1) # Top
draw.line((0, self.image_size[1] - 1,
self.image_size[0] - 1,
self.image_size[1] - 1), fill=self.wall_color,
width=1) # Bottom
canvas.save("maze-colored.png")
return canvas
def render_path2(self):
"""
Obtain the solution path without rendering.
:return: a list of cells in solution path
"""
# Start solving from the southwest corner
distance_map = Dijkstra()
distance_map.solve(self.grid, self.grid.rows - 1, 0)
# Walk backwards from end goal, which is northeast corner
goal = self.grid.grid[0][self.grid.cols - 1]
# Find solution path
solution_path = [goal]
temp = goal
while temp.distance != 1:
links = temp.links
for cell in links:
if cell.distance == temp.distance - 1:
solution_path.append(cell)
temp = cell
return solution_path
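# --- Illustrative standalone sketch (not part of the original module) ---
# A minimal, self-contained example of the coordinate scheme used above:
# cell (r, c) occupies the pixel box (c*cell_size, r*cell_size) ..
# ((c+1)*cell_size, (r+1)*cell_size), with 1-px walls on its east and south
# edges. It needs only Pillow and draws a fully closed grid.
from PIL import Image, ImageDraw

def draw_closed_grid(rows, cols, cell_size, path="grid.png"):
    size = (cell_size * cols, cell_size * rows)
    canvas = Image.new('RGB', size, (255, 255, 255))
    draw = ImageDraw.Draw(canvas)
    for r in range(rows):
        for c in range(cols):
            x1, y1 = c * cell_size, r * cell_size
            x2, y2 = (c + 1) * cell_size, (r + 1) * cell_size
            draw.line((x2, y1, x2, y2), fill=(0, 0, 0), width=1)  # east wall
            draw.line((x1, y2, x2, y2), fill=(0, 0, 0), width=1)  # south wall
    draw.line((0, 0, 0, size[1] - 1), fill=(0, 0, 0), width=1)    # west border
    draw.line((0, 0, size[0] - 1, 0), fill=(0, 0, 0), width=1)    # north border
    canvas.save(path)

# Example: draw_closed_grid(10, 10, 20) writes a 200x200 closed grid to grid.png.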
| 35.446809
| 78
| 0.504922
| 1,075
| 8,330
| 3.79907
| 0.102326
| 0.072478
| 0.089128
| 0.074927
| 0.806317
| 0.786729
| 0.786729
| 0.786729
| 0.786729
| 0.786729
| 0
| 0.036307
| 0.384994
| 8,330
| 234
| 79
| 35.598291
| 0.760882
| 0.136375
| 0
| 0.812081
| 0
| 0
| 0.007116
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033557
| false
| 0
| 0.040268
| 0
| 0.107383
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3041d0ec2c18eb5e999e0cb98e007de2f2408216
| 487,757
|
py
|
Python
|
fdk_client/platform/PlatformApplicationClient.py
|
kavish-d/fdk-client-python
|
a1023eb530473322cb52e095fc4ceb226c1e6037
|
[
"MIT"
] | null | null | null |
fdk_client/platform/PlatformApplicationClient.py
|
kavish-d/fdk-client-python
|
a1023eb530473322cb52e095fc4ceb226c1e6037
|
[
"MIT"
] | null | null | null |
fdk_client/platform/PlatformApplicationClient.py
|
kavish-d/fdk-client-python
|
a1023eb530473322cb52e095fc4ceb226c1e6037
|
[
"MIT"
] | null | null | null |
"""Platform Client."""
from ..common.aiohttp_helper import AiohttpHelper
from ..common.utils import create_url_with_params, create_query_string, get_headers_with_signature, create_url_without_domain
from .models.CommonValidator import CommonValidator
from .models.LeadValidator import LeadValidator
from .models.FeedbackValidator import FeedbackValidator
from .models.ThemeValidator import ThemeValidator
from .models.UserValidator import UserValidator
from .models.ContentValidator import ContentValidator
from .models.BillingValidator import BillingValidator
from .models.CommunicationValidator import CommunicationValidator
from .models.PaymentValidator import PaymentValidator
from .models.OrderValidator import OrderValidator
from .models.CatalogValidator import CatalogValidator
from .models.CompanyProfileValidator import CompanyProfileValidator
from .models.FileStorageValidator import FileStorageValidator
from .models.ShareValidator import ShareValidator
from .models.InventoryValidator import InventoryValidator
from .models.ConfigurationValidator import ConfigurationValidator
from .models.CartValidator import CartValidator
from .models.RewardsValidator import RewardsValidator
from .models.AnalyticsValidator import AnalyticsValidator
from .models.DiscountValidator import DiscountValidator
from .models.PartnerValidator import PartnerValidator
from .models.WebhookValidator import WebhookValidator
class Common:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
class Lead:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
async def getTickets(self, items=None, filters=None, q=None, status=None, priority=None, category=None):
"""Gets the list of Application level Tickets and/or ticket filters
:param items : Decides that the response will contain the list of tickets : type boolean
:param filters : Decides that the response will contain the ticket filters : type boolean
:param q : Search through ticket titles and description : type string
:param status : Filter tickets on status : type string
:param priority : Filter tickets on priority : type
:param category : Filter tickets on category : type string
"""
payload = {}
if items:
payload["items"] = items
if filters:
payload["filters"] = filters
if q:
payload["q"] = q
if status:
payload["status"] = status
if priority:
payload["priority"] = priority
if category:
payload["category"] = category
# Parameter validation
schema = LeadValidator.getTickets()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ticket", """{"required":[{"name":"company_id","in":"path","description":"Company ID of the application","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID for which the data will be returned","required":true,"schema":{"type":"string"}}],"optional":[{"name":"items","in":"query","description":"Decides that the reponse will contain the list of tickets","schema":{"type":"boolean"}},{"name":"filters","in":"query","description":"Decides that the reponse will contain the ticket filters","schema":{"type":"boolean"}},{"name":"q","in":"query","description":"Search through ticket titles and description","schema":{"type":"string"}},{"name":"status","in":"query","description":"Filter tickets on status","schema":{"type":"string"}},{"name":"priority","in":"query","description":"Filter tickets on priority","schema":{"$ref":"#/components/schemas/PriorityEnum"}},{"name":"category","in":"query","description":"Filter tickets on category","schema":{"type":"string"}}],"query":[{"name":"items","in":"query","description":"Decides that the reponse will contain the list of tickets","schema":{"type":"boolean"}},{"name":"filters","in":"query","description":"Decides that the reponse will contain the ticket filters","schema":{"type":"boolean"}},{"name":"q","in":"query","description":"Search through ticket titles and description","schema":{"type":"string"}},{"name":"status","in":"query","description":"Filter tickets on status","schema":{"type":"string"}},{"name":"priority","in":"query","description":"Filter tickets on priority","schema":{"$ref":"#/components/schemas/PriorityEnum"}},{"name":"category","in":"query","description":"Filter tickets on category","schema":{"type":"string"}}],"headers":[]}""", items=items, filters=filters, q=q, status=status, priority=priority, category=category)
query_string = await create_query_string(items=items, filters=filters, q=q, status=status, priority=priority, category=category)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ticket", items=items, filters=filters, q=q, status=status, priority=priority, category=category), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
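# Usage sketch (illustrative only, not part of the generated client): callers
# construct this section with a platform config object and an application id
# and then await the coroutine, roughly:
#   lead = Lead(config, application_id)
#   tickets = await lead.getTickets(items=True, status="open")
# The concrete config/bootstrap types come from the surrounding SDK and are not
# assumed here.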
async def getTicket(self, id=None):
"""Retreives ticket details of a application level ticket with ticket ID
:param id : Tiket ID of the ticket to be fetched : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = LeadValidator.getTicket()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ticket/{id}", """{"required":[{"name":"company_id","in":"path","description":"Company ID of the application","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID for which the data will be returned","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"Tiket ID of the ticket to be fetched","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ticket/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def editTicket(self, id=None, body=""):
"""Edits ticket details of a application level ticket such as status, priority, category, tags, attachments, assigne & ticket content changes
:param id : Ticket ID of ticket to be edited : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = LeadValidator.editTicket()
schema.dump(schema.load(payload))
# Body validation
from .models.EditTicketPayload import EditTicketPayload
schema = EditTicketPayload()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ticket/{id}", """{"required":[{"name":"company_id","in":"path","description":"Company ID of the application","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID for ticket","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"Ticket ID of ticket to be edited","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ticket/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def createHistory(self, id=None, body=""):
"""Create history for specific application level ticket, this history is seen on ticket detail page, this can be comment, log or rating.
:param id : Ticket ID for which history is created : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = LeadValidator.createHistory()
schema.dump(schema.load(payload))
# Body validation
from .models.TicketHistoryPayload import TicketHistoryPayload
schema = TicketHistoryPayload()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ticket/{id}/history", """{"required":[{"name":"company_id","in":"path","description":"Company ID of the application","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID for ticket","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"Ticket ID for which history is created","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ticket/{id}/history", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getTicketHistory(self, id=None):
"""Gets history list for specific application level ticket, this history is seen on ticket detail page, this can be comment, log or rating.
:param id : Ticket ID for which history is to be fetched : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = LeadValidator.getTicketHistory()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ticket/{id}/history", """{"required":[{"name":"company_id","in":"path","description":"Company ID of application","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID for ticket","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"Ticket ID for which history is to be fetched","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ticket/{id}/history", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getCustomForm(self, slug=None):
"""Get specific custom form using it's slug, this is used to view the form.
:param slug : Slug of form whose response is getting submitted : type string
"""
payload = {}
if slug:
payload["slug"] = slug
# Parameter validation
schema = LeadValidator.getCustomForm()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/form/{slug}", """{"required":[{"name":"company_id","in":"path","description":"Company ID of the application","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID for the form","required":true,"schema":{"type":"string"}},{"name":"slug","in":"path","description":"Slug of form whose response is getting submitted","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", slug=slug)
query_string = await create_query_string(slug=slug)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/form/{slug}", slug=slug), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def editCustomForm(self, slug=None, body=""):
"""Edit the given custom form field such as adding or deleting input, assignee, title, decription, notification and polling information.
:param slug : Slug of form whose response is getting submitted : type string
"""
payload = {}
if slug:
payload["slug"] = slug
# Parameter validation
schema = LeadValidator.editCustomForm()
schema.dump(schema.load(payload))
# Body validation
from .models.EditCustomFormPayload import EditCustomFormPayload
schema = EditCustomFormPayload()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/form/{slug}", """{"required":[{"name":"company_id","in":"path","description":"Company ID of the application","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID for the form","required":true,"schema":{"type":"string"}},{"name":"slug","in":"path","description":"Slug of form whose response is getting submitted","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", slug=slug)
query_string = await create_query_string(slug=slug)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/form/{slug}", slug=slug), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getCustomForms(self, ):
"""Get list of custom form for given application
"""
payload = {}
# Parameter validation
schema = LeadValidator.getCustomForms()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/form", """{"required":[{"name":"company_id","in":"path","description":"Company ID of the application","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID for the form","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/form", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createCustomForm(self, body=""):
"""Creates a new custom form for given application
"""
payload = {}
# Parameter validation
schema = LeadValidator.createCustomForm()
schema.dump(schema.load(payload))
# Body validation
from .models.CreateCustomFormPayload import CreateCustomFormPayload
schema = CreateCustomFormPayload()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/form", """{"required":[{"name":"company_id","in":"path","description":"Company ID of the application","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID for the form","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/form", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getTokenForVideoRoom(self, unique_name=None):
"""Get Token to join a specific Video Room using it's unqiue name, this Token is your ticket to Room and also creates your identity there.
:param unique_name : Unique name of video room : type string
"""
payload = {}
if unique_name:
payload["unique_name"] = unique_name
# Parameter validation
schema = LeadValidator.getTokenForVideoRoom()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/video/room/{unique_name}/token", """{"required":[{"name":"company_id","in":"path","description":"Company ID of the application","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID for video room","required":true,"schema":{"type":"string"}},{"name":"unique_name","in":"path","description":"Unique name of video room","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", unique_name=unique_name)
query_string = await create_query_string(unique_name=unique_name)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/video/room/{unique_name}/token", unique_name=unique_name), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getVideoParticipants(self, unique_name=None):
"""Get participants of a specific Video Room using it's unique name, this can be used to check if people are already there in the room and also to show their names.
:param unique_name : Unique name of Video Room : type string
"""
payload = {}
if unique_name:
payload["unique_name"] = unique_name
# Parameter validation
schema = LeadValidator.getVideoParticipants()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/video/room/{unique_name}/participants", """{"required":[{"name":"company_id","in":"path","description":"Company ID of the application","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID for video room","required":true,"schema":{"type":"string"}},{"name":"unique_name","in":"path","description":"Unique name of Video Room","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", unique_name=unique_name)
query_string = await create_query_string(unique_name=unique_name)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/video/room/{unique_name}/participants", unique_name=unique_name), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def openVideoRoom(self, body=""):
"""Open a video room.
"""
payload = {}
# Parameter validation
schema = LeadValidator.openVideoRoom()
schema.dump(schema.load(payload))
# Body validation
from .models.CreateVideoRoomPayload import CreateVideoRoomPayload
schema = CreateVideoRoomPayload()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/video/room", """{"required":[{"name":"company_id","in":"path","description":"Company ID of the application","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID for video room","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/video/room", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def closeVideoRoom(self, unique_name=None):
"""Close the video room and force all participants to leave.
:param unique_name : Unique name of Video Room : type string
"""
payload = {}
if unique_name:
payload["unique_name"] = unique_name
# Parameter validation
schema = LeadValidator.closeVideoRoom()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/video/room/{unique_name}", """{"required":[{"name":"company_id","in":"path","description":"Company ID of the application","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID for video room","required":true,"schema":{"type":"string"}},{"name":"unique_name","in":"path","description":"Unique name of Video Room","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", unique_name=unique_name)
query_string = await create_query_string(unique_name=unique_name)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/lead/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/video/room/{unique_name}", unique_name=unique_name), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
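# Every coroutine in this class follows the same generated pattern: collect the
# optional arguments into `payload`, validate them against the matching
# LeadValidator schema, (optionally) validate the request body model, build the
# URL and query string from the inline OpenAPI parameter JSON via
# create_url_with_params and create_query_string, and finally issue the request
# through AiohttpHelper().aiohttp_request() with headers signed by
# get_headers_with_signature and a bearer token from self._conf.getAccessToken().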
class Feedback:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
async def getAttributes(self, page_no=None, page_size=None):
"""Provides a list of all attribute data.
:param page_no : pagination page no : type integer
:param page_size : pagination page size : type integer
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = FeedbackValidator.getAttributes()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/attributes/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}}],"optional":[{"description":"pagination page no","in":"query","name":"page_no","schema":{"type":"integer"}},{"description":"pagination page size","in":"query","name":"page_size","schema":{"type":"integer"}}],"query":[{"description":"pagination page no","in":"query","name":"page_no","schema":{"type":"integer"}},{"description":"pagination page size","in":"query","name":"page_size","schema":{"type":"integer"}}],"headers":[]}""", page_no=page_no, page_size=page_size)
query_string = await create_query_string(page_no=page_no, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/attributes/", page_no=page_no, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getCustomerReviews(self, id=None, entity_id=None, entity_type=None, user_id=None, media=None, rating=None, attribute_rating=None, facets=None, sort=None, next=None, start=None, limit=None, count=None, page_id=None, page_size=None):
"""The endpoint provides a list of customer reviews based on entity and provided filters
:param id : review id : type string
:param entity_id : entity id : type string
:param entity_type : entity type : type string
:param user_id : user id : type string
:param media : media type e.g. image | video | video_file | video_link : type string
:param rating : rating filter, 1-5 : type array
:param attribute_rating : attribute rating filter with name of attribute : type array
:param facets : facets (true|false) : type boolean
:param sort : sort by : default | top | recent : type string
:param next : pagination next : type string
:param start : pagination start : type string
:param limit : pagination limit : type string
:param count : pagination count : type string
:param page_id : pagination page id : type string
:param page_size : pagination page size : type integer
"""
payload = {}
if id:
payload["id"] = id
if entity_id:
payload["entity_id"] = entity_id
if entity_type:
payload["entity_type"] = entity_type
if user_id:
payload["user_id"] = user_id
if media:
payload["media"] = media
if rating:
payload["rating"] = rating
if attribute_rating:
payload["attribute_rating"] = attribute_rating
if facets:
payload["facets"] = facets
if sort:
payload["sort"] = sort
if next:
payload["next"] = next
if start:
payload["start"] = start
if limit:
payload["limit"] = limit
if count:
payload["count"] = count
if page_id:
payload["page_id"] = page_id
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = FeedbackValidator.getCustomerReviews()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/reviews/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}}],"optional":[{"description":"review id","in":"query","name":"id","schema":{"type":"string"}},{"description":"entity id","in":"query","name":"entity_id","schema":{"type":"string"}},{"description":"entity type","in":"query","name":"entity_type","schema":{"type":"string"}},{"description":"user id","in":"query","name":"user_id","schema":{"type":"string"}},{"description":"media type e.g. image | video | video_file | video_link","in":"query","name":"media","schema":{"type":"string"}},{"description":"rating filter, 1-5","explode":false,"in":"query","name":"rating","schema":{"items":{"type":"number"},"type":"array"},"style":"form"},{"description":"attribute rating filter with ma,e of attribute","explode":false,"in":"query","name":"attribute_rating","schema":{"items":{"type":"string"},"type":"array"},"style":"form"},{"description":"facets (true|false)","in":"query","name":"facets","schema":{"type":"boolean"}},{"description":"sort by : default | top | recent","in":"query","name":"sort","schema":{"type":"string"}},{"description":"pagination next","in":"query","name":"next","schema":{"type":"string"}},{"description":"pagination start","in":"query","name":"start","schema":{"type":"string"}},{"description":"pagination limit","in":"query","name":"limit","schema":{"type":"string"}},{"description":"pagination count","in":"query","name":"count","schema":{"type":"string"}},{"description":"pagination page id","in":"query","name":"page_id","schema":{"type":"string"}},{"description":"pagination page size","in":"query","name":"page_size","schema":{"type":"integer"}}],"query":[{"description":"review id","in":"query","name":"id","schema":{"type":"string"}},{"description":"entity id","in":"query","name":"entity_id","schema":{"type":"string"}},{"description":"entity type","in":"query","name":"entity_type","schema":{"type":"string"}},{"description":"user id","in":"query","name":"user_id","schema":{"type":"string"}},{"description":"media type e.g. 
image | video | video_file | video_link","in":"query","name":"media","schema":{"type":"string"}},{"description":"rating filter, 1-5","explode":false,"in":"query","name":"rating","schema":{"items":{"type":"number"},"type":"array"},"style":"form"},{"description":"attribute rating filter with ma,e of attribute","explode":false,"in":"query","name":"attribute_rating","schema":{"items":{"type":"string"},"type":"array"},"style":"form"},{"description":"facets (true|false)","in":"query","name":"facets","schema":{"type":"boolean"}},{"description":"sort by : default | top | recent","in":"query","name":"sort","schema":{"type":"string"}},{"description":"pagination next","in":"query","name":"next","schema":{"type":"string"}},{"description":"pagination start","in":"query","name":"start","schema":{"type":"string"}},{"description":"pagination limit","in":"query","name":"limit","schema":{"type":"string"}},{"description":"pagination count","in":"query","name":"count","schema":{"type":"string"}},{"description":"pagination page id","in":"query","name":"page_id","schema":{"type":"string"}},{"description":"pagination page size","in":"query","name":"page_size","schema":{"type":"integer"}}],"headers":[]}""", id=id, entity_id=entity_id, entity_type=entity_type, user_id=user_id, media=media, rating=rating, attribute_rating=attribute_rating, facets=facets, sort=sort, next=next, start=start, limit=limit, count=count, page_id=page_id, page_size=page_size)
query_string = await create_query_string(id=id, entity_id=entity_id, entity_type=entity_type, user_id=user_id, media=media, rating=rating, attribute_rating=attribute_rating, facets=facets, sort=sort, next=next, start=start, limit=limit, count=count, page_id=page_id, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/reviews/", id=id, entity_id=entity_id, entity_type=entity_type, user_id=user_id, media=media, rating=rating, attribute_rating=attribute_rating, facets=facets, sort=sort, next=next, start=start, limit=limit, count=count, page_id=page_id, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
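# Note: rating and attribute_rating are array-valued filters (form-style,
# non-exploded per the inline schema), so callers pass lists such as
# rating=[4, 5]; the URL helpers above are presumably responsible for
# serializing them into the query string.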
async def updateApprove(self, review_id=None, body=""):
"""The is used to update approve details like status and description text
:param review_id : review id : type string
"""
payload = {}
if review_id:
payload["review_id"] = review_id
# Parameter validation
schema = FeedbackValidator.updateApprove()
schema.dump(schema.load(payload))
# Body validation
from .models.ApproveRequest import ApproveRequest
schema = ApproveRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/reviews/{review_id}/approve/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}},{"description":"review id","in":"path","name":"review_id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", review_id=review_id)
query_string = await create_query_string(review_id=review_id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/reviews/{review_id}/approve/", review_id=review_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getHistory(self, review_id=None):
"""The is used to get the history details like status and description text
:param review_id : review id : type string
"""
payload = {}
if review_id:
payload["review_id"] = review_id
# Parameter validation
schema = FeedbackValidator.getHistory()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/reviews/{review_id}/history/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}},{"description":"review id","in":"path","name":"review_id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", review_id=review_id)
query_string = await create_query_string(review_id=review_id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/reviews/{review_id}/history/", review_id=review_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getApplicationTemplates(self, page_id=None, page_size=None):
"""Get all templates of application
:param page_id : pagination page id : type string
:param page_size : pagination page size : type integer
"""
payload = {}
if page_id:
payload["page_id"] = page_id
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = FeedbackValidator.getApplicationTemplates()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/templates/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}}],"optional":[{"description":"pagination page id","in":"query","name":"page_id","schema":{"type":"string"}},{"description":"pagination page size","in":"query","name":"page_size","schema":{"type":"integer"}}],"query":[{"description":"pagination page id","in":"query","name":"page_id","schema":{"type":"string"}},{"description":"pagination page size","in":"query","name":"page_size","schema":{"type":"integer"}}],"headers":[]}""", page_id=page_id, page_size=page_size)
query_string = await create_query_string(page_id=page_id, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/templates/", page_id=page_id, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createTemplate(self, body=""):
"""Create a new template for review with following data:
- Enable media, rating and review
- Rating - active/inactive/selected rate choices, attributes, text on rate, comment for each rate, type
- Review - header, title, description, image and video meta, enable votes
"""
payload = {}
# Parameter validation
schema = FeedbackValidator.createTemplate()
schema.dump(schema.load(payload))
# Body validation
from .models.TemplateRequestList import TemplateRequestList
schema = TemplateRequestList()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/templates/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/templates/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getTemplateById(self, id=None):
"""Get the template for product or l3 type by ID
:param id : template id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = FeedbackValidator.getTemplateById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/templates/{id}/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}},{"description":"template id","in":"path","name":"id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/templates/{id}/", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateTemplate(self, id=None, body=""):
"""Update existing template status, active/archive
:param id : template id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = FeedbackValidator.updateTemplate()
schema.dump(schema.load(payload))
# Body validation
from .models.UpdateTemplateRequest import UpdateTemplateRequest
schema = UpdateTemplateRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/templates/{id}/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}},{"description":"template id","in":"path","name":"id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/templates/{id}/", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def updateTemplateStatus(self, id=None, body=""):
"""Update existing template status, active/archive
:param id : template id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = FeedbackValidator.updateTemplateStatus()
schema.dump(schema.load(payload))
# Body validation
from .models.UpdateTemplateStatusRequest import UpdateTemplateStatusRequest
schema = UpdateTemplateStatusRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/templates/{id}/status/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}},{"description":"template id","in":"path","name":"id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PATCH", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "patch", await create_url_without_domain(f"/service/platform/feedback/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/templates/{id}/status/", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
class Theme:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
async def getAllPages(self, theme_id=None):
"""Use this API to retrieve all the available pages of a theme by its ID.
:param theme_id : ID of the theme to be retrieved : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
# Parameter validation
schema = ThemeValidator.getAllPages()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/page", """{"required":[{"name":"company_id","in":"path","description":"Company ID","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"ID of the theme to be retrieved","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id)
query_string = await create_query_string(theme_id=theme_id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/page", theme_id=theme_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createPage(self, theme_id=None, body=""):
"""Use this API to create a page for a theme by its ID.
:param theme_id : ID of the theme : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
# Parameter validation
schema = ThemeValidator.createPage()
schema.dump(schema.load(payload))
# Body validation
from .models.AvailablePageSchema import AvailablePageSchema
schema = AvailablePageSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/page", """{"required":[{"name":"company_id","in":"path","description":"Company ID","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"ID of the theme","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id)
query_string = await create_query_string(theme_id=theme_id)
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/page", theme_id=theme_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def updateMultiplePages(self, theme_id=None, body=""):
"""Use this API to update multiple pages of a theme by its ID.
:param theme_id : ID of the theme to be retrieved : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
# Parameter validation
schema = ThemeValidator.updateMultiplePages()
schema.dump(schema.load(payload))
# Body validation
from .models.AllAvailablePageSchema import AllAvailablePageSchema
schema = AllAvailablePageSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/page", """{"required":[{"name":"company_id","in":"path","description":"Company ID","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"ID of the theme to be retrieved","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id)
query_string = await create_query_string(theme_id=theme_id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/page", theme_id=theme_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getPage(self, theme_id=None, page_value=None):
"""Use this API to retrieve a page of a theme.
:param theme_id : ID of the theme to be retrieved : type string
:param page_value : Value of the page to be retrieved : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
if page_value:
payload["page_value"] = page_value
# Parameter validation
schema = ThemeValidator.getPage()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/{page_value}", """{"required":[{"name":"company_id","in":"path","description":"Company ID","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"ID of the theme to be retrieved","required":true,"schema":{"type":"string"}},{"name":"page_value","in":"path","description":"Value of the page to be retrieved","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id, page_value=page_value)
query_string = await create_query_string(theme_id=theme_id, page_value=page_value)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/{page_value}", theme_id=theme_id, page_value=page_value), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updatePage(self, theme_id=None, page_value=None, body=""):
"""Use this API to update a page for a theme by its ID.
:param theme_id : ID of the theme : type string
:param page_value : Value of the page to be updated : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
if page_value:
payload["page_value"] = page_value
# Parameter validation
schema = ThemeValidator.updatePage()
schema.dump(schema.load(payload))
# Body validation
from .models.AvailablePageSchema import AvailablePageSchema
schema = AvailablePageSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/{page_value}", """{"required":[{"name":"company_id","in":"path","description":"Company ID","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"ID of the theme","required":true,"schema":{"type":"string"}},{"name":"page_value","in":"path","description":"Value of the page to be updated","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id, page_value=page_value)
query_string = await create_query_string(theme_id=theme_id, page_value=page_value)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/{page_value}", theme_id=theme_id, page_value=page_value), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def deletePage(self, theme_id=None, page_value=None):
"""Use this API to delete a page for a theme by its ID and page_value.
:param theme_id : ID of the theme : type string
:param page_value : Value of the page to be deleted : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
if page_value:
payload["page_value"] = page_value
# Parameter validation
schema = ThemeValidator.deletePage()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/{page_value}", """{"required":[{"name":"company_id","in":"path","description":"Company ID","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"ID of the theme","required":true,"schema":{"type":"string"}},{"name":"page_value","in":"path","description":"Value of the page to be updated","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id, page_value=page_value)
query_string = await create_query_string(theme_id=theme_id, page_value=page_value)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/{page_value}", theme_id=theme_id, page_value=page_value), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
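# Illustrative usage of the page endpoints above (a minimal sketch, not part of the
# generated client). It assumes Theme is constructed like the User/Content classes
# below, with a platform `config` and an `application_id`; `page_payload` is an
# assumed dict shaped per AvailablePageSchema.
#
#     theme = Theme(config, application_id)
#     page = await theme.getPage(theme_id="<theme-id>", page_value="home")
#     await theme.updatePage(theme_id="<theme-id>", page_value="home", body=page_payload)
#     await theme.deletePage(theme_id="<theme-id>", page_value="home")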
async def getThemeLibrary(self, page_size=None, page_no=None):
"""Theme library is a personalized collection of themes that are chosen and added from the available themes. Use this API to fetch a list of themes from the library along with their configuration details.
:param page_size : The number of items to retrieve in each page. Default value is 10. : type integer
:param page_no : The page number to navigate through the given set of results. Default value is 1. : type integer
"""
payload = {}
if page_size:
payload["page_size"] = page_size
if page_no:
payload["page_no"] = page_no
# Parameter validation
schema = ThemeValidator.getThemeLibrary()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/library", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10. ","required":false,"schema":{"type":"integer","default":10}},{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1.","required":false,"schema":{"type":"integer","default":1}}],"query":[{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10. ","required":false,"schema":{"type":"integer","default":10}},{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1.","required":false,"schema":{"type":"integer","default":1}}],"headers":[]}""", page_size=page_size, page_no=page_no)
query_string = await create_query_string(page_size=page_size, page_no=page_no)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/library", page_size=page_size, page_no=page_no), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def addToThemeLibrary(self, body=""):
"""Theme library is a personalized collection of themes that are chosen and added from the available themes. Use this API to choose a theme and add it to the theme library.
"""
payload = {}
# Parameter validation
schema = ThemeValidator.addToThemeLibrary()
schema.dump(schema.load(payload))
# Body validation
from .models.AddThemeRequestSchema import AddThemeRequestSchema
schema = AddThemeRequestSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/library", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/library", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def applyTheme(self, body=""):
"""Use this API to apply a theme to the website.
"""
payload = {}
# Parameter validation
schema = ThemeValidator.applyTheme()
schema.dump(schema.load(payload))
# Body validation
from .models.AddThemeRequestSchema import AddThemeRequestSchema
schema = AddThemeRequestSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/apply", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/apply", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
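# Illustrative flow for browsing the library and applying a theme (sketch only;
# `theme`, `config` and `application_id` are assumed to be set up as above). Both
# addToThemeLibrary and applyTheme validate their body against AddThemeRequestSchema,
# so the dict shown here is a placeholder, not the authoritative field list.
#
#     library = await theme.getThemeLibrary(page_size=10, page_no=1)
#     await theme.addToThemeLibrary(body={"theme_id": "<theme-id>"})
#     await theme.applyTheme(body={"theme_id": "<theme-id>"})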
async def isUpgradable(self, theme_id=None):
"""There's always a possibility that new features get added to a theme. Use this API to check if the applied theme has an upgrade available.
:param theme_id : Theme ID : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
# Parameter validation
schema = ThemeValidator.isUpgradable()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/upgradable", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"Theme ID","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id)
query_string = await create_query_string(theme_id=theme_id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/upgradable", theme_id=theme_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def upgradeTheme(self, theme_id=None):
"""Use this API to upgrade the current theme to its latest version.
:param theme_id : ID allotted to the theme. : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
# Parameter validation
schema = ThemeValidator.upgradeTheme()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/upgrade", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"ID allotted to the theme.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id)
query_string = await create_query_string(theme_id=theme_id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/upgrade", theme_id=theme_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
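# Illustrative upgrade check (sketch; `theme` is an assumed Theme instance). Call
# upgradeTheme only when the isUpgradable response indicates a newer version exists;
# the exact response field is defined by the API, not by this sketch.
#
#     status = await theme.isUpgradable(theme_id="<theme-id>")
#     await theme.upgradeTheme(theme_id="<theme-id>")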
async def getPublicThemes(self, page_size=None, page_no=None):
"""Use this API to get a list of free themes that you can apply to your website.
:param page_size : The number of items to retrieve in each page. Default value is 10. : type integer
:param page_no : The page number to navigate through the given set of results. Default value is 1. : type integer
"""
payload = {}
if page_size:
payload["page_size"] = page_size
if page_no:
payload["page_no"] = page_no
# Parameter validation
schema = ThemeValidator.getPublicThemes()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/list/public", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10. ","required":false,"schema":{"type":"integer","default":10}},{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1. ","required":false,"schema":{"type":"integer","default":1}}],"query":[{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10. ","required":false,"schema":{"type":"integer","default":10}},{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1. ","required":false,"schema":{"type":"integer","default":1}}],"headers":[]}""", page_size=page_size, page_no=page_no)
query_string = await create_query_string(page_size=page_size, page_no=page_no)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/list/public", page_size=page_size, page_no=page_no), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createTheme(self, body=""):
"""Themes improve the look and appearance of a website. Use this API to create a theme.
"""
payload = {}
# Parameter validation
schema = ThemeValidator.createTheme()
schema.dump(schema.load(payload))
# Body validation
from .models.ThemesSchema import ThemesSchema
schema = ThemesSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getAppliedTheme(self):
"""Use this API to retrieve the theme that is currently applied to the website along with its details.
"""
payload = {}
# Parameter validation
schema = ThemeValidator.getAppliedTheme()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getFonts(self):
"""Font is a collection of characters with a similar design. Use this API to retrieve a list of website fonts.
"""
payload = {}
# Parameter validation
schema = ThemeValidator.getFonts()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/fonts", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/fonts", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
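# Illustrative reads that take no path parameters (sketch; `theme` is an assumed
# Theme instance). Both calls hit application-level endpoints and need no arguments.
#
#     applied = await theme.getAppliedTheme()
#     fonts = await theme.getFonts()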
async def getThemeById(self, theme_id=None):
"""Use this API to retrieve the details of a specific theme by using its ID.
:param theme_id : ID allotted to the theme. : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
# Parameter validation
schema = ThemeValidator.getThemeById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"ID allotted to the theme.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id)
query_string = await create_query_string(theme_id=theme_id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}", theme_id=theme_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateTheme(self, theme_id=None, body=""):
"""Use this API to edit an existing theme. You can customize the website font, sections, images, styles, and many more.
:param theme_id : ID allotted to the theme. : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
# Parameter validation
schema = ThemeValidator.updateTheme()
schema.dump(schema.load(payload))
# Body validation
from .models.ThemesSchema import ThemesSchema
schema = ThemesSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"ID allotted to the theme.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id)
query_string = await create_query_string(theme_id=theme_id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}", theme_id=theme_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def deleteTheme(self, theme_id=None):
"""Use this API to delete a theme from the theme library.
:param theme_id : ID allotted to the theme. : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
# Parameter validation
schema = ThemeValidator.deleteTheme()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"ID allotted to the theme.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id)
query_string = await create_query_string(theme_id=theme_id)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}", theme_id=theme_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getThemeForPreview(self, theme_id=None):
"""A theme can be previewed before applying it. Use this API to retrieve the theme preview by using its ID.
:param theme_id : ID allotted to the theme. : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
# Parameter validation
schema = ThemeValidator.getThemeForPreview()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/preview", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"ID allotted to the theme.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id)
query_string = await create_query_string(theme_id=theme_id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/preview", theme_id=theme_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def publishTheme(self, theme_id=None):
"""Use this API to publish a theme that is either newly created or edited.
:param theme_id : ID allotted to the theme. : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
# Parameter validation
schema = ThemeValidator.publishTheme()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/publish", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"ID allotted to the theme.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id)
query_string = await create_query_string(theme_id=theme_id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/publish", theme_id=theme_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def unpublishTheme(self, theme_id=None):
"""Use this API to remove an existing theme from the list of available themes.
:param theme_id : ID allotted to the theme. : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
# Parameter validation
schema = ThemeValidator.unpublishTheme()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/unpublish", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"ID allotted to the theme.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id)
query_string = await create_query_string(theme_id=theme_id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/unpublish", theme_id=theme_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def archiveTheme(self, theme_id=None):
"""Use this API to store an existing theme but not delete it so that it can be used in future if required.
:param theme_id : ID allotted to the theme. : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
# Parameter validation
schema = ThemeValidator.archiveTheme()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/archive", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"ID allotted to the theme.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id)
query_string = await create_query_string(theme_id=theme_id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/archive", theme_id=theme_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def unarchiveTheme(self, theme_id=None):
"""Use this API to restore an archived theme and bring it back for editing or publishing.
:param theme_id : ID allotted to the theme. : type string
"""
payload = {}
if theme_id:
payload["theme_id"] = theme_id
# Parameter validation
schema = ThemeValidator.unarchiveTheme()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/unarchive", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"theme_id","in":"path","description":"ID allotted to the theme.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", theme_id=theme_id)
query_string = await create_query_string(theme_id=theme_id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/theme/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/{theme_id}/unarchive", theme_id=theme_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
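# Illustrative publish lifecycle (sketch; `theme` is an assumed Theme instance).
# publishTheme/unpublishTheme toggle availability, while archiveTheme/unarchiveTheme
# park a theme without deleting it.
#
#     await theme.publishTheme(theme_id="<theme-id>")
#     await theme.unpublishTheme(theme_id="<theme-id>")
#     await theme.archiveTheme(theme_id="<theme-id>")
#     await theme.unarchiveTheme(theme_id="<theme-id>")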
class User:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
async def getCustomers(self, q=None, page_size=None, page_no=None):
"""Use this API to retrieve a list of customers who have registered in the application.
:param q : The search query. Mobile number or email ID of a customer. : type string
:param page_size : The number of items to retrieve in each page. Default value is 10. : type integer
:param page_no : The page number to navigate through the given set of results. Default value is 1. : type integer
"""
payload = {}
if q:
payload["q"] = q
if page_size:
payload["page_size"] = page_size
if page_no:
payload["page_no"] = page_no
# Parameter validation
schema = UserValidator.getCustomers()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/user/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/customers/list", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[{"name":"q","in":"query","description":"The search query. Mobile number or email ID of a customer.","required":false,"schema":{"type":"string"}},{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10.","required":false,"schema":{"type":"integer","default":10}},{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1. ","required":false,"schema":{"type":"integer","default":1}}],"query":[{"name":"q","in":"query","description":"The search query. Mobile number or email ID of a customer.","required":false,"schema":{"type":"string"}},{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10.","required":false,"schema":{"type":"integer","default":10}},{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1. ","required":false,"schema":{"type":"integer","default":1}}],"headers":[]}""", q=q, page_size=page_size, page_no=page_no)
query_string = await create_query_string(q=q, page_size=page_size, page_no=page_no)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/user/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/customers/list", q=q, page_size=page_size, page_no=page_no), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def searchUsers(self, q=None):
"""Use this API to retrieve an existing user from a list.
:param q : The search query. Mobile number or email ID of a customer. : type object
"""
payload = {}
if q:
payload["q"] = q
# Parameter validation
schema = UserValidator.searchUsers()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/user/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/customers/search", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[{"name":"q","in":"query","description":"The search query. Mobile number or email ID of a customer.","required":false,"schema":{"type":"object"}}],"query":[{"name":"q","in":"query","description":"The search query. Mobile number or email ID of a customer.","required":false,"schema":{"type":"object"}}],"headers":[]}""", q=q)
query_string = await create_query_string(q=q)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/user/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/customers/search", q=q), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
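# Illustrative customer lookups (sketch; `config` and `application_id` as above).
# The spec types `q` for searchUsers as an object; a plain search string is shown
# here only as an assumption.
#
#     user = User(config, application_id)
#     customers = await user.getCustomers(page_size=10, page_no=1)
#     matches = await user.searchUsers(q="customer@example.com")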
async def createUser(self, body=""):
"""Create user
"""
payload = {}
# Parameter validation
schema = UserValidator.createUser()
schema.dump(schema.load(payload))
# Body validation
from .models.CreateUserRequestSchema import CreateUserRequestSchema
schema = CreateUserRequestSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/user/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/customers", """{"required":[{"name":"company_id","in":"path","description":"Company ID","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/user/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/customers", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def updateUser(self, user_id=None, body=""):
"""Update user
:param user_id : User ID : type string
"""
payload = {}
if user_id:
payload["user_id"] = user_id
# Parameter validation
schema = UserValidator.updateUser()
schema.dump(schema.load(payload))
# Body validation
from .models.UpdateUserRequestSchema import UpdateUserRequestSchema
schema = UpdateUserRequestSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/user/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/customers/{user_id}", """{"required":[{"name":"company_id","in":"path","description":"Company ID","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID","required":true,"schema":{"type":"string"}},{"name":"user_id","in":"path","description":"User ID","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", user_id=user_id)
query_string = await create_query_string(user_id=user_id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/user/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/customers/{user_id}", user_id=user_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def createUserSession(self, body=""):
"""Create user session
"""
payload = {}
# Parameter validation
schema = UserValidator.createUserSession()
schema.dump(schema.load(payload))
# Body validation
from .models.CreateUserSessionRequestSchema import CreateUserSessionRequestSchema
schema = CreateUserSessionRequestSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/user/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/customers/session", """{"required":[{"name":"company_id","in":"path","description":"Company ID","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application ID","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/user/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/customers/session", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
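# Illustrative user management flow (sketch; `user` is an assumed User instance).
# The request bodies are defined by CreateUserRequestSchema, UpdateUserRequestSchema
# and CreateUserSessionRequestSchema; the placeholders below are not the real field
# lists.
#
#     created = await user.createUser(body=create_user_payload)
#     await user.updateUser(user_id="<user-id>", body=update_user_payload)
#     session = await user.createUserSession(body=session_payload)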
async def getPlatformConfig(self):
"""Use this API to get all the platform configurations such as mobile image, desktop image, social logins, and all other text.
"""
payload = {}
# Parameter validation
schema = UserValidator.getPlatformConfig()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/user/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/platform/config", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/user/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/platform/config", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updatePlatformConfig(self, body=""):
"""Use this API to edit the existing platform configurations such as mobile image, desktop image, social logins, and all other text.
"""
payload = {}
# Parameter validation
schema = UserValidator.updatePlatformConfig()
schema.dump(schema.load(payload))
# Body validation
from .models.PlatformSchema import PlatformSchema
schema = PlatformSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/user/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/platform/config", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/user/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/platform/config", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
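# Illustrative read-modify-write of the platform login configuration (sketch;
# `user` is an assumed User instance and `modified_config` is an assumed payload
# shaped per PlatformSchema).
#
#     current = await user.getPlatformConfig()
#     await user.updatePlatformConfig(body=modified_config)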
class Content:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
async def getAnnouncementsList(self, page_no=None, page_size=None):
"""Announcements are useful to highlight a message or information on top of a webpage. Use this API to retrieve a list of announcements.
:param page_no : The page number to navigate through the given set of results. Default value is 1. : type integer
:param page_size : The number of items to retrieve in each page. Default value is 10. : type integer
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = ContentValidator.getAnnouncementsList()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/announcements", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1.","required":false,"schema":{"type":"integer","default":1}},{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10.","required":false,"schema":{"type":"integer","default":10}}],"query":[{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1.","required":false,"schema":{"type":"integer","default":1}},{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10.","required":false,"schema":{"type":"integer","default":10}}],"headers":[]}""", page_no=page_no, page_size=page_size)
query_string = await create_query_string(page_no=page_no, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/announcements", page_no=page_no, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createAnnouncement(self, body=""):
"""Announcements are useful to highlight a message or information on top of a webpage. Use this API to create an announcement.
"""
payload = {}
# Parameter validation
schema = ContentValidator.createAnnouncement()
schema.dump(schema.load(payload))
# Body validation
from .models.AdminAnnouncementSchema import AdminAnnouncementSchema
schema = AdminAnnouncementSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/announcements", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/announcements", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getAnnouncementById(self, announcement_id=None):
"""Use this API to retrieve an announcement and its details such as the target platform and pages on which it's applicable
:param announcement_id : ID allotted to the announcement. : type string
"""
payload = {}
if announcement_id:
payload["announcement_id"] = announcement_id
# Parameter validation
schema = ContentValidator.getAnnouncementById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/announcements/{announcement_id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"announcement_id","in":"path","description":"ID allotted to the announcement.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", announcement_id=announcement_id)
query_string = await create_query_string(announcement_id=announcement_id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/announcements/{announcement_id}", announcement_id=announcement_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateAnnouncement(self, announcement_id=None, body=""):
"""Use this API to edit an existing announcement and its details such as the target platform and pages on which it's applicable
:param announcement_id : ID allotted to the announcement. : type string
"""
payload = {}
if announcement_id:
payload["announcement_id"] = announcement_id
# Parameter validation
schema = ContentValidator.updateAnnouncement()
schema.dump(schema.load(payload))
# Body validation
from .models.AdminAnnouncementSchema import AdminAnnouncementSchema
schema = AdminAnnouncementSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/announcements/{announcement_id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"announcement_id","in":"path","description":"ID allotted to the announcement.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", announcement_id=announcement_id)
query_string = await create_query_string(announcement_id=announcement_id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/announcements/{announcement_id}", announcement_id=announcement_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def updateAnnouncementSchedule(self, announcement_id=None, body=""):
"""Use this API to edit the duration, i.e. start date-time and end date-time of an announcement. Moreover, you can enable/disable an announcement using this API.
:param announcement_id : ID allotted to the announcement. : type string
"""
payload = {}
if announcement_id:
payload["announcement_id"] = announcement_id
# Parameter validation
schema = ContentValidator.updateAnnouncementSchedule()
schema.dump(schema.load(payload))
# Body validation
from .models.ScheduleSchema import ScheduleSchema
schema = ScheduleSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/announcements/{announcement_id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"announcement_id","in":"path","description":"ID allotted to the announcement.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", announcement_id=announcement_id)
query_string = await create_query_string(announcement_id=announcement_id)
return await AiohttpHelper().aiohttp_request("PATCH", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "patch", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/announcements/{announcement_id}", announcement_id=announcement_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def deleteAnnouncement(self, announcement_id=None):
"""Use this API to delete an existing announcement.
:param announcement_id : ID allotted to the announcement. : type string
"""
payload = {}
if announcement_id:
payload["announcement_id"] = announcement_id
# Parameter validation
schema = ContentValidator.deleteAnnouncement()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/announcements/{announcement_id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"announcement_id","in":"path","description":"ID allotted to the announcement.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", announcement_id=announcement_id)
query_string = await create_query_string(announcement_id=announcement_id)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/announcements/{announcement_id}", announcement_id=announcement_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
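# Illustrative announcement flow (sketch; `config` and `application_id` as above).
# Create/update bodies follow AdminAnnouncementSchema and the schedule body follows
# ScheduleSchema; the payload names below are assumptions.
#
#     content = Content(config, application_id)
#     listing = await content.getAnnouncementsList(page_no=1, page_size=10)
#     created = await content.createAnnouncement(body=announcement_payload)
#     await content.updateAnnouncementSchedule(announcement_id="<id>", body=schedule_payload)
#     await content.deleteAnnouncement(announcement_id="<id>")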
async def createBlog(self, body=""):
"""Use this API to create a blog.
"""
payload = {}
# Parameter validation
schema = ContentValidator.createBlog()
schema.dump(schema.load(payload))
# Body validation
from .models.BlogRequest import BlogRequest
schema = BlogRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/blogs/", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/blogs/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getBlogs(self, page_no=None, page_size=None):
"""Use this API to get a list of blogs along with their details, such as the title, reading time, publish status, feature image, tags, author, etc.
:param page_no : The page number to navigate through the given set of results. Default value is 1. : type integer
:param page_size : The number of items to retrieve in each page. Default value is 10. : type integer
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = ContentValidator.getBlogs()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/blogs/", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1.","required":false,"schema":{"type":"integer","default":1}},{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10.","required":false,"schema":{"type":"integer","default":10}}],"query":[{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1.","required":false,"schema":{"type":"integer","default":1}},{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10.","required":false,"schema":{"type":"integer","default":10}}],"headers":[]}""", page_no=page_no, page_size=page_size)
query_string = await create_query_string(page_no=page_no, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/blogs/", page_no=page_no, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateBlog(self, id=None, body=""):
"""Use this API to update the details of an existing blog which includes title, feature image, content, SEO details, expiry, etc.
:param id : ID allotted to the blog. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = ContentValidator.updateBlog()
schema.dump(schema.load(payload))
# Body validation
from .models.BlogRequest import BlogRequest
schema = BlogRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/blogs/{id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"ID allotted to the blog.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/blogs/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def deleteBlog(self, id=None):
"""Use this API to delete a blog.
:param id : ID allotted to the blog. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = ContentValidator.deleteBlog()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/blogs/{id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"ID allotted to the blog.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/blogs/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getComponentById(self, slug=None):
"""Use this API to retrieve the components of a blog, such as title, slug, feature image, content, schedule, publish status, author, etc.
:param slug : A short, human-readable, URL-friendly identifier of a blog page. You can get slug value of a blog from `getBlogs` API. : type string
"""
payload = {}
if slug:
payload["slug"] = slug
# Parameter validation
schema = ContentValidator.getComponentById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/blogs/{slug}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"slug","in":"path","description":"A short, human-readable, URL-friendly identifier of a blog page. You can get slug value of a blog from `getBlogs` API.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", slug=slug)
query_string = await create_query_string(slug=slug)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/blogs/{slug}", slug=slug), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
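# Illustrative usage sketch: fetching a single blog by the slug obtained from a
# prior getBlogs call (slug value below is a placeholder).
#
#     blog = await content.getComponentById(slug="monsoon-sale")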
async def getFaqCategories(self):
"""FAQs can be divided into categories. Use this API to get a list of FAQ categories.
"""
payload = {}
# Parameter validation
schema = ContentValidator.getFaqCategories()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/categories", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/categories", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getFaqCategoryBySlugOrId(self, id_or_slug=None):
"""FAQs can be divided into categories. Use this API to get an FAQ categories using its slug or ID.
:param id_or_slug : ID or the slug allotted to an FAQ category. Slug is a short, human-readable, URL-friendly identifier of an object. You can get slug value of an FAQ category from `getFaqCategories` API. : type string
"""
payload = {}
if id_or_slug:
payload["id_or_slug"] = id_or_slug
# Parameter validation
schema = ContentValidator.getFaqCategoryBySlugOrId()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category/{id_or_slug}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"id_or_slug","in":"path","description":"ID or the slug allotted to an FAQ category. Slug is a short, human-readable, URL-friendly identifier of an object. You can get slug value of an FAQ category from `getFaqCategories` API.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id_or_slug=id_or_slug)
query_string = await create_query_string(id_or_slug=id_or_slug)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category/{id_or_slug}", id_or_slug=id_or_slug), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createFaqCategory(self, body=""):
"""FAQs help users to solve an issue or know more about a process. FAQs can be categorized separately, for e.g. some questions can be related to payment, some could be related to purchase, shipping, navigating, etc. Use this API to create an FAQ category.
"""
payload = {}
# Parameter validation
schema = ContentValidator.createFaqCategory()
schema.dump(schema.load(payload))
# Body validation
from .models.CreateFaqCategoryRequestSchema import CreateFaqCategoryRequestSchema
schema = CreateFaqCategoryRequestSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
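# Illustrative usage sketch: creating an FAQ category. The body mirrors
# CreateFaqCategoryRequestSchema; the field names shown are placeholders.
#
#     category = await content.createFaqCategory(
#         body={"category": {"title": "Payments", "slug": "payments"}}
#     )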
async def updateFaqCategory(self, id=None, body=""):
"""Use this API to edit an existing FAQ category.
:param id : ID allotted to an FAQ category. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = ContentValidator.updateFaqCategory()
schema.dump(schema.load(payload))
# Body validation
from .models.UpdateFaqCategoryRequestSchema import UpdateFaqCategoryRequestSchema
schema = UpdateFaqCategoryRequestSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category/{id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"ID allotted to an FAQ category.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def deleteFaqCategory(self, id=None):
"""Use this API to delete an FAQ category.
:param id : ID allotted to an FAQ category. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = ContentValidator.deleteFaqCategory()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category/{id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"ID allotted to an FAQ category.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getFaqsByCategoryIdOrSlug(self, id_or_slug=None):
"""Use this API to retrieve all the commonly asked question and answers belonging to an FAQ category.
:param id_or_slug : ID or the slug allotted to an FAQ category. Slug is a short, human-readable, URL-friendly identifier of an object. You can get slug value of an FAQ category from `getFaqCategories` API. : type string
"""
payload = {}
if id_or_slug:
payload["id_or_slug"] = id_or_slug
# Parameter validation
schema = ContentValidator.getFaqsByCategoryIdOrSlug()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category/{id_or_slug}/faqs", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"id_or_slug","in":"path","description":"ID or the slug allotted to an FAQ category. Slug is a short, human-readable, URL-friendly identifier of an object. You can get slug value of an FAQ category from `getFaqCategories` API.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id_or_slug=id_or_slug)
query_string = await create_query_string(id_or_slug=id_or_slug)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category/{id_or_slug}/faqs", id_or_slug=id_or_slug), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def addFaq(self, category_id=None, body=""):
"""FAQs help users to solve an issue or know more about a process. Use this API to create an FAQ for a given FAQ category.
:param category_id : ID allotted to an FAQ category. : type string
"""
payload = {}
if category_id:
payload["category_id"] = category_id
# Parameter validation
schema = ContentValidator.addFaq()
schema.dump(schema.load(payload))
# Body validation
from .models.CreateFaqSchema import CreateFaqSchema
schema = CreateFaqSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category/{category_id}/faqs", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"category_id","in":"path","description":"ID allotted to an FAQ category.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", category_id=category_id)
query_string = await create_query_string(category_id=category_id)
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category/{category_id}/faqs", category_id=category_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
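# Illustrative usage sketch: adding an FAQ to an existing category. The body
# mirrors CreateFaqSchema; the field names shown are placeholders.
#
#     faq = await content.addFaq(
#         category_id="<category-id>",
#         body={"faq": {"question": "How do I track my order?",
#                       "answer": "Use the tracking link shared in your order email."}},
#     )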
async def updateFaq(self, category_id=None, faq_id=None, body=""):
"""Use this API to edit an existing FAQ.
:param category_id : ID allotted to an FAQ category. : type string
:param faq_id : ID allotted to an FAQ. : type string
"""
payload = {}
if category_id:
payload["category_id"] = category_id
if faq_id:
payload["faq_id"] = faq_id
# Parameter validation
schema = ContentValidator.updateFaq()
schema.dump(schema.load(payload))
# Body validation
from .models.CreateFaqSchema import CreateFaqSchema
schema = CreateFaqSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category/{category_id}/faq/{faq_id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"category_id","in":"path","description":"ID allotted to an FAQ category.","required":true,"schema":{"type":"string"}},{"name":"faq_id","in":"path","description":"ID allotted to an FAQ.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", category_id=category_id, faq_id=faq_id)
query_string = await create_query_string(category_id=category_id, faq_id=faq_id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category/{category_id}/faq/{faq_id}", category_id=category_id, faq_id=faq_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def deleteFaq(self, category_id=None, faq_id=None):
"""Use this API to delete an existing FAQ.
:param category_id : ID allotted to an FAQ category. : type string
:param faq_id : ID allotted to an FAQ. : type string
"""
payload = {}
if category_id:
payload["category_id"] = category_id
if faq_id:
payload["faq_id"] = faq_id
# Parameter validation
schema = ContentValidator.deleteFaq()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category/{category_id}/faq/{faq_id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"category_id","in":"path","description":"ID allotted to an FAQ category.","required":true,"schema":{"type":"string"}},{"name":"faq_id","in":"path","description":"ID allotted to an FAQ.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", category_id=category_id, faq_id=faq_id)
query_string = await create_query_string(category_id=category_id, faq_id=faq_id)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/category/{category_id}/faq/{faq_id}", category_id=category_id, faq_id=faq_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getFaqByIdOrSlug(self, id_or_slug=None):
"""Use this API to retrieve a specific FAQ. You will get the question and answer of that FAQ.
:param id_or_slug : ID or the slug allotted to an FAQ category. Slug is a short, human-readable, URL-friendly identifier of an object. You can get slug value of an FAQ category from `getFaqCategories` API. : type string
"""
payload = {}
if id_or_slug:
payload["id_or_slug"] = id_or_slug
# Parameter validation
schema = ContentValidator.getFaqByIdOrSlug()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/{id_or_slug}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"id_or_slug","in":"path","description":"ID or the slug allotted to an FAQ category. Slug is a short, human-readable, URL-friendly identifier of an object. You can get slug value of an FAQ category from `getFaqCategories` API.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id_or_slug=id_or_slug)
query_string = await create_query_string(id_or_slug=id_or_slug)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/faq/{id_or_slug}", id_or_slug=id_or_slug), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getLandingPages(self, page_no=None, page_size=None):
"""Landing page is the first page that a prospect lands upon while visiting a website. Use this API to fetch a list of landing pages.
:param page_no : The page number to navigate through the given set of results. Default value is 1. : type integer
:param page_size : The number of items to retrieve in each page. Default value is 10. : type integer
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = ContentValidator.getLandingPages()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/landing-page/", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1.","required":false,"schema":{"type":"integer","default":1}},{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10.","required":false,"schema":{"type":"integer","default":10}}],"query":[{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1.","required":false,"schema":{"type":"integer","default":1}},{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10.","required":false,"schema":{"type":"integer","default":10}}],"headers":[]}""", page_no=page_no, page_size=page_size)
query_string = await create_query_string(page_no=page_no, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/landing-page/", page_no=page_no, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createLandingPage(self, body=""):
"""Landing page is the first page that a prospect lands upon while visiting a website. Use this API to create a landing page.
"""
payload = {}
# Parameter validation
schema = ContentValidator.createLandingPage()
schema.dump(schema.load(payload))
# Body validation
from .models.LandingPageSchema import LandingPageSchema
schema = LandingPageSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/landing-page/", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/landing-page/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
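# Illustrative usage sketch: creating a landing page. The body mirrors
# LandingPageSchema; the field names shown are placeholders.
#
#     landing = await content.createLandingPage(
#         body={"slug": "festive-landing", "platform": "web"}
#     )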
async def updateLandingPage(self, id=None, body=""):
"""Use this API to edit the details of an existing landing page.
:param id : ID allotted to a landing page. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = ContentValidator.updateLandingPage()
schema.dump(schema.load(payload))
# Body validation
from .models.LandingPageSchema import LandingPageSchema
schema = LandingPageSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/landing-page/{id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"ID allotted to a landing page.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/landing-page/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def deleteLandingPage(self, id=None):
"""Use this API to delete an existing landing page.
:param id : ID allotted to a landing page. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = ContentValidator.deleteLandingPage()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/landing-page/{id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"ID allotted to a landing page.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/landing-page/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getLegalInformation(self):
"""Use this API to get the legal information of an application, which includes its policy, terms and conditions, shipping policy, and FAQs.
"""
payload = {}
# Parameter validation
schema = ContentValidator.getLegalInformation()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/legal", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/legal", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateLegalInformation(self, body=""):
"""Use this API to edit, update and save the legal information of an application, which includes Policy, Terms and Conditions, Shipping Policy and FAQ regarding the application.
"""
payload = {}
# Parameter validation
schema = ContentValidator.updateLegalInformation()
schema.dump(schema.load(payload))
# Body validation
from .models.ApplicationLegal import ApplicationLegal
schema = ApplicationLegal()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/legal", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/legal", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
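# Illustrative usage sketch: reading and then updating the legal information. The
# body mirrors ApplicationLegal; the field names shown are placeholders.
#
#     legal = await content.getLegalInformation()
#     saved = await content.updateLegalInformation(
#         body={"tnc": "Updated terms and conditions", "policy": "Updated policy"}
#     )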
async def getNavigations(self, device_platform=None, page_no=None, page_size=None):
"""Use this API to fetch the navigations details which includes the items of the navigation pane. It also shows the orientation, links, sub-navigations, etc.
:param device_platform : Filter navigations by platform. Acceptable values are: web, android, ios, all : type string
:param page_no : The page number to navigate through the given set of results. Default value is 1. : type integer
:param page_size : The number of items to retrieve in each page. Default value is 10. : type integer
"""
payload = {}
if device_platform:
payload["device_platform"] = device_platform
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = ContentValidator.getNavigations()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/navigations/", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"device_platform","in":"query","description":"Filter navigations by platform. Acceptable values are: web, android, ios, all","required":true,"schema":{"type":"string"}}],"optional":[{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1.","required":false,"schema":{"type":"integer","default":1}},{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10.","required":false,"schema":{"type":"integer","default":10}}],"query":[{"name":"device_platform","in":"query","description":"Filter navigations by platform. Acceptable values are: web, android, ios, all","required":true,"schema":{"type":"string"}},{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1.","required":false,"schema":{"type":"integer","default":1}},{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10.","required":false,"schema":{"type":"integer","default":10}}],"headers":[]}""", device_platform=device_platform, page_no=page_no, page_size=page_size)
query_string = await create_query_string(device_platform=device_platform, page_no=page_no, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/navigations/", device_platform=device_platform, page_no=page_no, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
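# Illustrative usage sketch: listing navigations for the web storefront.
# device_platform is required by the underlying endpoint; `content` is an
# initialized instance of this Content client.
#
#     navigations = await content.getNavigations(
#         device_platform="web", page_no=1, page_size=10
#     )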
async def createNavigation(self, body=""):
"""Navigation is the arrangement of navigational items to ease the accessibility of resources for users on a website. Use this API to create a navigation.
"""
payload = {}
# Parameter validation
schema = ContentValidator.createNavigation()
schema.dump(schema.load(payload))
# Body validation
from .models.NavigationRequest import NavigationRequest
schema = NavigationRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/navigations/", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/navigations/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getDefaultNavigations(self):
"""On any website (application), there are navigations that are present by default. Use this API to retrieve those default navigations.
"""
payload = {}
# Parameter validation
schema = ContentValidator.getDefaultNavigations()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/navigations/default", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/navigations/default", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getNavigationBySlug(self, slug=None, device_platform=None):
"""Use this API to retrieve a navigation by its slug.
:param slug : A short, human-readable, URL-friendly identifier of a navigation. You can get slug value of a navigation from `getNavigations` API. : type string
:param device_platform : Filter navigations by platform. Acceptable values are: web, android, ios, all : type string
"""
payload = {}
if slug:
payload["slug"] = slug
if device_platform:
payload["device_platform"] = device_platform
# Parameter validation
schema = ContentValidator.getNavigationBySlug()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/navigations/{slug}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"slug","in":"path","description":"A short, human-readable, URL-friendly identifier of a navigation. You can get slug value of a navigation from `getNavigations` API.","required":true,"schema":{"type":"string"}},{"name":"device_platform","in":"query","description":"Filter navigations by platform. Acceptable values are: web, android, ios, all","required":true,"schema":{"type":"string"}}],"optional":[],"query":[{"name":"device_platform","in":"query","description":"Filter navigations by platform. Acceptable values are: web, android, ios, all","required":true,"schema":{"type":"string"}}],"headers":[]}""", slug=slug, device_platform=device_platform)
query_string = await create_query_string(slug=slug, device_platform=device_platform)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/navigations/{slug}", slug=slug, device_platform=device_platform), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateNavigation(self, id=None, body=""):
"""Use this API to edit the details of an existing navigation.
:param id : ID allotted to the navigation. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = ContentValidator.updateNavigation()
schema.dump(schema.load(payload))
# Body validation
from .models.NavigationRequest import NavigationRequest
schema = NavigationRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/navigations/{id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"ID allotted to the navigation.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/navigations/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def deleteNavigation(self, id=None):
"""Use this API to delete an existing navigation.
:param id : ID allotted to the navigation. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = ContentValidator.deleteNavigation()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/navigations/{id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"ID allotted to the navigation.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/navigations/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getPageMeta(self):
"""Use this API to get the metadata of custom pages (blog, page) and default system pages (e.g. home/brand/category/collection).
"""
payload = {}
# Parameter validation
schema = ContentValidator.getPageMeta()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/meta", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/meta", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getPageSpec(self):
"""Use this API to get the specifications of a page, such as page type, display name, params and query.
"""
payload = {}
# Parameter validation
schema = ContentValidator.getPageSpec()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/spec", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/spec", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createPagePreview(self, body=""):
"""Use this API to create a page preview to check the appearance of a custom page.
"""
payload = {}
# Parameter validation
schema = ContentValidator.createPagePreview()
schema.dump(schema.load(payload))
# Body validation
from .models.PageRequest import PageRequest
schema = PageRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/preview/", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/preview/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def updatePagePreview(self, slug=None, body=""):
"""Use this API to change the publish status of an existing page. Allows you to publish and unpublish the page.
:param slug : A short, human-readable, URL-friendly identifier of a page. You can get slug value of a page from `getPages` API. : type string
"""
payload = {}
if slug:
payload["slug"] = slug
# Parameter validation
schema = ContentValidator.updatePagePreview()
schema.dump(schema.load(payload))
# Body validation
from .models.PagePublishRequest import PagePublishRequest
schema = PagePublishRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/publish/{slug}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"slug","in":"path","description":"A short, human-readable, URL-friendly identifier of a page. You can get slug value of a page from `getPages` API.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", slug=slug)
query_string = await create_query_string(slug=slug)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/publish/{slug}", slug=slug), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
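# Illustrative usage sketch: previewing a page and then publishing it. The bodies
# mirror PageRequest and PagePublishRequest; the field names shown are placeholders.
#
#     preview = await content.createPagePreview(
#         body={"title": "About Us", "slug": "about-us"}
#     )
#     published = await content.updatePagePreview(
#         slug="about-us", body={"publish": True}
#     )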
async def deletePage(self, id=None):
"""Use this API to delete an existing page.
:param id : ID allotted to the page. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = ContentValidator.deletePage()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/{id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"ID allotted to the page.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updatePathRedirectionRules(self, body=""):
"""Use this API to add, update or delete path-based redirection rules
"""
payload = {}
# Parameter validation
schema = ContentValidator.updatePathRedirectionRules()
schema.dump(schema.load(payload))
# Body validation
from .models.PathMappingSchema import PathMappingSchema
schema = PathMappingSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/path-mappings", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/path-mappings", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
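# Illustrative usage sketch: updating the path-based redirection rules. The body
# mirrors PathMappingSchema; the field names shown are placeholders.
#
#     rules = await content.updatePathRedirectionRules(
#         body={"redirections": [{"redirect_from": "/old-sale", "redirect_to": "/sale"}]}
#     )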
async def getPathRedirectionRules(self):
"""Use this API to get path based redirection rules.
"""
payload = {}
# Parameter validation
schema = ContentValidator.getPathRedirectionRules()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/path-mappings", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/path-mappings", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getSEOConfiguration(self):
"""Use this API to know how SEO is configured in the application. This includes the sitemap, robots.txt, custom meta tags, etc.
"""
payload = {}
# Parameter validation
schema = ContentValidator.getSEOConfiguration()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/seo", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/seo", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateSEOConfiguration(self, body=""):
"""Use this API to edit the SEO details of an application. This includes the sitemap, robot.txt, custom meta tags, etc.
"""
payload = {}
# Parameter validation
schema = ContentValidator.updateSEOConfiguration()
schema.dump(schema.load(payload))
# Body validation
from .models.SeoComponent import SeoComponent
schema = SeoComponent()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/seo", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/seo", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
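# Illustrative usage sketch: updating the SEO configuration. The body mirrors
# SeoComponent; the field names shown are placeholders, not the exact schema.
#
#     seo = await content.updateSEOConfiguration(
#         body={"seo": {"robots_txt": "User-agent: *\nDisallow:"}}
#     )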
async def getSlideshows(self, device_platform=None, page_no=None, page_size=None):
"""A slideshow is a group of images, videos or a combination of both that are shown on the website in the form of slides. Use this API to fetch a list of slideshows.
:param device_platform : Filter slideshows by platform. Acceptable values are: web, android, ios and all : type string
:param page_no : The page number to navigate through the given set of results. Default value is 1. : type integer
:param page_size : The number of items to retrieve in each page. Default value is 10. : type integer
"""
payload = {}
if device_platform:
payload["device_platform"] = device_platform
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = ContentValidator.getSlideshows()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/slideshows/", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"device_platform","in":"query","description":"Filter slideshows by platform. Acceptable values are: web, android, ios and all","required":true,"schema":{"type":"string"}}],"optional":[{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1.","required":false,"schema":{"type":"integer","default":1}},{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10.","required":false,"schema":{"type":"integer","default":10}}],"query":[{"name":"device_platform","in":"query","description":"Filter slideshows by platform. Acceptable values are: web, android, ios and all","required":true,"schema":{"type":"string"}},{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1.","required":false,"schema":{"type":"integer","default":1}},{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10.","required":false,"schema":{"type":"integer","default":10}}],"headers":[]}""", device_platform=device_platform, page_no=page_no, page_size=page_size)
query_string = await create_query_string(device_platform=device_platform, page_no=page_no, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/slideshows/", device_platform=device_platform, page_no=page_no, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
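# Illustrative usage (not part of the generated SDK): list the first page of
# slideshows for the web platform. `content` stands for an instance of this
# Content client obtained from the platform application client; the variable
# name is an assumption for the example.
#
#   slideshows = await content.getSlideshows(device_platform="web", page_no=1, page_size=10)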
async def createSlideshow(self, body=""):
"""A slideshow is a group of images, videos or a combination of both that are shown on the website in the form of slides. Use this API to create a slideshow.
"""
payload = {}
# Parameter validation
schema = ContentValidator.createSlideshow()
schema.dump(schema.load(payload))
# Body validation
from .models.SlideshowRequest import SlideshowRequest
schema = SlideshowRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/slideshows/", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/slideshows/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
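# Illustrative usage sketch: create a slideshow. The body must satisfy
# SlideshowRequest (see .models.SlideshowRequest); the field names below are
# assumptions for the example, not the authoritative schema.
#
#   new_slideshow = await content.createSlideshow(body={
#       "slug": "festive-banner",   # hypothetical field
#       "platform": "web",          # hypothetical field
#   })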
async def getSlideshowBySlug(self, slug=None, device_platform=None):
"""Use this API to retrieve the details of a slideshow by its slug.
:param slug : A short, human-readable, URL-friendly identifier of a slideshow. You can get the slug value of a slideshow from the `getSlideshows` API. : type string
:param device_platform : Filter slideshows by platform. Acceptable values are: web, android, ios and all : type string
"""
payload = {}
if slug:
payload["slug"] = slug
if device_platform:
payload["device_platform"] = device_platform
# Parameter validation
schema = ContentValidator.getSlideshowBySlug()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/slideshows/{slug}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"slug","in":"path","description":"A short, human-readable, URL-friendly identifier of a slideshow. You can get slug value of a page from `getSlideshows` API.","required":true,"schema":{"type":"string"}},{"name":"device_platform","in":"query","description":"Filter slideshows by platform. Acceptable values are: web, android, ios and all","required":true,"schema":{"type":"string"}}],"optional":[],"query":[{"name":"device_platform","in":"query","description":"Filter slideshows by platform. Acceptable values are: web, android, ios and all","required":true,"schema":{"type":"string"}}],"headers":[]}""", slug=slug, device_platform=device_platform)
query_string = await create_query_string(slug=slug, device_platform=device_platform)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/slideshows/{slug}", slug=slug, device_platform=device_platform), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
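# Illustrative usage sketch: fetch a single slideshow by its slug, typically
# taken from a prior getSlideshows() response. `content` is an assumed variable
# name for this Content client.
#
#   slideshow = await content.getSlideshowBySlug(slug="festive-banner", device_platform="web")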
async def updateSlideshow(self, id=None, body=""):
"""Use this API to edit the details of an existing slideshow.
:param id : ID allotted to the slideshow. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = ContentValidator.updateSlideshow()
schema.dump(schema.load(payload))
# Body validation
from .models.SlideshowRequest import SlideshowRequest
schema = SlideshowRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/slideshows/{id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"ID allotted to the slideshow.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/slideshows/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def deleteSlideshow(self, id=None):
"""Use this API to delete an existing slideshow.
:param id : ID allotted to the slideshow. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = ContentValidator.deleteSlideshow()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/slideshows/{id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"ID allotted to the slideshow.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/slideshows/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
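# Illustrative usage sketch: update or delete a slideshow by the `id` returned
# when it was created or listed. The id value and body variable are placeholders.
#
#   await content.updateSlideshow(id="<slideshow-id>", body=updated_slideshow_payload)
#   await content.deleteSlideshow(id="<slideshow-id>")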
async def getSupportInformation(self, ):
"""Use this API to get the contact details for customer support, including emails and phone numbers.
"""
payload = {}
# Parameter validation
schema = ContentValidator.getSupportInformation()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/support", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/support", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateSupportInformation(self, body=""):
"""Use this API to edit the existing contact details for customer support, including emails and phone numbers.
"""
payload = {}
# Parameter validation
schema = ContentValidator.updateSupportInformation()
schema.dump(schema.load(payload))
# Body validation
from .models.Support import Support
schema = Support()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/support", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/support", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
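# Illustrative usage sketch: read and update the customer support contact
# details. The update body must satisfy the Support model (.models.Support);
# the shape hinted at below is an assumption, not the exact schema.
#
#   support = await content.getSupportInformation()
#   await content.updateSupportInformation(body={"contact": {...}})  # hypothetical shape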
async def updateInjectableTag(self, body=""):
"""Use this API to edit the details of an existing tag. This includes the tag name, tag type (css/js), url and position of the tag.
"""
payload = {}
# Parameter validation
schema = ContentValidator.updateInjectableTag()
schema.dump(schema.load(payload))
# Body validation
from .models.CreateTagRequestSchema import CreateTagRequestSchema
schema = CreateTagRequestSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/tags", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/tags", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def deleteAllInjectableTags(self, ):
"""Use this API to delete all the existing tags at once.
"""
payload = {}
# Parameter validation
schema = ContentValidator.deleteAllInjectableTags()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/tags", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/tags", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getInjectableTags(self, ):
"""Use this API to get all the CSS and JS injected in the application in the form of tags.
"""
payload = {}
# Parameter validation
schema = ContentValidator.getInjectableTags()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/tags", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/tags", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def addInjectableTag(self, body=""):
"""CSS and JS can be injected in the application (website) with the help of tags. Use this API to create such tags by entering the tag name, tag type (css/js), url and position of the tag.
"""
payload = {}
# Parameter validation
schema = ContentValidator.addInjectableTag()
schema.dump(schema.load(payload))
# Body validation
from .models.CreateTagRequestSchema import CreateTagRequestSchema
schema = CreateTagRequestSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/tags/add", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/tags/add", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
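# Illustrative usage sketch: inject a JS tag into the application. The body
# must satisfy CreateTagRequestSchema; the field names below are assumptions
# based on the docstring (name, type, url, position), not the exact schema.
#
#   await content.addInjectableTag(body={
#       "tags": [{"name": "analytics", "type": "js", "url": "https://example.com/a.js", "position": "head"}]
#   })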
async def removeInjectableTag(self, body=""):
"""Use this API to delete an existing tag.
"""
payload = {}
# Parameter validation
schema = ContentValidator.removeInjectableTag()
schema.dump(schema.load(payload))
# Body validation
from .models.RemoveHandpickedSchema import RemoveHandpickedSchema
schema = RemoveHandpickedSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/tags/remove/handpicked", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/tags/remove/handpicked", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def editInjectableTag(self, tag_id=None, body=""):
"""Use this API to edit the details of an existing tag by its ID.
:param tag_id : ID allotted to the tag. : type string
"""
payload = {}
if tag_id:
payload["tag_id"] = tag_id
# Parameter validation
schema = ContentValidator.editInjectableTag()
schema.dump(schema.load(payload))
# Body validation
from .models.UpdateHandpickedSchema import UpdateHandpickedSchema
schema = UpdateHandpickedSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/tags/edit/handpicked/{tag_id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform.","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Alphanumeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"tag_id","in":"path","description":"ID allotted to the tag.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", tag_id=tag_id)
query_string = await create_query_string(tag_id=tag_id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/content/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/tags/edit/handpicked/{tag_id}", tag_id=tag_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def createPage(self, body=""):
"""Use this API to create a custom page using a title, seo, publish status, feature image, tags, meta, etc.
"""
payload = {}
# Parameter validation
schema = ContentValidator.createPage()
schema.dump(schema.load(payload))
# Body validation
from .models.PageRequest import PageRequest
schema = PageRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v2.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/content/v2.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
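# Illustrative usage sketch: create a custom page. The body must satisfy
# PageRequest (.models.PageRequest); the field names below are assumptions
# drawn from the docstring (title, SEO, publish status, etc.).
#
#   page = await content.createPage(body={
#       "title": "About us",   # hypothetical field
#       "published": False,    # hypothetical field
#   })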
async def getPages(self, page_no=None, page_size=None):
"""Use this API to retrieve a list of pages.
:param page_no : The page number to navigate through the given set of results. Default value is 1. : type integer
:param page_size : The number of items to retrieve in each page. Default value is 10. : type integer
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = ContentValidator.getPages()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v2.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}}],"optional":[{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1.","required":false,"schema":{"type":"integer","default":1}},{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10.","required":false,"schema":{"type":"integer","default":10}}],"query":[{"name":"page_no","in":"query","description":"The page number to navigate through the given set of results. Default value is 1.","required":false,"schema":{"type":"integer","default":1}},{"name":"page_size","in":"query","description":"The number of items to retrieve in each page. Default value is 10.","required":false,"schema":{"type":"integer","default":10}}],"headers":[]}""", page_no=page_no, page_size=page_size)
query_string = await create_query_string(page_no=page_no, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v2.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/", page_no=page_no, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
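# Illustrative usage sketch: page through all custom pages, 10 at a time.
#
#   page_no = 1
#   while True:
#       result = await content.getPages(page_no=page_no, page_size=10)
#       # ... process result; stop when the response indicates there is no next page ...
#       page_no += 1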
async def updatePage(self, id=None, body=""):
"""Use this API to edit the details of an existing page, such as its title, seo, publish status, feature image, tags, schedule, etc.
:param id : ID allotted to the page. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = ContentValidator.updatePage()
schema.dump(schema.load(payload))
# Body validation
from .models.PageSchema import PageSchema
schema = PageSchema()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v2.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/{id}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"ID allotted to the page.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/content/v2.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getPageBySlug(self, slug=None):
"""Use this API to retrieve the components of a page, such as its title, seo, publish status, feature image, tags, schedule, etc.
:param slug : A short, human-readable, URL-friendly identifier of a page. You can get slug value of a page from `getPages` API. : type string
"""
payload = {}
if slug:
payload["slug"] = slug
# Parameter validation
schema = ContentValidator.getPageBySlug()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/content/v2.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/{slug}", """{"required":[{"name":"company_id","in":"path","description":"Numeric ID allotted to a business account on Fynd Platform","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Numeric ID allotted to an application created within a business account.","required":true,"schema":{"type":"string"}},{"name":"slug","in":"path","description":"A short, human-readable, URL-friendly identifier of a page. You can get slug value of a page from `getPages` API.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", slug=slug)
query_string = await create_query_string(slug=slug)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/content/v2.0/company/{self._conf.companyId}/application/{self.applicationId}/pages/{slug}", slug=slug), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
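# Illustrative usage sketch: fetch a page's components by its slug, typically
# taken from a prior getPages() response.
#
#   page = await content.getPageBySlug(slug="about-us")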
class Billing:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
class Communication:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
async def getCampaigns(self, page_no=None, page_size=None, sort=None):
"""Get campaigns
:param page_no : Current page number : type integer
:param page_size : Number of items to retrieve per request : type integer
:param sort : Sort order based on created_at : type object
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if sort:
payload["sort"] = sort
# Parameter validation
schema = CommunicationValidator.getCampaigns()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/campaigns/campaigns", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"headers":[]}""", page_no=page_no, page_size=page_size, sort=sort)
query_string = await create_query_string(page_no=page_no, page_size=page_size, sort=sort)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/campaigns/campaigns", page_no=page_no, page_size=page_size, sort=sort), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
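# Illustrative usage sketch: list campaigns sorted by creation time.
# `communication` stands for an instance of this Communication client; the
# sort value follows the {"created_at": <integer>} shape in the parameter
# schema above, and the -1 (descending) convention is an assumption.
#
#   campaigns = await communication.getCampaigns(page_no=1, page_size=10, sort={"created_at": -1})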
async def createCampaign(self, body=""):
"""Create campaign
"""
payload = {}
# Parameter validation
schema = CommunicationValidator.createCampaign()
schema.dump(schema.load(payload))
# Body validation
from .models.CampaignReq import CampaignReq
schema = CampaignReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/campaigns/campaigns", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/campaigns/campaigns", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getCampaignById(self, id=None):
"""Get campaign by id
:param id : Campaign id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CommunicationValidator.getCampaignById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/campaigns/campaigns/{id}", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}},{"in":"path","name":"id","description":"Campaign id","required":true,"schema":{"type":"string","example":"6009a1ea1f6a61d88e80a867"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/campaigns/campaigns/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateCampaignById(self, id=None, body=""):
"""Update campaign by id
:param id : Campaign id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CommunicationValidator.updateCampaignById()
schema.dump(schema.load(payload))
# Body validation
from .models.CampaignReq import CampaignReq
schema = CampaignReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/campaigns/campaigns/{id}", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}},{"in":"path","name":"id","description":"Campaign id","required":true,"schema":{"type":"string","example":"6009a1ea1f6a61d88e80a867"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/campaigns/campaigns/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getStatsOfCampaignById(self, id=None):
"""Get stats of campaign by id
:param id : Campaign id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CommunicationValidator.getStatsOfCampaignById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/campaigns/get-stats/{id}", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}},{"in":"path","name":"id","description":"Campaign id","required":true,"schema":{"type":"string","example":"6009a1ea1f6a61d88e80a867"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/campaigns/get-stats/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getAudiences(self, page_no=None, page_size=None, sort=None):
"""Get audiences
:param page_no : Current page number : type integer
:param page_size : Number of items to retrieve per request : type integer
:param sort : Sort order based on created_at : type object
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if sort:
payload["sort"] = sort
# Parameter validation
schema = CommunicationValidator.getAudiences()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sources/datasources", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"headers":[]}""", page_no=page_no, page_size=page_size, sort=sort)
query_string = await create_query_string(page_no=page_no, page_size=page_size, sort=sort)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sources/datasources", page_no=page_no, page_size=page_size, sort=sort), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
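# Illustrative usage sketch: list audience data sources with pagination.
#
#   audiences = await communication.getAudiences(page_no=1, page_size=10)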
async def createAudience(self, body=""):
"""Create audience
"""
payload = {}
# Parameter validation
schema = CommunicationValidator.createAudience()
schema.dump(schema.load(payload))
# Body validation
from .models.AudienceReq import AudienceReq
schema = AudienceReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sources/datasources", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sources/datasources", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getBigqueryHeaders(self, body=""):
"""Get bigquery headers
"""
payload = {}
# Parameter validation
schema = CommunicationValidator.getBigqueryHeaders()
schema.dump(schema.load(payload))
# Body validation
from .models.BigqueryHeadersReq import BigqueryHeadersReq
schema = BigqueryHeadersReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sources/bigquery-headers", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sources/bigquery-headers", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getAudienceById(self, id=None):
"""Get audience by id
:param id : Audience id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CommunicationValidator.getAudienceById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sources/datasources/{id}", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}},{"in":"path","name":"id","description":"Audience id","required":true,"schema":{"type":"string","example":"5fb6675c09fd901023917a5f"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sources/datasources/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateAudienceById(self, id=None, body=""):
"""Update audience by id
:param id : Audience id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CommunicationValidator.updateAudienceById()
schema.dump(schema.load(payload))
# Body validation
from .models.AudienceReq import AudienceReq
schema = AudienceReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sources/datasources/{id}", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}},{"in":"path","name":"id","description":"Audience id","required":true,"schema":{"type":"string","example":"5fb6675c09fd901023917a5f"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sources/datasources/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getNSampleRecordsFromCsv(self, body=""):
"""Get n sample records from csv
"""
payload = {}
# Parameter validation
schema = CommunicationValidator.getNSampleRecordsFromCsv()
schema.dump(schema.load(payload))
# Body validation
from .models.GetNRecordsCsvReq import GetNRecordsCsvReq
schema = GetNRecordsCsvReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sources/get-n-records", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sources/get-n-records", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getEmailProviders(self, page_no=None, page_size=None, sort=None):
"""Get email providers
:param page_no : Current page number : type integer
:param page_size : Number of items to retrieve per request : type integer
:param sort : Sort order based on created_at : type object
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if sort:
payload["sort"] = sort
# Parameter validation
schema = CommunicationValidator.getEmailProviders()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/providers", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"headers":[]}""", page_no=page_no, page_size=page_size, sort=sort)
query_string = await create_query_string(page_no=page_no, page_size=page_size, sort=sort)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/providers", page_no=page_no, page_size=page_size, sort=sort), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createEmailProvider(self, body=""):
"""Create email provider
"""
payload = {}
# Parameter validation
schema = CommunicationValidator.createEmailProvider()
schema.dump(schema.load(payload))
# Body validation
from .models.EmailProviderReq import EmailProviderReq
schema = EmailProviderReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/providers", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/providers", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getEmailProviderById(self, id=None):
"""Get email provider by id
:param id : Email provider id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CommunicationValidator.getEmailProviderById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/providers/{id}", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}},{"in":"path","name":"id","description":"Email provider id","required":true,"schema":{"type":"string","example":"5fd9fd44c474a7e3d5d376d6"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/providers/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateEmailProviderById(self, id=None, body=""):
"""Update email provider by id
:param id : Email provider id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CommunicationValidator.updateEmailProviderById()
schema.dump(schema.load(payload))
# Body validation
from .models.EmailProviderReq import EmailProviderReq
schema = EmailProviderReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/providers/{id}", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}},{"in":"path","name":"id","description":"Email provider id","required":true,"schema":{"type":"string","example":"5fd9fd44c474a7e3d5d376d6"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/providers/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getEmailTemplates(self, page_no=None, page_size=None, sort=None):
"""Get email templates
:param page_no : Current page number : type integer
:param page_size : Number of items to retrieve per request : type integer
:param sort : Sort order based on created_at : type object
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if sort:
payload["sort"] = sort
# Parameter validation
schema = CommunicationValidator.getEmailTemplates()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/templates", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"headers":[]}""", page_no=page_no, page_size=page_size, sort=sort)
query_string = await create_query_string(page_no=page_no, page_size=page_size, sort=sort)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/templates", page_no=page_no, page_size=page_size, sort=sort), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
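# Illustrative usage sketch: list custom email templates; system templates are
# available through getSystemEmailTemplates() with the same pagination
# parameters.
#
#   templates = await communication.getEmailTemplates(page_no=1, page_size=10)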
async def createEmailTemplate(self, body=""):
"""Create email template
"""
payload = {}
# Parameter validation
schema = CommunicationValidator.createEmailTemplate()
schema.dump(schema.load(payload))
# Body validation
from .models.EmailTemplateReq import EmailTemplateReq
schema = EmailTemplateReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/templates", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/templates", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getSystemEmailTemplates(self, page_no=None, page_size=None, sort=None):
"""Get system email templates
:param page_no : Current page number : type integer
:param page_size : Number of items to retrieve per request : type integer
:param sort : Sort order based on created_at : type object
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if sort:
payload["sort"] = sort
# Parameter validation
schema = CommunicationValidator.getSystemEmailTemplates()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/system-templates", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"headers":[]}""", page_no=page_no, page_size=page_size, sort=sort)
query_string = await create_query_string(page_no=page_no, page_size=page_size, sort=sort)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/system-templates", page_no=page_no, page_size=page_size, sort=sort), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getEmailTemplateById(self, id=None):
"""Get email template by id
:param id : Email template id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CommunicationValidator.getEmailTemplateById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/templates/{id}", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}},{"in":"path","name":"id","description":"Email template id","required":true,"schema":{"type":"string","example":"5ef42a49c8b67d279c27a980"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/templates/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateEmailTemplateById(self, id=None, body=""):
"""Update email template by id
:param id : Email template id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CommunicationValidator.updateEmailTemplateById()
schema.dump(schema.load(payload))
# Body validation
from .models.EmailTemplateReq import EmailTemplateReq
schema = EmailTemplateReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/templates/{id}", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}},{"in":"path","name":"id","description":"Email template id","required":true,"schema":{"type":"string","example":"5ef42a49c8b67d279c27a980"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/templates/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
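# Illustrative usage (sketch only): fetch a template by id, then update it. The body
# is validated against EmailTemplateReq; the field names shown below are placeholders,
# not the confirmed schema.
#
#   template = await communication.getEmailTemplateById(id="5ef42a49c8b67d279c27a980")
#   updated = await communication.updateEmailTemplateById(
#       id="5ef42a49c8b67d279c27a980",
#       body={"name": "order-confirmation"},
#   )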
async def deleteEmailTemplateById(self, id=None):
"""Delete email template by id
:param id : Email template id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CommunicationValidator.deleteEmailTemplateById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/templates/{id}", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}},{"in":"path","name":"id","description":"Email template id","required":true,"schema":{"type":"string","example":"5ef42a49c8b67d279c27a980"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/email/templates/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getEventSubscriptions(self, page_no=None, page_size=None, populate=None):
"""Get event subscriptions
:param page_no : Current page no : type integer
:param page_size : Current request items count : type integer
:param populate : populate fields : type string
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if populate:
payload["populate"] = populate
# Parameter validation
schema = CommunicationValidator.getEventSubscriptions()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/event/event-subscriptions", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"populate","in":"query","schema":{"type":"string"},"description":"populate fields"}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"populate","in":"query","schema":{"type":"string"},"description":"populate fields"}],"headers":[]}""", page_no=page_no, page_size=page_size, populate=populate)
query_string = await create_query_string(page_no=page_no, page_size=page_size, populate=populate)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/event/event-subscriptions", page_no=page_no, page_size=page_size, populate=populate), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getJobs(self, page_no=None, page_size=None, sort=None):
"""Get jobs
:param page_no : Current page no : type integer
:param page_size : Current request items count : type integer
:param sort : To sort based on created_at : type object
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if sort:
payload["sort"] = sort
# Parameter validation
schema = CommunicationValidator.getJobs()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/jobs/jobs", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"headers":[]}""", page_no=page_no, page_size=page_size, sort=sort)
query_string = await create_query_string(page_no=page_no, page_size=page_size, sort=sort)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/jobs/jobs", page_no=page_no, page_size=page_size, sort=sort), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def triggerCampaignJob(self, body=""):
"""Trigger campaign job
"""
payload = {}
# Parameter validation
schema = CommunicationValidator.triggerCampaignJob()
schema.dump(schema.load(payload))
# Body validation
from .models.TriggerJobRequest import TriggerJobRequest
schema = TriggerJobRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/jobs/trigger-job", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/jobs/trigger-job", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
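# Illustrative usage (sketch): the request body is validated against TriggerJobRequest;
# the `job_id` field shown here is an assumption, not a confirmed field name.
#
#   res = await communication.triggerCampaignJob(body={"job_id": "<job-id>"})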
async def getJobLogs(self, page_no=None, page_size=None, sort=None):
"""Get job logs
:param page_no : Current page no : type integer
:param page_size : Current request items count : type integer
:param sort : To sort based on created_at : type object
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if sort:
payload["sort"] = sort
# Parameter validation
schema = CommunicationValidator.getJobLogs()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/jobs/logs", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"headers":[]}""", page_no=page_no, page_size=page_size, sort=sort)
query_string = await create_query_string(page_no=page_no, page_size=page_size, sort=sort)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/jobs/logs", page_no=page_no, page_size=page_size, sort=sort), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getCommunicationLogs(self, page_id=None, page_size=None, sort=None, query=None):
"""Get communication logs
:param page_id : Current page no : type string
:param page_size : Current request items count : type integer
:param sort : To sort based on _id : type object
:param query : : type object
"""
payload = {}
if page_id:
payload["page_id"] = page_id
if page_size:
payload["page_size"] = page_size
if sort:
payload["sort"] = sort
if query:
payload["query"] = query
# Parameter validation
schema = CommunicationValidator.getCommunicationLogs()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/log", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[{"name":"page_id","in":"query","schema":{"type":"string"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"_id":{"type":"integer"}}},"description":"To sort based on _id"},{"name":"query","in":"query","schema":{"type":"object"}}],"query":[{"name":"page_id","in":"query","schema":{"type":"string"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"_id":{"type":"integer"}}},"description":"To sort based on _id"},{"name":"query","in":"query","schema":{"type":"object"}}],"headers":[]}""", page_id=page_id, page_size=page_size, sort=sort, query=query)
query_string = await create_query_string(page_id=page_id, page_size=page_size, sort=sort, query=query)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/log", page_id=page_id, page_size=page_size, sort=sort, query=query), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
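# Illustrative usage (sketch): unlike the other listings, this endpoint pages with a
# string `page_id` cursor and sorts on `_id`; the -1 direction is assumed to mean
# descending. Pass the cursor from the previous response (exact response field name
# not shown here) as `page_id` to fetch the next page.
#
#   logs = await communication.getCommunicationLogs(
#       page_size=20, sort={"_id": -1}
#   )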
async def getSmsProviders(self, page_no=None, page_size=None, sort=None):
"""Get sms providers
:param page_no : Current page no : type integer
:param page_size : Current request items count : type integer
:param sort : To sort based on created_at : type object
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if sort:
payload["sort"] = sort
# Parameter validation
schema = CommunicationValidator.getSmsProviders()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/providers", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"headers":[]}""", page_no=page_no, page_size=page_size, sort=sort)
query_string = await create_query_string(page_no=page_no, page_size=page_size, sort=sort)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/providers", page_no=page_no, page_size=page_size, sort=sort), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createSmsProvider(self, body=""):
"""Create sms provider
"""
payload = {}
# Parameter validation
schema = CommunicationValidator.createSmsProvider()
schema.dump(schema.load(payload))
# Body validation
from .models.SmsProviderReq import SmsProviderReq
schema = SmsProviderReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/providers", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/providers", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
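# Illustrative usage (sketch): the body is validated against SmsProviderReq; the field
# names below are placeholders rather than the confirmed schema.
#
#   provider = await communication.createSmsProvider(
#       body={"name": "my-sms-provider", "description": "primary gateway"}
#   )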
async def getSmsProviderById(self, id=None):
"""Get sms provider by id
:param id : Sms provider id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CommunicationValidator.getSmsProviderById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/providers/{id}", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}},{"in":"path","name":"id","description":"Sms provider id","required":true,"schema":{"type":"string","example":"5fd9fd07c474a7710dd376d5"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/providers/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateSmsProviderById(self, id=None, body=""):
"""Update sms provider by id
:param id : Sms provider id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CommunicationValidator.updateSmsProviderById()
schema.dump(schema.load(payload))
# Body validation
from .models.SmsProviderReq import SmsProviderReq
schema = SmsProviderReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/providers/{id}", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}},{"in":"path","name":"id","description":"Sms provider id","required":true,"schema":{"type":"string","example":"5fd9fd07c474a7710dd376d5"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/providers/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getSmsTemplates(self, page_no=None, page_size=None, sort=None):
"""Get sms templates
:param page_no : Current page no : type integer
:param page_size : Current request items count : type integer
:param sort : To sort based on created_at : type object
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if sort:
payload["sort"] = sort
# Parameter validation
schema = CommunicationValidator.getSmsTemplates()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/templates", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"headers":[]}""", page_no=page_no, page_size=page_size, sort=sort)
query_string = await create_query_string(page_no=page_no, page_size=page_size, sort=sort)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/templates", page_no=page_no, page_size=page_size, sort=sort), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createSmsTemplate(self, body=""):
"""Create sms template
"""
payload = {}
# Parameter validation
schema = CommunicationValidator.createSmsTemplate()
schema.dump(schema.load(payload))
# Body validation
from .models.SmsTemplateReq import SmsTemplateReq
schema = SmsTemplateReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/templates", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/templates", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getSmsTemplateById(self, id=None):
"""Get sms template by id
:param id : Sms template id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CommunicationValidator.getSmsTemplateById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/templates/{id}", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}},{"in":"path","name":"id","description":"Sms template id","required":true,"schema":{"type":"string","example":"5ef42a49c8b67d279c27a980"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/templates/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateSmsTemplateById(self, id=None, body=""):
"""Update sms template by id
:param id : Sms template id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CommunicationValidator.updateSmsTemplateById()
schema.dump(schema.load(payload))
# Body validation
from .models.SmsTemplateReq import SmsTemplateReq
schema = SmsTemplateReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/templates/{id}", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}},{"in":"path","name":"id","description":"Sms template id","required":true,"schema":{"type":"string","example":"5ef42a49c8b67d279c27a980"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/templates/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def deleteSmsTemplateById(self, id=None):
"""Delete sms template by id
:param id : Sms template id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CommunicationValidator.deleteSmsTemplateById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/templates/{id}", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}},{"in":"path","name":"id","description":"Sms template id","required":true,"schema":{"type":"string","example":"5ef42a49c8b67d279c27a980"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/templates/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getSystemSystemTemplates(self, page_no=None, page_size=None, sort=None):
"""Get system sms templates
:param page_no : Current page no : type integer
:param page_size : Current request items count : type integer
:param sort : To sort based on created_at : type object
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if sort:
payload["sort"] = sort
# Parameter validation
schema = CommunicationValidator.getSystemSystemTemplates()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/system-templates", """{"required":[{"in":"path","name":"company_id","description":"Company id","required":true,"schema":{"type":"string","example":"1"}},{"in":"path","name":"application_id","description":"Application id","required":true,"schema":{"type":"string","example":"5ea6821b3425bb07c82a25c1"}}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"},{"name":"sort","in":"query","schema":{"type":"object","properties":{"created_at":{"type":"integer"}}},"description":"To sort based on created_at"}],"headers":[]}""", page_no=page_no, page_size=page_size, sort=sort)
query_string = await create_query_string(page_no=page_no, page_size=page_size, sort=sort)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/communication/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/sms/system-templates", page_no=page_no, page_size=page_size, sort=sort), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
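# Note: despite its name, getSystemSystemTemplates lists system *SMS* templates
# (see its docstring and the /sms/system-templates path). Illustrative usage:
#
#   system_sms = await communication.getSystemSystemTemplates(page_no=1, page_size=10)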
class Payment:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
async def getBrandPaymentGatewayConfig(self, ):
"""Get All Brand Payment Gateway Config Secret
"""
payload = {}
# Parameter validation
schema = PaymentValidator.getBrandPaymentGatewayConfig()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/aggregator/request", """{"required":[{"name":"company_id","in":"path","description":"Company Id","schema":{"type":"integer"},"required":true},{"name":"application_id","in":"path","description":"Application id","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/aggregator/request", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def saveBrandPaymentGatewayConfig(self, body=""):
"""Save Config Secret For Brand Payment Gateway
"""
payload = {}
# Parameter validation
schema = PaymentValidator.saveBrandPaymentGatewayConfig()
schema.dump(schema.load(payload))
# Body validation
from .models.PaymentGatewayConfigRequest import PaymentGatewayConfigRequest
schema = PaymentGatewayConfigRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/aggregator/request", """{"required":[{"name":"company_id","in":"path","description":"Company Id","schema":{"type":"integer"},"required":true},{"name":"application_id","in":"path","description":"Application id","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/aggregator/request", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def updateBrandPaymentGatewayConfig(self, body=""):
"""Save Config Secret For Brand Payment Gateway
"""
payload = {}
# Parameter validation
schema = PaymentValidator.updateBrandPaymentGatewayConfig()
schema.dump(schema.load(payload))
# Body validation
from .models.PaymentGatewayConfigRequest import PaymentGatewayConfigRequest
schema = PaymentGatewayConfigRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/aggregator/request", """{"required":[{"name":"company_id","in":"path","description":"Company Id","schema":{"type":"integer"},"required":true},{"name":"application_id","in":"path","description":"Application id","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/aggregator/request", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getPaymentModeRoutes(self, refresh=None, request_type=None):
"""Use this API to get Get All Valid Payment Options for making payment
:param refresh : : type boolean
:param request_type : : type string
"""
payload = {}
if refresh:
payload["refresh"] = refresh
if request_type:
payload["request_type"] = request_type
# Parameter validation
schema = PaymentValidator.getPaymentModeRoutes()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/payment/options", """{"required":[{"name":"company_id","in":"path","description":"Company Id","schema":{"type":"integer"},"required":true},{"name":"application_id","in":"path","description":"Application id","schema":{"type":"string"},"required":true},{"name":"refresh","in":"query","required":true,"schema":{"type":"boolean"}},{"name":"request_type","in":"query","required":true,"schema":{"type":"string"}}],"optional":[],"query":[{"name":"refresh","in":"query","required":true,"schema":{"type":"boolean"}},{"name":"request_type","in":"query","required":true,"schema":{"type":"string"}}],"headers":[]}""", refresh=refresh, request_type=request_type)
query_string = await create_query_string(refresh=refresh, request_type=request_type)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/payment/options", refresh=refresh, request_type=request_type), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
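# Illustrative usage (sketch): both query parameters are marked required in the schema
# above; the request_type value shown is an assumption, not a documented enum.
#
#   payment = Payment(config, app_id)
#   modes = await payment.getPaymentModeRoutes(refresh=True, request_type="order")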
async def addBeneficiaryDetails(self, body=""):
"""Use this API to save bank details for returned/cancelled order to refund amount in his account.
"""
payload = {}
# Parameter validation
schema = PaymentValidator.addBeneficiaryDetails()
schema.dump(schema.load(payload))
# Body validation
from .models.AddBeneficiaryDetailsRequest import AddBeneficiaryDetailsRequest
schema = AddBeneficiaryDetailsRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/refund/account", """{"required":[{"name":"company_id","in":"path","description":"Company Id","schema":{"type":"integer"},"required":true},{"name":"application_id","in":"path","description":"Application id","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/refund/account", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getUserOrderBeneficiaries(self, order_id=None):
"""Get all active beneficiary details added by the user for refund
:param order_id : : type string
"""
payload = {}
if order_id:
payload["order_id"] = order_id
# Parameter validation
schema = PaymentValidator.getUserOrderBeneficiaries()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/refund/accounts/order", """{"required":[{"in":"query","name":"order_id","required":true,"schema":{"type":"string"}},{"name":"company_id","in":"path","description":"Company Id","schema":{"type":"integer"},"required":true},{"name":"application_id","in":"path","description":"Application id","schema":{"type":"string"},"required":true}],"optional":[],"query":[{"in":"query","name":"order_id","required":true,"schema":{"type":"string"}}],"headers":[]}""", order_id=order_id, )
query_string = await create_query_string(order_id=order_id, )
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/refund/accounts/order", order_id=order_id, ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getUserBeneficiaries(self, order_id=None):
"""Get all active beneficiary details added by the user for refund
:param order_id : : type string
"""
payload = {}
if order_id:
payload["order_id"] = order_id
# Parameter validation
schema = PaymentValidator.getUserBeneficiaries()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/refund/accounts/user", """{"required":[{"in":"query","name":"order_id","required":true,"schema":{"type":"string"}},{"name":"company_id","in":"path","description":"Company Id","schema":{"type":"integer"},"required":true},{"name":"application_id","in":"path","description":"Application id","schema":{"type":"string"},"required":true}],"optional":[],"query":[{"in":"query","name":"order_id","required":true,"schema":{"type":"string"}}],"headers":[]}""", order_id=order_id, )
query_string = await create_query_string(order_id=order_id, )
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/refund/accounts/user", order_id=order_id, ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def confirmPayment(self, body=""):
"""Use this API to confirm payment after payment gateway accepted payment.
"""
payload = {}
# Parameter validation
schema = PaymentValidator.confirmPayment()
schema.dump(schema.load(payload))
# Body validation
from .models.PaymentConfirmationRequest import PaymentConfirmationRequest
schema = PaymentConfirmationRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/payment/confirm", """{"required":[{"name":"company_id","in":"path","description":"Company Id","schema":{"type":"integer"},"required":true},{"name":"application_id","in":"path","description":"Application id","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/payment/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/payment/confirm", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
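# Illustrative usage (sketch): the body is validated against PaymentConfirmationRequest;
# the field shown here is a placeholder, not the confirmed schema.
#
#   res = await payment.confirmPayment(body={"order_id": "<order-id>"})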
class Order:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
async def getOrderDetails(self, order_id=None, next=None, previous=None):
"""Get Orders
:param order_id : Order Id : type string
:param next : Next : type string
:param previous : Previous : type string
"""
payload = {}
if order_id:
payload["order_id"] = order_id
if next:
payload["next"] = next
if previous:
payload["previous"] = previous
# Parameter validation
schema = OrderValidator.getOrderDetails()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/details", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}}],"optional":[{"name":"order_id","in":"query","description":"Order Id","required":false,"schema":{"type":"string"}},{"name":"next","in":"query","description":"Next","required":false,"schema":{"type":"string"}},{"name":"previous","in":"query","description":"Previous","required":false,"schema":{"type":"string"}}],"query":[{"name":"order_id","in":"query","description":"Order Id","required":false,"schema":{"type":"string"}},{"name":"next","in":"query","description":"Next","required":false,"schema":{"type":"string"}},{"name":"previous","in":"query","description":"Previous","required":false,"schema":{"type":"string"}}],"headers":[]}""", order_id=order_id, next=next, previous=previous)
query_string = await create_query_string(order_id=order_id, next=next, previous=previous)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/details", order_id=order_id, next=next, previous=previous), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
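# Illustrative usage (sketch): `next` and `previous` are cursor strings taken from a
# previous response (the exact response field names are not shown here).
#
#   order = Order(config, app_id)
#   details = await order.getOrderDetails(order_id="<order-id>")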
async def trackShipmentPlatform(self, shipment_id=None):
"""Shipment Track
:param shipment_id : Shipment Id : type string
"""
payload = {}
if shipment_id:
payload["shipment_id"] = shipment_id
# Parameter validation
schema = OrderValidator.trackShipmentPlatform()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/shipments/{shipment_id}/track", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}},{"name":"shipment_id","in":"path","description":"Shipment Id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", shipment_id=shipment_id)
query_string = await create_query_string(shipment_id=shipment_id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/shipments/{shipment_id}/track", shipment_id=shipment_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def trackOrder(self, order_id=None):
"""Order Track
:param order_id : Order Id : type string
"""
payload = {}
if order_id:
payload["order_id"] = order_id
# Parameter validation
schema = OrderValidator.trackOrder()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/{order_id}/track", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}},{"name":"order_id","in":"path","description":"Order Id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", order_id=order_id)
query_string = await create_query_string(order_id=order_id)
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/{order_id}/track", order_id=order_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def failedOrders(self, ):
"""Failed Orders
"""
payload = {}
# Parameter validation
schema = OrderValidator.failedOrders()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/failed", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/failed", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def reprocessOrder(self, order_id=None):
"""Order Reprocess
:param order_id : Order Id : type string
"""
payload = {}
if order_id:
payload["order_id"] = order_id
# Parameter validation
schema = OrderValidator.reprocessOrder()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/{order_id}/reprocess", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}},{"name":"order_id","in":"path","description":"Order Id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", order_id=order_id)
query_string = await create_query_string(order_id=order_id)
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/{order_id}/reprocess", order_id=order_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateShipment(self, shipment_id=None, body=""):
"""Update the shipment
:param shipment_id : ID of the shipment. An order may contain multiple items and may be divided into one or more shipments, each having its own ID. : type string
"""
payload = {}
if shipment_id:
payload["shipment_id"] = shipment_id
# Parameter validation
schema = OrderValidator.updateShipment()
schema.dump(schema.load(payload))
# Body validation
from .models.ShipmentUpdateRequest import ShipmentUpdateRequest
schema = ShipmentUpdateRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/shipments/{shipment_id}/update", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}},{"name":"shipment_id","in":"path","description":"ID of the shipment. An order may contain multiple items and may get divided into one or more shipment, each having its own ID.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", shipment_id=shipment_id)
query_string = await create_query_string(shipment_id=shipment_id)
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/shipments/{shipment_id}/update", shipment_id=shipment_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
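# Illustrative usage (sketch): the body is validated against ShipmentUpdateRequest; the
# status payload below is a placeholder, not the confirmed schema.
#
#   res = await order.updateShipment(
#       shipment_id="<shipment-id>", body={"status": "cancelled"}
#   )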
async def getPlatformShipmentReasons(self, action=None):
"""Get reasons behind full or partial cancellation of a shipment
:param action : Action : type string
"""
payload = {}
if action:
payload["action"] = action
# Parameter validation
schema = OrderValidator.getPlatformShipmentReasons()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/shipments/reasons/{action}", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}},{"name":"action","in":"path","description":"Action","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", action=action)
query_string = await create_query_string(action=action)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/shipments/reasons/{action}", action=action), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getShipmentTrackDetails(self, order_id=None, shipment_id=None):
"""Track shipment
:param order_id : ID of the order. : type string
:param shipment_id : ID of the shipment. An order may contain multiple items and may be divided into one or more shipments, each having its own ID. : type string
"""
payload = {}
if order_id:
payload["order_id"] = order_id
if shipment_id:
payload["shipment_id"] = shipment_id
# Parameter validation
schema = OrderValidator.getShipmentTrackDetails()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/{order_id}/shipments/{shipment_id}/track", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}},{"name":"order_id","in":"path","description":"ID of the order.","required":true,"schema":{"type":"string"}},{"name":"shipment_id","in":"path","description":"ID of the shipment. An order may contain multiple items and may get divided into one or more shipment, each having its own ID.","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", order_id=order_id, shipment_id=shipment_id)
query_string = await create_query_string(order_id=order_id, shipment_id=shipment_id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders/{order_id}/shipments/{shipment_id}/track", order_id=order_id, shipment_id=shipment_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getOrdersByApplicationId(self, page_no=None, page_size=None, from_date=None, to_date=None, q=None, stage=None, sales_channels=None, order_id=None, stores=None, status=None, dp=None, shorten_urls=None, filter_type=None):
"""Get Orders at Application Level
:param page_no : Current page number : type string
:param page_size : Page limit : type string
:param from_date : From Date : type string
:param to_date : To Date : type string
:param q : Keyword for Search : type string
:param stage : Specific Order Stage : type string
:param sales_channels : Selected Sales Channel : type string
:param order_id : Order Id : type string
:param stores : Selected Stores : type string
:param status : Status of order : type string
:param dp : Delivery Partners : type string
:param shorten_urls : Shorten URL option : type boolean
:param filter_type : Filters : type string
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if from_date:
payload["from_date"] = from_date
if to_date:
payload["to_date"] = to_date
if q:
payload["q"] = q
if stage:
payload["stage"] = stage
if sales_channels:
payload["sales_channels"] = sales_channels
if order_id:
payload["order_id"] = order_id
if stores:
payload["stores"] = stores
if status:
payload["status"] = status
if dp:
payload["dp"] = dp
if shorten_urls:
payload["shorten_urls"] = shorten_urls
if filter_type:
payload["filter_type"] = filter_type
# Parameter validation
schema = OrderValidator.getOrdersByApplicationId()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}}],"optional":[{"name":"page_no","in":"query","description":"Current page number","required":false,"schema":{"type":"string"}},{"name":"page_size","in":"query","description":"Page limit","required":false,"schema":{"type":"string"}},{"name":"from_date","in":"query","description":"From Date","required":false,"schema":{"type":"string"}},{"name":"to_date","in":"query","description":"To Date","required":false,"schema":{"type":"string"}},{"name":"q","in":"query","description":"Keyword for Search","required":false,"schema":{"type":"string"}},{"name":"stage","in":"query","description":"Specefic Order Stage","required":false,"schema":{"type":"string"}},{"name":"sales_channels","in":"query","description":"Selected Sales Channel","required":false,"schema":{"type":"string"}},{"name":"order_id","in":"query","description":"Order Id","required":false,"schema":{"type":"string"}},{"name":"stores","in":"query","description":"Selected Stores","required":false,"schema":{"type":"string"}},{"name":"status","in":"query","description":"Status of order","required":false,"schema":{"type":"string"}},{"name":"dp","in":"query","description":"Delivery Partners","required":false,"schema":{"type":"string"}},{"name":"shorten_urls","in":"query","description":"Shorten URL option","required":false,"schema":{"type":"boolean"}},{"name":"filter_type","in":"query","description":"Filters","required":false,"schema":{"type":"string"}}],"query":[{"name":"page_no","in":"query","description":"Current page number","required":false,"schema":{"type":"string"}},{"name":"page_size","in":"query","description":"Page limit","required":false,"schema":{"type":"string"}},{"name":"from_date","in":"query","description":"From Date","required":false,"schema":{"type":"string"}},{"name":"to_date","in":"query","description":"To Date","required":false,"schema":{"type":"string"}},{"name":"q","in":"query","description":"Keyword for Search","required":false,"schema":{"type":"string"}},{"name":"stage","in":"query","description":"Specefic Order Stage","required":false,"schema":{"type":"string"}},{"name":"sales_channels","in":"query","description":"Selected Sales Channel","required":false,"schema":{"type":"string"}},{"name":"order_id","in":"query","description":"Order Id","required":false,"schema":{"type":"string"}},{"name":"stores","in":"query","description":"Selected Stores","required":false,"schema":{"type":"string"}},{"name":"status","in":"query","description":"Status of order","required":false,"schema":{"type":"string"}},{"name":"dp","in":"query","description":"Delivery Partners","required":false,"schema":{"type":"string"}},{"name":"shorten_urls","in":"query","description":"Shorten URL option","required":false,"schema":{"type":"boolean"}},{"name":"filter_type","in":"query","description":"Filters","required":false,"schema":{"type":"string"}}],"headers":[]}""", page_no=page_no, page_size=page_size, from_date=from_date, to_date=to_date, q=q, stage=stage, sales_channels=sales_channels, order_id=order_id, stores=stores, status=status, dp=dp, shorten_urls=shorten_urls, filter_type=filter_type)
query_string = await create_query_string(page_no=page_no, page_size=page_size, from_date=from_date, to_date=to_date, q=q, stage=stage, sales_channels=sales_channels, order_id=order_id, stores=stores, status=status, dp=dp, shorten_urls=shorten_urls, filter_type=filter_type)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/order/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/orders", page_no=page_no, page_size=page_size, from_date=from_date, to_date=to_date, q=q, stage=stage, sales_channels=sales_channels, order_id=order_id, stores=stores, status=status, dp=dp, shorten_urls=shorten_urls, filter_type=filter_type), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
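# Usage sketch for the orders listing above (illustrative only). "orders_client" stands for an
# instance of the enclosing class, constructed the same way as Catalog below (a platform config
# object plus an application id); all argument values here are placeholders.
#
#     result = await orders_client.getOrdersByApplicationId(
#         page_no=1,
#         page_size=10,
#         from_date="2021-01-01",
#         to_date="2021-01-31",
#         status="placed",
#     )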
class Catalog:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
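# Construction sketch (illustrative only): the usage comments below assume a "catalog" instance
# built like this, where "config" is the same platform configuration object used above (it must
# expose domain, companyId and getAccessToken) and the application id is a placeholder.
#
#     catalog = Catalog(config, "your-application-id")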
async def updateSearchKeywords(self, id=None, body=""):
"""Update Search Keyword by its id. On successful request, returns the updated collection
:param id : A `id` is a unique identifier for a particular detail. Pass the `id` of the keywords which you want to delete. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CatalogValidator.updateSearchKeywords()
schema.dump(schema.load(payload))
# Body validation
from .models.CreateSearchKeyword import CreateSearchKeyword
schema = CreateSearchKeyword()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/keyword/{id}/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true},{"in":"path","name":"id","description":"A `id` is a unique identifier for a particular detail. Pass the `id` of the keywords which you want to delete.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/keyword/{id}/", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
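# Usage sketch (illustrative only): updating an existing search keyword mapping. "catalog" is an
# instance of this Catalog class; the id and body values are placeholders -- the body must be a
# dict that satisfies the CreateSearchKeyword model used for validation above.
#
#     body = {...}  # fields as defined by CreateSearchKeyword
#     updated = await catalog.updateSearchKeywords(id="existing-keyword-id", body=body)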
async def deleteSearchKeywords(self, id=None):
"""Delete a keywords by it's id. Returns an object that tells whether the keywords was deleted successfully
:param id : A `id` is a unique identifier for a particular detail. Pass the `id` of the keywords which you want to delete. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CatalogValidator.deleteSearchKeywords()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/keyword/{id}/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true},{"in":"path","name":"id","description":"A `id` is a unique identifier for a particular detail. Pass the `id` of the keywords which you want to delete.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/keyword/{id}/", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getSearchKeywords(self, id=None):
"""Get the details of a words by its `id`. If successful, returns a Collection resource in the response body specified in `GetSearchWordsDetailResponseSchema`
:param id : A `id` is a unique identifier for a particular detail. Pass the `id` of the keywords which you want to retrieve. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CatalogValidator.getSearchKeywords()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/keyword/{id}/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true},{"in":"path","name":"id","description":"A `id` is a unique identifier for a particular detail. Pass the `id` of the keywords which you want to retrieve.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/keyword/{id}/", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
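# Usage sketch (illustrative only): fetching a single search keyword mapping by its id. "catalog"
# and the id value are placeholders.
#
#     detail = await catalog.getSearchKeywords(id="existing-keyword-id")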
async def createCustomKeyword(self, body=""):
"""Create a Custom Search Keywords. See `CreateSearchKeywordSchema` for the list of attributes needed to create a mapping and /collections/query-options for the available options to create a rule. On successful request, returns a paginated list of collections specified in `CreateSearchKeywordSchema`
"""
payload = {}
# Parameter validation
schema = CatalogValidator.createCustomKeyword()
schema.dump(schema.load(payload))
# Body validation
from .models.CreateSearchKeyword import CreateSearchKeyword
schema = CreateSearchKeyword()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/keyword/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/keyword/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getAllSearchKeyword(self, ):
"""Custom Search Keyword allows you to map conditions with keywords to give you the ultimate results
"""
payload = {}
# Parameter validation
schema = CatalogValidator.getAllSearchKeyword()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/keyword/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/keyword/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateAutocompleteKeyword(self, id=None, body=""):
"""Update a mapping by it's id. On successful request, returns the updated Keyword mapping
:param id : A `id` is a unique identifier for a particular detail. Pass the `id` of the keywords which you want to delete. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CatalogValidator.updateAutocompleteKeyword()
schema.dump(schema.load(payload))
# Body validation
from .models.CreateAutocompleteKeyword import CreateAutocompleteKeyword
schema = CreateAutocompleteKeyword()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/autocomplete/{id}/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true},{"in":"path","name":"id","description":"A `id` is a unique identifier for a particular detail. Pass the `id` of the keywords which you want to delete.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/autocomplete/{id}/", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def deleteAutocompleteKeyword(self, id=None):
"""Delete a keywords by it's id. Returns an object that tells whether the keywords was deleted successfully
:param id : A `id` is a unique identifier for a particular detail. Pass the `id` of the keywords which you want to delete. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CatalogValidator.deleteAutocompleteKeyword()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/autocomplete/{id}/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true},{"in":"path","name":"id","description":"A `id` is a unique identifier for a particular detail. Pass the `id` of the keywords which you want to delete.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/autocomplete/{id}/", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getAutocompleteKeywordDetail(self, id=None):
"""Get the details of a words by its `id`. If successful, returns a keywords resource in the response body specified in `GetAutocompleteWordsResponseSchema`
:param id : A `id` is a unique identifier for a particular detail. Pass the `id` of the keywords which you want to retrieve. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CatalogValidator.getAutocompleteKeywordDetail()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/autocomplete/{id}/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true},{"in":"path","name":"id","description":"A `id` is a unique identifier for a particular detail. Pass the `id` of the keywords which you want to retrieve.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/autocomplete/{id}/", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createCustomAutocompleteRule(self, body=""):
"""Create a Custom Autocomplete Keywords. See `CreateAutocompleteKeywordSchema` for the list of attributes needed to create a mapping and /collections/query-options for the available options to create a rule. On successful request, returns a paginated list of collections specified in `CreateAutocompleteKeywordSchema`
"""
payload = {}
# Parameter validation
schema = CatalogValidator.createCustomAutocompleteRule()
schema.dump(schema.load(payload))
# Body validation
from .models.CreateAutocompleteKeyword import CreateAutocompleteKeyword
schema = CreateAutocompleteKeyword()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/autocomplete/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/autocomplete/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
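# Usage sketch (illustrative only): creating an autocomplete mapping. The body is a dict that must
# satisfy the CreateAutocompleteKeyword model used for validation above; its exact fields are not
# shown here.
#
#     body = {...}  # fields as defined by CreateAutocompleteKeyword
#     created = await catalog.createCustomAutocompleteRule(body=body)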
async def getAutocompleteConfig(self, ):
"""Custom Autocomplete Keyword allows you to map conditions with keywords to give you the ultimate results
"""
payload = {}
# Parameter validation
schema = CatalogValidator.getAutocompleteConfig()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/autocomplete/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/search/autocomplete/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getCatalogConfiguration(self, ):
"""configuration meta details for catalog.
"""
payload = {}
# Parameter validation
schema = CatalogValidator.getCatalogConfiguration()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/product-configuration/metadata/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/product-configuration/metadata/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createConfigurationProductListing(self, body=""):
"""Add configuration for products & listing.
"""
payload = {}
# Parameter validation
schema = CatalogValidator.createConfigurationProductListing()
schema.dump(schema.load(payload))
# Body validation
from .models.AppConfiguration import AppConfiguration
schema = AppConfiguration()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/product-configuration/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/product-configuration/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getConfigurations(self, ):
"""configured details for catalog.
"""
payload = {}
# Parameter validation
schema = CatalogValidator.getConfigurations()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/product-configuration/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/product-configuration/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createConfigurationByType(self, type=None, body=""):
"""Add configuration for categories & brands.
:param type : type can be brands, categories etc. : type string
"""
payload = {}
if type:
payload["type"] = type
# Parameter validation
schema = CatalogValidator.createConfigurationByType()
schema.dump(schema.load(payload))
# Body validation
from .models.AppConfiguration import AppConfiguration
schema = AppConfiguration()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/product-configuration/{type}/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true},{"in":"path","name":"type","description":"type can be brands, categories etc.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", type=type)
query_string = await create_query_string(type=type)
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/product-configuration/{type}/", type=type), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
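# Usage sketch (illustrative only): the docstring above notes that `type` can be "brands",
# "categories", etc. The body must satisfy the AppConfiguration model used for validation above.
#
#     body = {...}  # fields as defined by AppConfiguration
#     created = await catalog.createConfigurationByType(type="brands", body=body)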
async def getConfigurationByType(self, type=None):
"""configured details for catalog.
:param type : type can be brands, categories etc. : type string
"""
payload = {}
if type:
payload["type"] = type
# Parameter validation
schema = CatalogValidator.getConfigurationByType()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/product-configuration/{type}/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true},{"in":"path","name":"type","description":"type can be brands, categories etc.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", type=type)
query_string = await create_query_string(type=type)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/product-configuration/{type}/", type=type), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getQueryFilters(self, ):
"""Get query filters to configure a collection
"""
payload = {}
# Parameter validation
schema = CatalogValidator.getQueryFilters()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/query-options/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/query-options/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createCollection(self, body=""):
"""Create a collection. See `CreateCollectionRequestSchema` for the list of attributes needed to create a collection and collections/query-options for the available options to create a collection. On successful request, returns a paginated list of collections specified in `CollectionCreateResponse`
"""
payload = {}
# Parameter validation
schema = CatalogValidator.createCollection()
schema.dump(schema.load(payload))
# Body validation
from .models.CreateCollection import CreateCollection
schema = CreateCollection()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getAllCollections(self, ):
"""A Collection allows you to organize your products into hierarchical groups. For example, a dress might be in the category _Clothing_, the individual product might also be in the collection _Summer_. On successful request, returns all the collections as specified in `CollectionListingSchema`
"""
payload = {}
# Parameter validation
schema = CatalogValidator.getAllCollections()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getCollectionDetail(self, slug=None):
"""Get the details of a collection by its `slug`. If successful, returns a Collection resource in the response body specified in `CollectionDetailResponse`
:param slug : A `slug` is a human readable, URL friendly unique identifier of an object. Pass the `slug` of the collection which you want to retrieve. : type string
"""
payload = {}
if slug:
payload["slug"] = slug
# Parameter validation
schema = CatalogValidator.getCollectionDetail()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/{slug}/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true},{"in":"path","name":"slug","description":"A `slug` is a human readable, URL friendly unique identifier of an object. Pass the `slug` of the collection which you want to retrieve.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", slug=slug)
query_string = await create_query_string(slug=slug)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/{slug}/", slug=slug), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateCollection(self, id=None, body=""):
"""Update a collection by it's id. On successful request, returns the updated collection
:param id : A `id` is a unique identifier of a collection. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CatalogValidator.updateCollection()
schema.dump(schema.load(payload))
# Body validation
from .models.UpdateCollection import UpdateCollection
schema = UpdateCollection()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/{id}/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true},{"in":"path","name":"id","description":"A `id` is a unique identifier of a collection.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/{id}/", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def deleteCollection(self, id=None):
"""Delete a collection by it's id. Returns an object that tells whether the collection was deleted successfully
:param id : A `id` is a unique identifier of a collection. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CatalogValidator.deleteCollection()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/{id}/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true},{"in":"path","name":"id","description":"A `id` is a unique identifier of a collection.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/{id}/", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def addCollectionItems(self, id=None, body=""):
"""Adds items to a collection specified by its `id`. See `CollectionItemRequest` for the list of attributes needed to add items to an collection.
:param id : A `id` is a unique identifier of a collection. : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CatalogValidator.addCollectionItems()
schema.dump(schema.load(payload))
# Body validation
from .models.CollectionItemRequest import CollectionItemRequest
schema = CollectionItemRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/{id}/items/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true},{"in":"path","name":"id","description":"A `id` is a unique identifier of a collection.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/{id}/items/", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getCollectionItems(self, id=None, sort_on=None, page_id=None, page_size=None):
"""Get items from a collection specified by its `id`.
:param id : An `id` is a unique identifier of a collection. : type string
:param sort_on : Each response will contain sort_on param, which should be sent back to make pagination work. : type string
:param page_id : Each response will contain next_id param, which should be sent back to make pagination work. : type string
:param page_size : Number of items to retrieve in each page. Default is 12. : type integer
"""
payload = {}
if id:
payload["id"] = id
if sort_on:
payload["sort_on"] = sort_on
if page_id:
payload["page_id"] = page_id
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = CatalogValidator.getCollectionItems()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/{id}/items/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true},{"in":"path","name":"id","description":"A `id` is a unique identifier of a collection.","schema":{"type":"string"},"required":true}],"optional":[{"in":"query","name":"sort_on","description":"Each response will contain sort_on param, which should be sent back to make pagination work.","schema":{"type":"string"},"required":false},{"in":"query","name":"page_id","description":"Each response will contain next_id param, which should be sent back to make pagination work.","schema":{"type":"string"},"required":false},{"in":"query","name":"page_size","description":"Number of items to retrieve in each page. Default is 12.","schema":{"type":"integer"},"required":false}],"query":[{"in":"query","name":"sort_on","description":"Each response will contain sort_on param, which should be sent back to make pagination work.","schema":{"type":"string"},"required":false},{"in":"query","name":"page_id","description":"Each response will contain next_id param, which should be sent back to make pagination work.","schema":{"type":"string"},"required":false},{"in":"query","name":"page_size","description":"Number of items to retrieve in each page. Default is 12.","schema":{"type":"integer"},"required":false}],"headers":[]}""", id=id, sort_on=sort_on, page_id=page_id, page_size=page_size)
query_string = await create_query_string(id=id, sort_on=sort_on, page_id=page_id, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/collections/{id}/items/", id=id, sort_on=sort_on, page_id=page_id, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
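# Usage sketch (illustrative only): cursor-style paging through a collection's items. Per the
# parameter docs above, each response carries next_id / sort_on values that should be sent back on
# the following call; where exactly they sit in the response payload depends on the response
# schema and is not shown here. The id and page values are placeholders.
#
#     first_page = await catalog.getCollectionItems(id="collection-id", page_size=12)
#     # next_page = await catalog.getCollectionItems(id="collection-id", page_id=..., sort_on=..., page_size=12)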
async def getCatalogInsights(self, brand=None):
"""Catalog Insights api returns the count of catalog related data like products, brands, departments and categories that have been made live as per configuration of the app.
:param brand : Brand slug : type string
"""
payload = {}
if brand:
payload["brand"] = brand
# Parameter validation
schema = CatalogValidator.getCatalogInsights()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/analytics/insights/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[{"in":"query","name":"brand","description":"Brand slug","schema":{"type":"string"},"required":false}],"query":[{"in":"query","name":"brand","description":"Brand slug","schema":{"type":"string"},"required":false}],"headers":[]}""", brand=brand)
query_string = await create_query_string(brand=brand)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/analytics/insights/", brand=brand), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getApplicationBrands(self, department=None, page_no=None, page_size=None):
"""A brand is the name under which a product is being sold. Use this API to list all the brands. You can pass optionally filter the brands by the department. If successful, returns a paginated list of brands specified in `BrandListingResponse`
:param department : The name of the department. Use this parameter to filter products by a particular department. See below the list of available departments. You can retrieve available departments from the **v1.0/departments/** API : type string
:param page_no : The page number to navigate through the given set of results : type integer
:param page_size : Number of items to retrieve in each page. Default is 12. : type integer
"""
payload = {}
if department:
payload["department"] = department
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = CatalogValidator.getApplicationBrands()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/brands", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[{"in":"query","name":"department","description":"The name of the department. Use this parameter to filter products by a particular department. See below the list of available departments. You can retrieve available departments from the **v1.0/departments/** API","schema":{"type":"string","enum":["baby-care-kids-essentials","beauty-personal-care","home-living","kids","men","others","toys","women"]},"required":false},{"in":"query","name":"page_no","description":"The page number to navigate through the given set of results","schema":{"type":"integer"},"required":false},{"in":"query","name":"page_size","description":"Number of items to retrieve in each page. Default is 12.","schema":{"type":"integer","default":12},"required":false}],"query":[{"in":"query","name":"department","description":"The name of the department. Use this parameter to filter products by a particular department. See below the list of available departments. You can retrieve available departments from the **v1.0/departments/** API","schema":{"type":"string","enum":["baby-care-kids-essentials","beauty-personal-care","home-living","kids","men","others","toys","women"]},"required":false},{"in":"query","name":"page_no","description":"The page number to navigate through the given set of results","schema":{"type":"integer"},"required":false},{"in":"query","name":"page_size","description":"Number of items to retrieve in each page. Default is 12.","schema":{"type":"integer","default":12},"required":false}],"headers":[]}""", department=department, page_no=page_no, page_size=page_size)
query_string = await create_query_string(department=department, page_no=page_no, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/brands", department=department, page_no=page_no, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
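# Usage sketch (illustrative only): listing brands filtered by department with number-based
# paging. "women" is one of the department slugs enumerated in the parameter schema above; the
# page values are placeholders.
#
#     brands = await catalog.getApplicationBrands(department="women", page_no=1, page_size=12)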
async def getDepartments(self, ):
"""Departments are a way to categorise similar products. A product can lie in multiple departments. For example, a skirt can below to the 'Women's Fashion' Department while a handbag can lie in 'Women's Accessories' Department. Use this API to list all the departments. If successful, returns the list of departments specified in `DepartmentResponse`
"""
payload = {}
# Parameter validation
schema = CatalogValidator.getDepartments()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/departments", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/departments", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getCategories(self, department=None):
"""List all the categories. You can optionally pass filter the brands by the department. If successful, returns a paginated list of brands specified in `CategoryListingResponse`
:param department : The name of the department. Use this parameter to filter products by a particular department. See below the list of available departments. You can retrieve available departments from the **v1.0/departments/** API : type string
"""
payload = {}
if department:
payload["department"] = department
# Parameter validation
schema = CatalogValidator.getCategories()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/categories", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[{"in":"query","name":"department","description":"The name of the department. Use this parameter to filter products by a particular department. See below the list of available departments. You can retrieve available departments from the **v1.0/departments/** API","schema":{"type":"string","enum":["baby-care-kids-essentials","beauty-personal-care","home-living","kids","men","others","toys","women"]},"required":false}],"query":[{"in":"query","name":"department","description":"The name of the department. Use this parameter to filter products by a particular department. See below the list of available departments. You can retrieve available departments from the **v1.0/departments/** API","schema":{"type":"string","enum":["baby-care-kids-essentials","beauty-personal-care","home-living","kids","men","others","toys","women"]},"required":false}],"headers":[]}""", department=department)
query_string = await create_query_string(department=department)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/categories", department=department), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getAppicationProducts(self, q=None, f=None, filters=None, sort_on=None, page_id=None, page_size=None, page_no=None, page_type=None, item_ids=None):
"""List all the products associated with a brand, collection or category in a requested sort order. The API additionally supports arbitrary search queries that may refer the name of any product, brand, category or collection. If successful, returns a paginated list of products specified in `ApplicationProductListingResponse`
:param q : The search query. This can be a partial or complete name of a either a product, brand or category : type string
:param f : The search filter parameters. All the parameter filtered from filter parameters will be passed in **f** parameter in this format. **?f=brand:voi-jeans||and:::category:t-shirts||shirts** : type string
:param filters : Pass `filters` parameter to fetch the filter details. This flag is used to fetch all filters : type boolean
:param sort_on : The order to sort the list of products on. The supported sort parameters are popularity, price, redemption and discount in either ascending or descending order. See the supported values below. : type string
:param page_id : Each response will contain **page_id** param, which should be sent back to make pagination work. : type string
:param page_size : Number of items to retrieve in each page. Default is 12. : type integer
:param page_no : If page_type is number then pass it to fetch page items. Default is 1. : type integer
:param page_type : For pagination type should be cursor or number. Default is cursor. : type string
:param item_ids : Item Ids of product : type array
"""
payload = {}
if q:
payload["q"] = q
if f:
payload["f"] = f
if filters:
payload["filters"] = filters
if sort_on:
payload["sort_on"] = sort_on
if page_id:
payload["page_id"] = page_id
if page_size:
payload["page_size"] = page_size
if page_no:
payload["page_no"] = page_no
if page_type:
payload["page_type"] = page_type
if item_ids:
payload["item_ids"] = item_ids
# Parameter validation
schema = CatalogValidator.getAppicationProducts()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/products", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[{"in":"query","name":"q","description":"The search query. This can be a partial or complete name of a either a product, brand or category","schema":{"type":"string"},"required":false},{"in":"query","name":"f","description":"The search filter parameters. All the parameter filtered from filter parameters will be passed in **f** parameter in this format. **?f=brand:voi-jeans||and:::category:t-shirts||shirts**","schema":{"type":"string"},"required":false},{"in":"query","name":"filters","description":"Pass `filters` parameter to fetch the filter details. This flag is used to fetch all filters","schema":{"type":"boolean","default":true},"required":false},{"in":"query","name":"sort_on","description":"The order to sort the list of products on. The supported sort parameters are popularity, price, redemption and discount in either ascending or descending order. See the supported values below.","schema":{"type":"string","enum":["latest","popular","price_asc","price_dsc","discount_asc","discount_dsc"]},"required":false},{"in":"query","name":"page_id","description":"Each response will contain **page_id** param, which should be sent back to make pagination work.","schema":{"type":"string"},"required":false},{"in":"query","name":"page_size","description":"Number of items to retrieve in each page. Default is 12.","schema":{"type":"integer","default":12},"required":false},{"in":"query","name":"page_no","description":"If page_type is number then pass it to fetch page items. Default is 1.","schema":{"type":"integer","default":1},"required":false},{"in":"query","name":"page_type","description":"For pagination type should be cursor or number. Default is cursor.","schema":{"type":"string","default":"cursor"},"required":false},{"in":"query","name":"item_ids","description":"Item Ids of product","schema":{"type":"array","items":{"type":"integer"}},"required":false}],"query":[{"in":"query","name":"q","description":"The search query. This can be a partial or complete name of a either a product, brand or category","schema":{"type":"string"},"required":false},{"in":"query","name":"f","description":"The search filter parameters. All the parameter filtered from filter parameters will be passed in **f** parameter in this format. **?f=brand:voi-jeans||and:::category:t-shirts||shirts**","schema":{"type":"string"},"required":false},{"in":"query","name":"filters","description":"Pass `filters` parameter to fetch the filter details. This flag is used to fetch all filters","schema":{"type":"boolean","default":true},"required":false},{"in":"query","name":"sort_on","description":"The order to sort the list of products on. The supported sort parameters are popularity, price, redemption and discount in either ascending or descending order. 
See the supported values below.","schema":{"type":"string","enum":["latest","popular","price_asc","price_dsc","discount_asc","discount_dsc"]},"required":false},{"in":"query","name":"page_id","description":"Each response will contain **page_id** param, which should be sent back to make pagination work.","schema":{"type":"string"},"required":false},{"in":"query","name":"page_size","description":"Number of items to retrieve in each page. Default is 12.","schema":{"type":"integer","default":12},"required":false},{"in":"query","name":"page_no","description":"If page_type is number then pass it to fetch page items. Default is 1.","schema":{"type":"integer","default":1},"required":false},{"in":"query","name":"page_type","description":"For pagination type should be cursor or number. Default is cursor.","schema":{"type":"string","default":"cursor"},"required":false},{"in":"query","name":"item_ids","description":"Item Ids of product","schema":{"type":"array","items":{"type":"integer"}},"required":false}],"headers":[]}""", q=q, f=f, filters=filters, sort_on=sort_on, page_id=page_id, page_size=page_size, page_no=page_no, page_type=page_type, item_ids=item_ids)
query_string = await create_query_string(q=q, f=f, filters=filters, sort_on=sort_on, page_id=page_id, page_size=page_size, page_no=page_no, page_type=page_type, item_ids=item_ids)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/products", q=q, f=f, filters=filters, sort_on=sort_on, page_id=page_id, page_size=page_size, page_no=page_no, page_type=page_type, item_ids=item_ids), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
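# Usage sketch (illustrative only): searching application products. The `f` value follows the
# filter format quoted in the parameter docs above; the query string and other values are
# placeholders, and sort_on uses one of the enumerated sort options.
#
#     products = await catalog.getAppicationProducts(
#         q="t-shirt",
#         f="brand:voi-jeans||and:::category:t-shirts||shirts",
#         sort_on="price_asc",
#         page_type="cursor",
#         page_size=12,
#     )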
async def getProductDetailBySlug(self, slug=None):
"""Products are the core resource of an application. Products can be associated by categories, collections, brands and more. This API retrieves the product specified by the given **slug**. If successful, returns a Product resource in the response body specified in `ProductDetail`
:param slug : The unique identifier of a product. i.e; `slug` of a product. You can retrieve these from the APIs that list products like **v1.0/products/** : type string
"""
payload = {}
if slug:
payload["slug"] = slug
# Parameter validation
schema = CatalogValidator.getProductDetailBySlug()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/products/{slug}", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true},{"in":"path","name":"slug","description":"The unique identifier of a product. i.e; `slug` of a product. You can retrieve these from the APIs that list products like **v1.0/products/**","schema":{"type":"string"},"required":true}],"optional":[],"query":[],"headers":[]}""", slug=slug)
query_string = await create_query_string(slug=slug)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/products/{slug}", slug=slug), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getAppProducts(self, brand_ids=None, category_ids=None, department_ids=None, tags=None, page_no=None, page_size=None, q=None):
"""Products are the core resource of an application. Products can be associated by categories, collections, brands and more. If successful, returns a Product resource in the response body specified in `ApplicationProductListingResponseDatabasePowered`
:param brand_ids : Get multiple products filtered by Brand Ids : type array
:param category_ids : Get multiple products filtered by Category Ids : type array
:param department_ids : Get multiple products filtered by Department Ids : type array
:param tags : Get multiple products filtered by tags : type array
:param page_no : The page number to navigate through the given set of results : type integer
:param page_size : Number of items to retrieve in each page. Default is 10. : type integer
:param q : Search with Item Code, Name, Slug or Identifier. : type string
"""
payload = {}
if brand_ids:
payload["brand_ids"] = brand_ids
if category_ids:
payload["category_ids"] = category_ids
if department_ids:
payload["department_ids"] = department_ids
if tags:
payload["tags"] = tags
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if q:
payload["q"] = q
# Parameter validation
schema = CatalogValidator.getAppProducts()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/raw-products/", """{"required":[{"in":"path","name":"company_id","description":"A `company_id` is a unique identifier for a particular seller account.","schema":{"type":"string"},"required":true},{"in":"path","name":"application_id","description":"A `application_id` is a unique identifier for a particular sale channel.","schema":{"type":"string"},"required":true}],"optional":[{"in":"query","name":"brand_ids","description":"Get multiple products filtered by Brand Ids","schema":{"type":"array","items":{"type":"integer"}},"required":false},{"in":"query","name":"category_ids","description":"Get multiple products filtered by Category Ids","schema":{"type":"array","items":{"type":"integer"}},"required":false},{"in":"query","name":"department_ids","description":"Get multiple products filtered by Department Ids","schema":{"type":"array","items":{"type":"integer"}},"required":false},{"in":"query","name":"tags","description":"Get multiple products filtered by tags","schema":{"type":"array","items":{"type":"string"}},"required":false},{"in":"query","name":"page_no","description":"The page number to navigate through the given set of results","schema":{"type":"integer"},"required":false},{"in":"query","name":"page_size","description":"Number of items to retrieve in each page. Default is 10.","schema":{"type":"integer","default":10},"required":false},{"in":"query","name":"q","description":"Search with Item Code, Name, Slug or Identifier.","schema":{"type":"string"},"required":false}],"query":[{"in":"query","name":"brand_ids","description":"Get multiple products filtered by Brand Ids","schema":{"type":"array","items":{"type":"integer"}},"required":false},{"in":"query","name":"category_ids","description":"Get multiple products filtered by Category Ids","schema":{"type":"array","items":{"type":"integer"}},"required":false},{"in":"query","name":"department_ids","description":"Get multiple products filtered by Department Ids","schema":{"type":"array","items":{"type":"integer"}},"required":false},{"in":"query","name":"tags","description":"Get multiple products filtered by tags","schema":{"type":"array","items":{"type":"string"}},"required":false},{"in":"query","name":"page_no","description":"The page number to navigate through the given set of results","schema":{"type":"integer"},"required":false},{"in":"query","name":"page_size","description":"Number of items to retrieve in each page. Default is 10.","schema":{"type":"integer","default":10},"required":false},{"in":"query","name":"q","description":"Search with Item Code, Name, Slug or Identifier.","schema":{"type":"string"},"required":false}],"headers":[]}""", brand_ids=brand_ids, category_ids=category_ids, department_ids=department_ids, tags=tags, page_no=page_no, page_size=page_size, q=q)
query_string = await create_query_string(brand_ids=brand_ids, category_ids=category_ids, department_ids=department_ids, tags=tags, page_no=page_no, page_size=page_size, q=q)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/catalog/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/raw-products/", brand_ids=brand_ids, category_ids=category_ids, department_ids=department_ids, tags=tags, page_no=page_no, page_size=page_size, q=q), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
class CompanyProfile:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
class FileStorage:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
async def appStartUpload(self, namespace=None, body=""):
"""Uploads an arbitrarily sized buffer or blob.
It has three major steps:
* Start
* Upload
* Complete
### Start
Initiates the assets upload using `appStartUpload`.
It returns the storage link in response.
### Upload
Use the storage link to upload a file (Buffer or Blob) to the File Storage.
Make a `PUT` request on the storage link received from the `appStartUpload` API, with the file (Buffer or Blob) as the request body.
### Complete
After a successful upload, call the `appCompleteUpload` API to complete the upload process.
This operation returns the URL of the uploaded file.
:param namespace : bucket name : type string
"""
payload = {}
if namespace:
payload["namespace"] = namespace
# Parameter validation
schema = FileStorageValidator.appStartUpload()
schema.dump(schema.load(payload))
# Body validation
from .models.StartRequest import StartRequest
schema = StartRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/assets/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/namespaces/{namespace}/upload/start/", """{"required":[{"name":"namespace","in":"path","description":"bucket name","required":true,"schema":{"type":"string"}},{"name":"company_id","in":"path","description":"company_id","required":true,"schema":{"type":"integer"}},{"name":"application_id","in":"path","description":"application id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", namespace=namespace, )
query_string = await create_query_string(namespace=namespace, )
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/assets/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/namespaces/{namespace}/upload/start/", namespace=namespace, ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def appCompleteUpload(self, namespace=None, body=""):
"""Uploads an arbitrarily sized buffer or blob.
It has three major steps:
* Start
* Upload
* Complete
### Start
Initiates the assets upload using `appStartUpload`.
It returns the storage link in response.
### Upload
Use the storage link to upload a file (Buffer or Blob) to the File Storage.
Make a `PUT` request on the storage link received from the `appStartUpload` API, with the file (Buffer or Blob) as the request body.
### Complete
After a successful upload, call the `appCompleteUpload` API to complete the upload process.
This operation returns the URL of the uploaded file.
:param namespace : bucket name : type string
"""
payload = {}
if namespace:
payload["namespace"] = namespace
# Parameter validation
schema = FileStorageValidator.appCompleteUpload()
schema.dump(schema.load(payload))
# Body validation
from .models.StartResponse import StartResponse
schema = StartResponse()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/assets/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/namespaces/{namespace}/upload/complete/", """{"required":[{"name":"namespace","in":"path","description":"bucket name","required":true,"schema":{"type":"string"}},{"name":"company_id","in":"path","description":"company_id","required":true,"schema":{"type":"integer"}},{"name":"application_id","in":"path","description":"application id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", namespace=namespace, )
query_string = await create_query_string(namespace=namespace, )
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/assets/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/namespaces/{namespace}/upload/complete/", namespace=namespace, ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def appCopyFiles(self, sync=None, body=""):
"""Copy Files
:param sync : sync : type boolean
"""
payload = {}
if sync:
payload["sync"] = sync
# Parameter validation
schema = FileStorageValidator.appCopyFiles()
schema.dump(schema.load(payload))
# Body validation
from .models.BulkRequest import BulkRequest
schema = BulkRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/assets/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/uploads/copy/", """{"required":[{"name":"company_id","in":"path","description":"company_id","required":true,"schema":{"type":"integer"}},{"name":"application_id","in":"path","description":"application_id","required":true,"schema":{"type":"integer"}}],"optional":[{"name":"sync","in":"query","description":"sync","required":false,"schema":{"type":"boolean"}}],"query":[{"name":"sync","in":"query","description":"sync","required":false,"schema":{"type":"boolean"}}],"headers":[]}""", sync=sync, )
query_string = await create_query_string(sync=sync, )
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/assets/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/uploads/copy/", sync=sync, ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def browse(self, namespace=None, page_no=None):
"""Browse Files
:param namespace : bucket name : type string
:param page_no : page no : type integer
"""
payload = {}
if namespace:
payload["namespace"] = namespace
if page_no:
payload["page_no"] = page_no
# Parameter validation
schema = FileStorageValidator.browse()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/assets/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/namespaces/{namespace}/browse/", """{"required":[{"name":"namespace","in":"path","description":"bucket name","required":true,"schema":{"type":"string"}},{"name":"company_id","in":"path","description":"company_id","required":true,"schema":{"type":"integer"}},{"name":"application_id","in":"path","description":"application_id","required":true,"schema":{"type":"integer"}}],"optional":[{"name":"page_no","in":"query","description":"page no","required":false,"schema":{"type":"integer"}}],"query":[{"name":"page_no","in":"query","description":"page no","required":false,"schema":{"type":"integer"}}],"headers":[]}""", namespace=namespace, page_no=page_no)
query_string = await create_query_string(namespace=namespace, page_no=page_no)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/assets/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/namespaces/{namespace}/browse/", namespace=namespace, page_no=page_no), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
class Share:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
async def createShortLink(self, body=""):
"""Create short link
"""
payload = {}
# Parameter validation
schema = ShareValidator.createShortLink()
schema.dump(schema.load(payload))
# Body validation
from .models.ShortLinkReq import ShortLinkReq
schema = ShortLinkReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/share/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/links/short-link/", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/share/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/links/short-link/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getShortLinks(self, page_no=None, page_size=None, created_by=None, active=None, q=None):
"""Get short links
:param page_no : Current page number : type integer
:param page_size : Current page size : type integer
:param created_by : Short link creator : type string
:param active : Short link active status : type string
:param q : Search text for original and short url : type string
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if created_by:
payload["created_by"] = created_by
if active:
payload["active"] = active
if q:
payload["q"] = q
# Parameter validation
schema = ShareValidator.getShortLinks()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/share/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/links/short-link/", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}}],"optional":[{"name":"page_no","in":"query","description":"Current page number","required":false,"schema":{"default":1,"type":"integer"}},{"name":"page_size","in":"query","description":"Current page size","required":false,"schema":{"default":10,"type":"integer"}},{"name":"created_by","in":"query","description":"Short link creator","required":false,"schema":{"type":"string","enum":["team"]}},{"name":"active","in":"query","description":"Short link active status","required":false,"schema":{"type":"string","enum":[true,false]}},{"name":"q","in":"query","description":"Search text for original and short url","required":false,"schema":{"type":"string"}}],"query":[{"name":"page_no","in":"query","description":"Current page number","required":false,"schema":{"default":1,"type":"integer"}},{"name":"page_size","in":"query","description":"Current page size","required":false,"schema":{"default":10,"type":"integer"}},{"name":"created_by","in":"query","description":"Short link creator","required":false,"schema":{"type":"string","enum":["team"]}},{"name":"active","in":"query","description":"Short link active status","required":false,"schema":{"type":"string","enum":[true,false]}},{"name":"q","in":"query","description":"Search text for original and short url","required":false,"schema":{"type":"string"}}],"headers":[]}""", page_no=page_no, page_size=page_size, created_by=created_by, active=active, q=q)
query_string = await create_query_string(page_no=page_no, page_size=page_size, created_by=created_by, active=active, q=q)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/share/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/links/short-link/", page_no=page_no, page_size=page_size, created_by=created_by, active=active, q=q), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getShortLinkByHash(self, hash=None):
"""Get short link by hash
:param hash : Hash of short url : type string
"""
payload = {}
if hash:
payload["hash"] = hash
# Parameter validation
schema = ShareValidator.getShortLinkByHash()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/share/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/links/short-link/{hash}/", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}},{"name":"hash","in":"path","description":"Hash of short url","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", hash=hash)
query_string = await create_query_string(hash=hash)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/share/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/links/short-link/{hash}/", hash=hash), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateShortLinkById(self, id=None, body=""):
"""Update short link by id
:param id : Short link document identifier : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = ShareValidator.updateShortLinkById()
schema.dump(schema.load(payload))
# Body validation
from .models.ShortLinkReq import ShortLinkReq
schema = ShortLinkReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/share/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/links/short-link/{id}/", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}},{"name":"id","in":"path","description":"Short link document identifier","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PATCH", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "patch", await create_url_without_domain(f"/service/platform/share/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/links/short-link/{id}/", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
class Inventory:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
class Configuration:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
async def getBuildConfig(self, platform_type=None):
"""Get latest build config
:param platform_type : Current platform name : type string
"""
payload = {}
if platform_type:
payload["platform_type"] = platform_type
# Parameter validation
schema = ConfigurationValidator.getBuildConfig()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/build/{platform_type}/configuration", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"},{"schema":{"type":"string","enum":["android","ios"]},"description":"Current platform name","in":"path","required":true,"name":"platform_type"}],"optional":[],"query":[],"headers":[]}""", platform_type=platform_type)
query_string = await create_query_string(platform_type=platform_type)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/build/{platform_type}/configuration", platform_type=platform_type), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateBuildConfig(self, platform_type=None, body=""):
"""Update build config for next build
:param platform_type : Current platform name : type string
"""
payload = {}
if platform_type:
payload["platform_type"] = platform_type
# Parameter validation
schema = ConfigurationValidator.updateBuildConfig()
schema.dump(schema.load(payload))
# Body validation
from .models.MobileAppConfigRequest import MobileAppConfigRequest
schema = MobileAppConfigRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/build/{platform_type}/configuration", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"},{"schema":{"type":"string","enum":["android","ios"]},"description":"Current platform name","in":"path","required":true,"name":"platform_type"}],"optional":[],"query":[],"headers":[]}""", platform_type=platform_type)
query_string = await create_query_string(platform_type=platform_type)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/build/{platform_type}/configuration", platform_type=platform_type), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getPreviousVersions(self, platform_type=None):
"""Get previous build versions
:param platform_type : Current platform name : type string
"""
payload = {}
if platform_type:
payload["platform_type"] = platform_type
# Parameter validation
schema = ConfigurationValidator.getPreviousVersions()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/build/{platform_type}/versions", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"},{"schema":{"type":"string","enum":["android","ios"]},"description":"Current platform name","in":"path","required":true,"name":"platform_type"}],"optional":[],"query":[],"headers":[]}""", platform_type=platform_type)
query_string = await create_query_string(platform_type=platform_type)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/build/{platform_type}/versions", platform_type=platform_type), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getAppFeatures(self, ):
"""Get features of application
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.getAppFeatures()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/feature", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/feature", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateAppFeatures(self, body=""):
"""Update features of application
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.updateAppFeatures()
schema.dump(schema.load(payload))
# Body validation
from .models.AppFeatureRequest import AppFeatureRequest
schema = AppFeatureRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/feature", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/feature", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getAppBasicDetails(self, ):
"""Get basic application details like name
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.getAppBasicDetails()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/detail", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/detail", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateAppBasicDetails(self, body=""):
"""Add or update application's basic details
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.updateAppBasicDetails()
schema.dump(schema.load(payload))
# Body validation
from .models.ApplicationDetail import ApplicationDetail
schema = ApplicationDetail()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/detail", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/detail", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getAppContactInfo(self, ):
"""Get Application Current Information. This includes information about social links, address and contact information of company/seller/brand of the application.
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.getAppContactInfo()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/information", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/information", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateAppContactInfo(self, body=""):
"""Save Application Current Information. This includes information about social links, address and contact information of an application.
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.updateAppContactInfo()
schema.dump(schema.load(payload))
# Body validation
from .models.ApplicationInformation import ApplicationInformation
schema = ApplicationInformation()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/information", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/information", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getAppApiTokens(self, ):
"""Get social tokens.
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.getAppApiTokens()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/token", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/token", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateAppApiTokens(self, body=""):
"""Add social tokens.
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.updateAppApiTokens()
schema.dump(schema.load(payload))
# Body validation
from .models.TokenResponse import TokenResponse
schema = TokenResponse()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/token", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/token", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getAppCompanies(self, page_no=None, page_size=None):
"""Application inventory enabled companies.
:param page_no : Current page no : type integer
:param page_size : Current request items count : type integer
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = ConfigurationValidator.getAppCompanies()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/companies", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"}],"headers":[]}""", page_no=page_no, page_size=page_size)
query_string = await create_query_string(page_no=page_no, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/companies", page_no=page_no, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getAppStores(self, page_no=None, page_size=None):
"""Application inventory enabled stores.
:param page_no : Current page no : type integer
:param page_size : Current request items count : type integer
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = ConfigurationValidator.getAppStores()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/stores", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"}],"headers":[]}""", page_no=page_no, page_size=page_size)
query_string = await create_query_string(page_no=page_no, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/stores", page_no=page_no, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getInventoryConfig(self, ):
"""Get application configuration for various features and data
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.getInventoryConfig()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/configuration", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/configuration", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateInventoryConfig(self, body=""):
"""Update application configuration for various features and data
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.updateInventoryConfig()
schema.dump(schema.load(payload))
# Body validation
from .models.ApplicationInventory import ApplicationInventory
schema = ApplicationInventory()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/configuration", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/configuration", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def partiallyUpdateInventoryConfig(self, body=""):
"""Partially update application configuration for various features and data
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.partiallyUpdateInventoryConfig()
schema.dump(schema.load(payload))
# Body validation
from .models.AppInventoryPartialUpdate import AppInventoryPartialUpdate
schema = AppInventoryPartialUpdate()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/configuration", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("PATCH", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "patch", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/configuration", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getAppCurrencyConfig(self, ):
"""Get application enabled currency list
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.getAppCurrencyConfig()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/currency", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/currency", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateAppCurrencyConfig(self, body=""):
"""Add initial application supported currency for various features and data. Default INR will be enabled.
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.updateAppCurrencyConfig()
schema.dump(schema.load(payload))
# Body validation
from .models.AppSupportedCurrency import AppSupportedCurrency
schema = AppSupportedCurrency()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/currency", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/currency", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getAppSupportedCurrency(self, ):
"""Use this API to get a list of currencies allowed in the current application. Moreover, get the name, code, symbol, and the decimal digits of the currencies.
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.getAppSupportedCurrency()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/currency/supported", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/currency/supported", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getOrderingStoresByFilter(self, page_no=None, page_size=None, body=""):
"""Get ordering store by filter
:param page_no : Current page no : type integer
:param page_size : Current request items count : type integer
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = ConfigurationValidator.getOrderingStoresByFilter()
schema.dump(schema.load(payload))
# Body validation
from .models.FilterOrderingStoreRequest import FilterOrderingStoreRequest
schema = FilterOrderingStoreRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ordering-store/stores/filter", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"Current page no"},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"Current request items count"}],"headers":[]}""", page_no=page_no, page_size=page_size)
query_string = await create_query_string(page_no=page_no, page_size=page_size)
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ordering-store/stores/filter", page_no=page_no, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def updateOrderingStoreConfig(self, body=""):
"""Add/Update ordering store config.
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.updateOrderingStoreConfig()
schema.dump(schema.load(payload))
# Body validation
from .models.OrderingStoreConfig import OrderingStoreConfig
schema = OrderingStoreConfig()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ordering-store", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ordering-store", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getStaffOrderingStores(self, page_no=None, page_size=None, q=None):
"""Use this API to retrieve the details of all stores access given to the staff member (the selling locations where the application will be utilized for placing orders).
:param page_no : The page number to navigate through the given set of results. Default value is 1. : type integer
:param page_size : The number of items to retrieve in each page. Default value is 10. : type integer
:param q : Store code or name of the ordering store. : type string
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if q:
payload["q"] = q
# Parameter validation
schema = ConfigurationValidator.getStaffOrderingStores()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ordering-store/staff-stores", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"The page number to navigate through the given set of results. Default value is 1."},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"The number of items to retrieve in each page. Default value is 10."},{"name":"q","in":"query","schema":{"type":"string"},"description":"Store code or name of the ordering store."}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer"},"description":"The page number to navigate through the given set of results. Default value is 1."},{"name":"page_size","in":"query","schema":{"type":"integer"},"description":"The number of items to retrieve in each page. Default value is 10."},{"name":"q","in":"query","schema":{"type":"string"},"description":"Store code or name of the ordering store."}],"headers":[]}""", page_no=page_no, page_size=page_size, q=q)
query_string = await create_query_string(page_no=page_no, page_size=page_size, q=q)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/ordering-store/staff-stores", page_no=page_no, page_size=page_size, q=q), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getDomains(self, ):
"""Get attached domain list.
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.getDomains()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/domain", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/domain", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def addDomain(self, body=""):
"""Add new domain to application.
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.addDomain()
schema.dump(schema.load(payload))
# Body validation
from .models.DomainAddRequest import DomainAddRequest
schema = DomainAddRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/domain", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/domain", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def removeDomainById(self, id=None):
"""Remove attached domain.
:param id : Domain _id : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = ConfigurationValidator.removeDomainById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/domain/{id}", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"},{"name":"id","in":"path","required":true,"description":"Domain _id","schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/domain/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def changeDomainType(self, body=""):
"""Change a domain to Primary or Shortlink domain
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.changeDomainType()
schema.dump(schema.load(payload))
# Body validation
from .models.UpdateDomainTypeRequest import UpdateDomainTypeRequest
schema = UpdateDomainTypeRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/domain/set-domain", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/domain/set-domain", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getDomainStatus(self, body=""):
"""Get domain connected status. Check if domain is live and mapped to appropriate IP to fynd servers.
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.getDomainStatus()
schema.dump(schema.load(payload))
# Body validation
from .models.DomainStatusRequest import DomainStatusRequest
schema = DomainStatusRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/domain/domain-status", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/domain/domain-status", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getApplicationById(self, ):
"""Get application data from id
"""
payload = {}
# Parameter validation
schema = ConfigurationValidator.getApplicationById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/configuration/v1.0/company/{self._conf.companyId}/application/{self.applicationId}", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
class Cart:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
async def getCoupons(self, page_no=None, page_size=None, is_archived=None, title=None, is_public=None, is_display=None, type_slug=None, code=None):
"""Get coupon list with pagination
:param page_no : : type integer
:param page_size : : type integer
:param is_archived : : type boolean
:param title : : type string
:param is_public : : type boolean
:param is_display : : type boolean
:param type_slug : : type string
:param code : : type string
"""
payload = {}
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
if is_archived:
payload["is_archived"] = is_archived
if title:
payload["title"] = title
if is_public:
payload["is_public"] = is_public
if is_display:
payload["is_display"] = is_display
if type_slug:
payload["type_slug"] = type_slug
if code:
payload["code"] = code
# Parameter validation
schema = CartValidator.getCoupons()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/coupon", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current Application _id","in":"path","required":true,"name":"application_id"}],"optional":[{"name":"page_no","in":"query","schema":{"type":"integer","default":0,"description":"current page no as per pagination"}},{"name":"page_size","in":"query","schema":{"type":"integer","default":10,"description":"Coupon max records fetched in single request"}},{"name":"is_archived","in":"query","schema":{"type":"boolean","description":"Filter by active or inactive coupon","default":false}},{"name":"title","in":"query","schema":{"type":"string","description":"Filter by `title`"}},{"name":"is_public","in":"query","schema":{"type":"boolean","description":"Filter by `is_public`"}},{"name":"is_display","in":"query","schema":{"type":"boolean","description":"Filter by `is_display`"}},{"name":"type_slug","in":"query","schema":{"type":"string","description":"Filter by coupon type"}},{"name":"code","in":"query","schema":{"type":"string","description":"Filter by `code`"}}],"query":[{"name":"page_no","in":"query","schema":{"type":"integer","default":0,"description":"current page no as per pagination"}},{"name":"page_size","in":"query","schema":{"type":"integer","default":10,"description":"Coupon max records fetched in single request"}},{"name":"is_archived","in":"query","schema":{"type":"boolean","description":"Filter by active or inactive coupon","default":false}},{"name":"title","in":"query","schema":{"type":"string","description":"Filter by `title`"}},{"name":"is_public","in":"query","schema":{"type":"boolean","description":"Filter by `is_public`"}},{"name":"is_display","in":"query","schema":{"type":"boolean","description":"Filter by `is_display`"}},{"name":"type_slug","in":"query","schema":{"type":"string","description":"Filter by coupon type"}},{"name":"code","in":"query","schema":{"type":"string","description":"Filter by `code`"}}],"headers":[]}""", page_no=page_no, page_size=page_size, is_archived=is_archived, title=title, is_public=is_public, is_display=is_display, type_slug=type_slug, code=code)
query_string = await create_query_string(page_no=page_no, page_size=page_size, is_archived=is_archived, title=title, is_public=is_public, is_display=is_display, type_slug=type_slug, code=code)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/coupon", page_no=page_no, page_size=page_size, is_archived=is_archived, title=title, is_public=is_public, is_display=is_display, type_slug=type_slug, code=code), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createCoupon(self, body=""):
"""Create new coupon
"""
payload = {}
# Parameter validation
schema = CartValidator.createCoupon()
schema.dump(schema.load(payload))
# Body validation
from .models.CouponAdd import CouponAdd
schema = CouponAdd()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/coupon", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current Application _id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/coupon", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getCouponById(self, id=None):
"""Get single coupon details with `id` in path param
:param id : : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CartValidator.getCouponById()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/coupon/{id}", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current Application _id","in":"path","required":true,"name":"application_id"},{"name":"id","in":"path","required":true,"schema":{"type":"string","description":"Coupon mongo _id for fetching single coupon data for editing"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/coupon/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateCoupon(self, id=None, body=""):
"""Update coupon with id sent in `id`
:param id : : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CartValidator.updateCoupon()
schema.dump(schema.load(payload))
# Body validation
from .models.CouponUpdate import CouponUpdate
schema = CouponUpdate()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/coupon/{id}", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current Application _id","in":"path","required":true,"name":"application_id"},{"name":"id","in":"path","schema":{"type":"string","description":"Coupon mongo _id for fetching single coupon data for editing"},"required":true}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/coupon/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def updateCouponPartially(self, id=None, body=""):
"""Update archive/unarchive and change schedule for coupon
:param id : : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = CartValidator.updateCouponPartially()
schema.dump(schema.load(payload))
# Body validation
from .models.CouponPartialUpdate import CouponPartialUpdate
schema = CouponPartialUpdate()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/coupon/{id}", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current Application _id","in":"path","required":true,"name":"application_id"},{"name":"id","in":"path","schema":{"type":"string","description":"Coupon mongo _id for fetching single coupon data for editing"},"required":true}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PATCH", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "patch", await create_url_without_domain(f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/coupon/{id}", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def fetchAndvalidateCartItems(self, body=""):
"""Get all the details of cart for a list of provided `cart_items`
"""
payload = {}
# Parameter validation
schema = CartValidator.fetchAndvalidateCartItems()
schema.dump(schema.load(payload))
# Body validation
from .models.OpenapiCartDetailsRequest import OpenapiCartDetailsRequest
schema = OpenapiCartDetailsRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/cart/validate", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current Application _id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/cart/validate", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def checkCartServiceability(self, body=""):
"""Check Pincode serviceability for cart items provided in `cart_items` and address pincode in `shipping_address`
"""
payload = {}
# Parameter validation
schema = CartValidator.checkCartServiceability()
schema.dump(schema.load(payload))
# Body validation
from .models.OpenApiCartServiceabilityRequest import OpenApiCartServiceabilityRequest
schema = OpenApiCartServiceabilityRequest()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/cart/serviceability", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current Application _id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/cart/serviceability", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def checkoutCart(self, body=""):
"""Generate Fynd order for cart details send with provided `cart_items`
"""
payload = {}
# Parameter validation
schema = CartValidator.checkoutCart()
schema.dump(schema.load(payload))
# Body validation
from .models.OpenApiPlatformCheckoutReq import OpenApiPlatformCheckoutReq
schema = OpenApiPlatformCheckoutReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/cart/checkout", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current Application _id","in":"path","required":true,"name":"application_id"}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/cart/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/cart/checkout", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
class Rewards:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
async def getGiveaways(self, page_id=None, page_size=None):
"""List of giveaways of the current application.
:param page_id : pagination page id : type string
:param page_size : pagination page size : type integer
"""
payload = {}
if page_id:
payload["page_id"] = page_id
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = RewardsValidator.getGiveaways()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/giveaways/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}}],"optional":[{"description":"pagination page id","in":"query","name":"page_id","schema":{"type":"string"}},{"description":"pagination page size","in":"query","name":"page_size","schema":{"type":"integer"}}],"query":[{"description":"pagination page id","in":"query","name":"page_id","schema":{"type":"string"}},{"description":"pagination page size","in":"query","name":"page_size","schema":{"type":"integer"}}],"headers":[]}""", page_id=page_id, page_size=page_size)
query_string = await create_query_string(page_id=page_id, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/giveaways/", page_id=page_id, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def createGiveaway(self, body=""):
"""Adds a new giveaway.
"""
payload = {}
# Parameter validation
schema = RewardsValidator.createGiveaway()
schema.dump(schema.load(payload))
# Body validation
from .models.Giveaway import Giveaway
schema = Giveaway()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/giveaways/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/giveaways/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getGiveawayByID(self, id=None):
"""Get giveaway by ID.
:param id : Giveaway ID : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = RewardsValidator.getGiveawayByID()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/giveaways/{id}/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}},{"description":"Giveaway ID","in":"path","name":"id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/giveaways/{id}/", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateGiveaway(self, id=None, body=""):
"""Updates the giveaway by it's ID.
:param id : Giveaway ID : type string
"""
payload = {}
if id:
payload["id"] = id
# Parameter validation
schema = RewardsValidator.updateGiveaway()
schema.dump(schema.load(payload))
# Body validation
from .models.Giveaway import Giveaway
schema = Giveaway()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/giveaways/{id}/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}},{"description":"Giveaway ID","in":"path","name":"id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", id=id)
query_string = await create_query_string(id=id)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/giveaways/{id}/", id=id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getOffers(self, ):
"""List of offer of the current application.
"""
payload = {}
# Parameter validation
schema = RewardsValidator.getOffers()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/offers/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/offers/", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getOfferByName(self, cookie=None, name=None):
"""Get offer by name.
:param cookie : User's session cookie. This cookie is set in browser cookie when logged-in to fynd's authentication system i.e. `Grimlock` or by using grimlock-backend SDK for backend implementation. : type string
:param name : Offer name : type string
"""
payload = {}
if cookie:
payload["cookie"] = cookie
if name:
payload["name"] = name
# Parameter validation
schema = RewardsValidator.getOfferByName()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/offers/{name}/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}},{"description":"User's session cookie. This cookie is set in browser cookie when logged-in to fynd's authentication system i.e. `Grimlock` or by using grimlock-backend SDK for backend implementation.","in":"header","name":"cookie","required":true,"schema":{"type":"string"}},{"description":"Offer name","in":"path","name":"name","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[{"description":"User's session cookie. This cookie is set in browser cookie when logged-in to fynd's authentication system i.e. `Grimlock` or by using grimlock-backend SDK for backend implementation.","in":"header","name":"cookie","required":true,"schema":{"type":"string"}}]}""", cookie=cookie, name=name)
query_string = await create_query_string(cookie=cookie, name=name)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/offers/{name}/", cookie=cookie, name=name), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateOfferByName(self, name=None, body=""):
"""Updates the offer by name.
:param name : Offer name : type string
"""
payload = {}
if name:
payload["name"] = name
# Parameter validation
schema = RewardsValidator.updateOfferByName()
schema.dump(schema.load(payload))
# Body validation
from .models.Offer import Offer
schema = Offer()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/offers/{name}/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}},{"description":"Offer name","in":"path","name":"name","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", name=name)
query_string = await create_query_string(name=name)
return await AiohttpHelper().aiohttp_request("PUT", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "put", await create_url_without_domain(f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/offers/{name}/", name=name), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getUserAvailablePoints(self, user_id=None):
"""User's reward details.
:param user_id : user id : type string
"""
payload = {}
if user_id:
payload["user_id"] = user_id
# Parameter validation
schema = RewardsValidator.getUserAvailablePoints()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/users/{user_id}/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}},{"description":"user id","in":"path","name":"user_id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", user_id=user_id)
query_string = await create_query_string(user_id=user_id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/users/{user_id}/", user_id=user_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def updateUserStatus(self, user_id=None, body=""):
"""Update user status, active/archive
:param user_id : user id : type string
"""
payload = {}
if user_id:
payload["user_id"] = user_id
# Parameter validation
schema = RewardsValidator.updateUserStatus()
schema.dump(schema.load(payload))
# Body validation
from .models.AppUser import AppUser
schema = AppUser()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/users/{user_id}/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}},{"description":"user id","in":"path","name":"user_id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", user_id=user_id)
query_string = await create_query_string(user_id=user_id)
return await AiohttpHelper().aiohttp_request("PATCH", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "patch", await create_url_without_domain(f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/users/{user_id}/", user_id=user_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def getUserPointsHistory(self, user_id=None, page_id=None, page_limit=None, page_size=None):
"""Get list of points transactions.
The list of points history is paginated.
:param user_id : user id : type string
:param page_id : PageID is the ID of the requested page. For first request it should be kept empty. : type string
:param page_limit : PageLimit is the number of requested items in response. : type integer
:param page_size : PageSize is the number of requested items in response. : type integer
"""
payload = {}
if user_id:
payload["user_id"] = user_id
if page_id:
payload["page_id"] = page_id
if page_limit:
payload["page_limit"] = page_limit
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = RewardsValidator.getUserPointsHistory()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/users/{user_id}/points/history/", """{"required":[{"description":"company id","in":"path","name":"company_id","required":true,"schema":{"type":"string"}},{"description":"application id","in":"path","name":"application_id","required":true,"schema":{"type":"string"}},{"description":"user id","in":"path","name":"user_id","required":true,"schema":{"type":"string"}}],"optional":[{"description":"PageID is the ID of the requested page. For first request it should be kept empty.","in":"query","name":"page_id","schema":{"type":"string"}},{"description":"PageLimit is the number of requested items in response.","in":"query","name":"page_limit","schema":{"type":"integer"}},{"description":"PageSize is the number of requested items in response.","in":"query","name":"page_size","schema":{"type":"integer"}}],"query":[{"description":"PageID is the ID of the requested page. For first request it should be kept empty.","in":"query","name":"page_id","schema":{"type":"string"}},{"description":"PageLimit is the number of requested items in response.","in":"query","name":"page_limit","schema":{"type":"integer"}},{"description":"PageSize is the number of requested items in response.","in":"query","name":"page_size","schema":{"type":"integer"}}],"headers":[]}""", user_id=user_id, page_id=page_id, page_limit=page_limit, page_size=page_size)
query_string = await create_query_string(user_id=user_id, page_id=page_id, page_limit=page_limit, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/rewards/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/users/{user_id}/points/history/", user_id=user_id, page_id=page_id, page_limit=page_limit, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
class Analytics:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
async def getStatiscticsGroups(self, ):
"""Get statistics groups
"""
payload = {}
# Parameter validation
schema = AnalyticsValidator.getStatiscticsGroups()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/stats/group", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", )
query_string = await create_query_string()
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/stats/group", ), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getStatiscticsGroupComponents(self, group_name=None):
"""Get statistics group components
:param group_name : Group name : type string
"""
payload = {}
if group_name:
payload["group_name"] = group_name
# Parameter validation
schema = AnalyticsValidator.getStatiscticsGroupComponents()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/stats/group/{group_name}", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}},{"name":"group_name","in":"path","description":"Group name","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", group_name=group_name)
query_string = await create_query_string(group_name=group_name)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/stats/group/{group_name}", group_name=group_name), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getComponentStatsCSV(self, component_name=None):
"""Get component statistics csv
:param component_name : Component name : type string
"""
payload = {}
if component_name:
payload["component_name"] = component_name
# Parameter validation
schema = AnalyticsValidator.getComponentStatsCSV()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/stats/component/{component_name}.csv", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}},{"name":"component_name","in":"path","description":"Component name","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", component_name=component_name)
query_string = await create_query_string(component_name=component_name)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/stats/component/{component_name}.csv", component_name=component_name), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getComponentStatsPDF(self, component_name=None):
"""Get component statistics pdf
:param component_name : Component name : type string
"""
payload = {}
if component_name:
payload["component_name"] = component_name
# Parameter validation
schema = AnalyticsValidator.getComponentStatsPDF()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/stats/component/{component_name}.pdf", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}},{"name":"component_name","in":"path","description":"Component name","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", component_name=component_name)
query_string = await create_query_string(component_name=component_name)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/stats/component/{component_name}.pdf", component_name=component_name), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getComponentStats(self, component_name=None):
"""Get component statistics
:param component_name : Component name : type string
"""
payload = {}
if component_name:
payload["component_name"] = component_name
# Parameter validation
schema = AnalyticsValidator.getComponentStats()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/stats/component/{component_name}", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}},{"name":"component_name","in":"path","description":"Component name","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", component_name=component_name)
query_string = await create_query_string(component_name=component_name)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/stats/component/{component_name}", component_name=component_name), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getAbandonCartList(self, from_date=None, to_date=None, page_no=None, page_size=None):
"""Get abandon carts list
:param from_date : From date : type string
:param to_date : To date : type string
:param page_no : Current page number : type integer
:param page_size : Current page size : type integer
"""
payload = {}
if from_date:
payload["from_date"] = from_date
if to_date:
payload["to_date"] = to_date
if page_no:
payload["page_no"] = page_no
if page_size:
payload["page_size"] = page_size
# Parameter validation
schema = AnalyticsValidator.getAbandonCartList()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/cart/from/{from_date}/to/{to_date}/abandon-cart/", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}},{"name":"from_date","in":"path","description":"From date","required":true,"schema":{"type":"string"}},{"name":"to_date","in":"path","description":"To date","required":true,"schema":{"type":"string"}}],"optional":[{"name":"page_no","in":"query","description":"Current page number","required":false,"schema":{"type":"integer","default":0}},{"name":"page_size","in":"query","description":"Current page size","required":false,"schema":{"type":"integer","default":10}}],"query":[{"name":"page_no","in":"query","description":"Current page number","required":false,"schema":{"type":"integer","default":0}},{"name":"page_size","in":"query","description":"Current page size","required":false,"schema":{"type":"integer","default":10}}],"headers":[]}""", from_date=from_date, to_date=to_date, page_no=page_no, page_size=page_size)
query_string = await create_query_string(from_date=from_date, to_date=to_date, page_no=page_no, page_size=page_size)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/cart/from/{from_date}/to/{to_date}/abandon-cart/", from_date=from_date, to_date=to_date, page_no=page_no, page_size=page_size), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getAbandonCartsCSV(self, from_date=None, to_date=None):
"""Get abandon carts csv
:param from_date : From date : type string
:param to_date : To date : type string
"""
payload = {}
if from_date:
payload["from_date"] = from_date
if to_date:
payload["to_date"] = to_date
# Parameter validation
schema = AnalyticsValidator.getAbandonCartsCSV()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/cart/{from_date}/to/{to_date}/abandon-cart.csv", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}},{"name":"from_date","in":"path","description":"From date","required":true,"schema":{"type":"string"}},{"name":"to_date","in":"path","description":"To date","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", from_date=from_date, to_date=to_date)
query_string = await create_query_string(from_date=from_date, to_date=to_date)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/cart/{from_date}/to/{to_date}/abandon-cart.csv", from_date=from_date, to_date=to_date), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
async def getAbandonCartDetail(self, cart_id=None):
"""Get abandon cart details
:param cart_id : Cart Id : type string
"""
payload = {}
if cart_id:
payload["cart_id"] = cart_id
# Parameter validation
schema = AnalyticsValidator.getAbandonCartDetail()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/cart/abandon-cart/{cart_id}", """{"required":[{"name":"company_id","in":"path","description":"Company Id","required":true,"schema":{"type":"string"}},{"name":"application_id","in":"path","description":"Application Id","required":true,"schema":{"type":"string"}},{"name":"cart_id","in":"path","description":"Cart Id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", cart_id=cart_id)
query_string = await create_query_string(cart_id=cart_id)
return await AiohttpHelper().aiohttp_request("GET", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "get", await create_url_without_domain(f"/service/platform/analytics/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/cart/abandon-cart/{cart_id}", cart_id=cart_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
class Discount:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
class Partner:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
async def addProxyPath(self, extension_id=None, body=""):
"""Add proxy path for external url
:param extension_id : Extension id : type string
"""
payload = {}
if extension_id:
payload["extension_id"] = extension_id
# Parameter validation
schema = PartnerValidator.addProxyPath()
schema.dump(schema.load(payload))
# Body validation
from .models.AddProxyReq import AddProxyReq
schema = AddProxyReq()
schema.dump(schema.load(body))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/partners/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/proxy/{extension_id}", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"},{"name":"extension_id","in":"path","description":"Extension id","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", extension_id=extension_id)
query_string = await create_query_string(extension_id=extension_id)
return await AiohttpHelper().aiohttp_request("POST", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "post", await create_url_without_domain(f"/service/platform/partners/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/proxy/{extension_id}", extension_id=extension_id), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, body, exclude_headers=["Authorization"]), data=body)
async def removeProxyPath(self, extension_id=None, attached_path=None):
"""Remove proxy path for external url
:param extension_id : Extension id : type string
        :param attached_path : Attached path slug : type string
"""
payload = {}
if extension_id:
payload["extension_id"] = extension_id
if attached_path:
payload["attached_path"] = attached_path
# Parameter validation
schema = PartnerValidator.removeProxyPath()
schema.dump(schema.load(payload))
url_with_params = await create_url_with_params(self._conf.domain, f"/service/platform/partners/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/proxy/{extension_id}/{attached_path}", """{"required":[{"schema":{"type":"string"},"description":"Current company id","in":"path","required":true,"name":"company_id"},{"schema":{"type":"string"},"description":"Current application id","in":"path","required":true,"name":"application_id"},{"name":"extension_id","in":"path","description":"Extension id","required":true,"schema":{"type":"string"}},{"name":"attached_path","in":"path","description":"Attachaed path slug","required":true,"schema":{"type":"string"}}],"optional":[],"query":[],"headers":[]}""", extension_id=extension_id, attached_path=attached_path)
query_string = await create_query_string(extension_id=extension_id, attached_path=attached_path)
return await AiohttpHelper().aiohttp_request("DELETE", url_with_params, headers=await get_headers_with_signature(self._conf.domain, "delete", await create_url_without_domain(f"/service/platform/partners/v1.0/company/{self._conf.companyId}/application/{self.applicationId}/proxy/{extension_id}/{attached_path}", extension_id=extension_id, attached_path=attached_path), query_string, {"Authorization": "Bearer " + await self._conf.getAccessToken()}, "", exclude_headers=["Authorization"]), data="")
class Webhook:
def __init__(self, config, applicationId):
self._conf = config
self.applicationId = applicationId
class PlatformApplicationClient:
def __init__(self, applicationId, config):
self.common = Common(config, applicationId)
self.lead = Lead(config, applicationId)
self.feedback = Feedback(config, applicationId)
self.theme = Theme(config, applicationId)
self.user = User(config, applicationId)
self.content = Content(config, applicationId)
self.billing = Billing(config, applicationId)
self.communication = Communication(config, applicationId)
self.payment = Payment(config, applicationId)
self.order = Order(config, applicationId)
self.catalog = Catalog(config, applicationId)
self.companyProfile = CompanyProfile(config, applicationId)
self.fileStorage = FileStorage(config, applicationId)
self.share = Share(config, applicationId)
self.inventory = Inventory(config, applicationId)
self.configuration = Configuration(config, applicationId)
self.cart = Cart(config, applicationId)
self.rewards = Rewards(config, applicationId)
self.analytics = Analytics(config, applicationId)
self.discount = Discount(config, applicationId)
self.partner = Partner(config, applicationId)
self.webhook = Webhook(config, applicationId)
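# --- Illustrative usage sketch (not part of the generated SDK source) ---
# A minimal example of how the PlatformApplicationClient defined above might be driven.
# The config object is a stand-in: the methods in this module only rely on `domain`,
# `companyId` and an awaitable `getAccessToken()`; the base URL, company id and
# application id below are placeholder assumptions, not real values.
if __name__ == "__main__":
    import asyncio
    from types import SimpleNamespace

    async def _get_token():
        # Stand-in token provider; a real config would fetch an OAuth access token.
        return "<access-token>"

    _config = SimpleNamespace(
        domain="https://api.example.com",   # assumed platform base URL
        companyId="<company_id>",
        getAccessToken=_get_token,
    )

    async def _demo():
        client = PlatformApplicationClient("<application_id>", _config)
        # Fetch the first page of coupons for the application (Cart.getCoupons above).
        coupons = await client.cart.getCoupons(page_no=1, page_size=10)
        print(coupons)

    asyncio.run(_demo())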
hexsha: 304cfc9cd5e746d495094135e20125342a834852 | size: 6,211 | ext: py | lang: Python
path: Genre Prediction/Making_Dataframes/Making_df_horror.py
repo: chanakya1310/background-score-prediction | head_hexsha: e56a5534e9a26a459a8ff561594a2fdef5276f7f | licenses: ["MIT"] | stars/issues/forks: null
import pandas as pd
rows = [[0.0, 0.0, 0.0, 0.03, 0.63, 0.34, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.0, 0.11, 0.79, 0.11, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 0.0, 0.71, 0.29, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0], [0.0, 0.0, 0.03, 0.01, 0.52, 0.44, 0.0, 0.0, 1.0],
[0.08, 0.0, 0.47, 0.12, 0.31, 0.02, 0.0, 0.0, 1.0], [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.9, 0.0, 0.1, 0.0, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 0.02, 0.94, 0.04, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 0.92, 0.08, 0.0, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.53, 0.0, 0.43, 0.04, 0.0, 0.0, 1.0], [0.0, 0.0, 0.02, 0.07, 0.86, 0.0, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 0.53, 0.39, 0.08, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.0, 0.79, 0.04, 0.17, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0]] # For horror.mp4
rows1 = [[0.0, 0.0, 0.0, 0.8, 0.2, 0.0, 0.0, 0.0, 1.0, 3], [0.01, 0.0, 0.14, 0.67, 0.0, 0.17, 0.0, 0.0, 1.0, 3],
[0.02, 0.0, 0.13, 0.39, 0.46, 0.0, 0.0, 0.0, 1.0, 3], [0.03, 0.01, 0.02, 0.59, 0.35, 0.0, 0.0, 0.0, 1.0, 3],
[0.0, 0.0, 0.0, 0.86, 0.14, 0.0, 0.0, 0.0, 1.0, 3], [0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 3],
[0.0, 0.0, 0.01, 0.98, 0.01, 0.0, 0.0, 0.0, 1.0, 3], [0.0, 0.0, 0.03, 0.96, 0.0, 0.01, 0.0, 0.0, 1.0, 3],
[0.15, 0.0, 0.01, 0.79, 0.04, 0.01, 0.0, 0.0, 1.0, 3], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0],
[0.2, 0.0, 0.42, 0.0, 0.0, 0.38, 0.0, 0.0, 1.0, 0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0],
[0.0, 0.0, 0.06, 0.76, 0.06, 0.12, 0.0, 0.0, 1.0, 0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0]] # For horror1.mp4
rows2 = [[0.0, 0.0, 0.02, 0.26, 0.69, 0.02, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
        [0.0, 0.0, 0.57, 0.0, 0.37, 0.06, 0.0, 0.0, 1.0], [0.0, 0.0, 0.3, 0.0, 0.54, 0.14, 0.02, 0.0, 1.0],
        [0.05, 0.0, 0.84, 0.05, 0.02, 0.05, 0.0, 0.0, 1.0], [0.08, 0.0, 0.02, 0.04, 0.83, 0.01, 0.0, 0.0, 1.0],
        [0.0, 0.0, 0.44, 0.36, 0.2, 0.0, 0.0, 0.0, 1.0], [0.02, 0.0, 0.48, 0.48, 0.02, 0.0, 0.0, 0.0, 1.0],
        [0.15, 0.0, 0.52, 0.23, 0.04, 0.0, 0.0, 0.0, 1.0], [0.04, 0.0, 0.53, 0.15, 0.1, 0.05, 0.02, 0.0, 1.0],
        [0.0, 0.0, 0.31, 0.45, 0.24, 0.0, 0.0, 0.0, 1.0], [0.24, 0.0, 0.27, 0.29, 0.14, 0.05, 0.0, 0.0, 1.0],
        [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0], [0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
        [0.46, 0.0, 0.06, 0.32, 0.0, 0.15, 0.0, 0.0, 1.0], [0.0, 0.0, 0.17, 0.57, 0.23, 0.03, 0.0, 0.0, 1.0],
        [0.0, 0.0, 0.34, 0.23, 0.28, 0.15, 0.0, 0.0, 1.0], [0.01, 0.0, 0.98, 0.0, 0.01, 0.0, 0.0, 0.0, 1.0],
        [0.01, 0.0, 0.99, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
        [0.52, 0.0, 0.3, 0.0, 0.13, 0.04, 0.0, 0.0, 1.0], [0.0, 0.0, 0.49, 0.02, 0.11, 0.38, 0.0, 0.0, 1.0],
        [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0]]  # For horror2.mp4
rows3 = [[0.0, 0.0, 0.12, 0.24, 0.0, 0.64, 0.0, 0.0, 1.0], [0.03, 0.0, 0.31, 0.04, 0.09, 0.31, 0.0, 0.0, 1.0],
[0.03, 0.0, 0.27, 0.08, 0.4, 0.18, 0.0, 0.0, 1.0], [0.01, 0.0, 0.23, 0.43, 0.2, 0.08, 0.03, 0.0, 1.0],
[0.01, 0.0, 0.38, 0.02, 0.49, 0.08, 0.0, 0.0, 1.0], [0.0, 0.0, 0.31, 0.44, 0.2, 0.01, 0.03, 0.0, 1.0],
[0.0, 0.0, 0.04, 0.05, 0.58, 0.22, 0.0, 0.0, 1.0], [0.07, 0.0, 0.11, 0.06, 0.6, 0.17, 0.0, 0.0, 1.0],
[0.14, 0.0, 0.07, 0.0, 0.34, 0.39, 0.0, 0.0, 1.0], [0.2, 0.0, 0.0, 0.4, 0.2, 0.2, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.08, 0.5, 0.12, 0.0, 0.0, 0.0, 1.0], [0.06, 0.0, 0.64, 0.07, 0.03, 0.09, 0.0, 1.0, 0.0],
[0.12, 0.0, 0.36, 0.03, 0.17, 0.25, 0.0, 0.0, 1.0], [0.01, 0.0, 0.21, 0.08, 0.03, 0.31, 0.0, 0.0, 1.0],
[0.06, 0.0, 0.04, 0.06, 0.0, 0.65, 0.0, 0.0, 1.0], [0.23, 0.0, 0.05, 0.11, 0.61, 0.0, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 0.67, 0.0, 0.33, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.3, 0.3, 0.36, 0.04, 0.0, 0.0, 1.0], [0.0, 0.0, 0.28, 0.0, 0.47, 0.24, 0.0, 0.0, 1.0],
[0.25, 0.0, 0.12, 0.38, 0.12, 0.12, 0.0, 0.0, 1.0], [0.11, 0.0, 0.11, 0.12, 0.0, 0.66, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.59, 0.05, 0.37, 0.0, 0.0, 0.0, 1.0]]
rows4 = [[0.1, 0.0, 0.19, 0.09, 0.27, 0.33, 0.02, 0.0, 1.0], [0.08, 0.0, 0.78, 0.11, 0.0, 0.03, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 1.0], [0.07, 0.0, 0.0, 0.0, 0.42, 0.51, 0.0, 0.0, 1.0],
[0.17, 0.0, 0.0, 0.05, 0.0, 0.78, 0.0, 0.0, 1.0], [0.01, 0.0, 0.45, 0.13, 0.37, 0.05, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.01, 0.03, 0.0, 0.96, 0.0, 0.0, 1.0], [0.2, 0.0, 0.67, 0.07, 0.01, 0.03, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.57, 0.06, 0.14, 0.13, 0.1, 0.0, 1.0], [0.17, 0.0, 0.0, 0.08, 0.58, 0.08, 0.08, 0.0, 1.0],
[0.06, 0.0, 0.0, 0.0, 0.94, 0.0, 0.0, 0.0, 1.0], [0.02, 0.0, 0.07, 0.02, 0.12, 0.48, 0.14, 0.0, 1.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0], [0.0, 0.0, 0.11, 0.0, 0.44, 0.39, 0.0, 0.0, 1.0],
[0.0, 0.0, 0.06, 0.18, 0.27, 0.49, 0.0, 0.0, 1.0], [0.04, 0.0, 0.07, 0.0, 0.41, 0.48, 0.0, 0.0, 1.0],
[0.25, 0.0, 0.0, 0.45, 0.07, 0.15, 0.09, 0.0, 1.0],
[0.2, 0.0, 0.0, 0.73, 0.0, 0.07, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0]] # For horror4.mp4
# Append the genre label (0) to every row set that does not already carry it;
# the rows1 literals above already include the label as their tenth value.
for row_set in (rows, rows2, rows3, rows4):
    for i in range(len(row_set)):
        row_set[i] = row_set[i] + [0]
rows = rows + rows1 + rows2 + rows3 + rows4
df = pd.DataFrame(rows, columns = ["Angry", "Disgusted", "Fearful", "Happy", "Neutral", "Sad", "Surprised", "Light Background", "Dark Background", "Genre"])
df.to_csv('horror.csv', index = False)
print(df.head())
print(df.shape)
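# Hedged follow-up sketch (editor's addition, not in the original script): reload the CSV
# written above and inspect it; pandas is already imported as pd by the code above.
df_check = pd.read_csv('horror.csv')
print(df_check.columns.tolist())           # the ten column names defined above
print(df_check['Genre'].value_counts())    # every row here carries genre label 0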
| 77.6375
| 1,156
| 0.422959
| 2,036
| 6,211
| 1.289784
| 0.058939
| 0.814928
| 0.935644
| 0.935263
| 0.755522
| 0.673648
| 0.642041
| 0.597487
| 0.550267
| 0.509139
| 0
| 0.456011
| 0.211238
| 6,211
| 80
| 1,157
| 77.6375
| 0.080016
| 0.009982
| 0
| 0
| 0
| 0
| 0.014809
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016393
| 0
| 0.016393
| 0.032787
| 0
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
306a166ef98d73de6bf5bfd07de5b3899d334743
| 2,710
|
py
|
Python
|
flypy/compiler/tests/test_signature.py
|
filmackay/flypy
|
d64e70959c5c8af9e914dcc3ce1068fb99859c3a
|
[
"BSD-2-Clause"
] | null | null | null |
flypy/compiler/tests/test_signature.py
|
filmackay/flypy
|
d64e70959c5c8af9e914dcc3ce1068fb99859c3a
|
[
"BSD-2-Clause"
] | null | null | null |
flypy/compiler/tests/test_signature.py
|
filmackay/flypy
|
d64e70959c5c8af9e914dcc3ce1068fb99859c3a
|
[
"BSD-2-Clause"
] | 1
|
2020-01-01T00:43:24.000Z
|
2020-01-01T00:43:24.000Z
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import
import unittest
from flypy.compiler.signature import flatargs
class TestArgParse(unittest.TestCase):
def test_empty(self):
def f():
assert False
self.assertEqual(flatargs(f, (), {}), ())
self.assertRaises(TypeError, flatargs, f, (1,), {})
self.assertRaises(TypeError, flatargs, f, (), {'a': 10})
def test_empty_varargs(self):
def f(*args):
assert False
self.assertEqual(flatargs(f, (), {}), ((),))
self.assertEqual(flatargs(f, (1, 2, 3), {}), ((1, 2, 3),))
self.assertRaises(TypeError, flatargs, f, (), {'a': 10})
def test_empty_keywords(self):
def f(**kwargs):
assert False
self.assertEqual(flatargs(f, (), {}), ({},))
self.assertEqual(flatargs(f, (), {'a': 10}), ({'a': 10},))
self.assertRaises(TypeError, flatargs, f, (1, 2, 3), {})
def test_args(self):
def f(a, b):
assert False
self.assertEqual(flatargs(f, (1, 2), {}), (1, 2))
self.assertEqual(flatargs(f, (1,), {'b': 2}), (1, 2))
self.assertEqual(flatargs(f, (), {'a': 1, 'b': 2}), (1, 2))
self.assertRaises(TypeError, flatargs, f, (1, 2, 3), {})
self.assertRaises(TypeError, flatargs, f, (1,), {})
self.assertRaises(TypeError, flatargs, f, (1, 2), {'b': 3})
self.assertRaises(TypeError, flatargs, f, (), {'a': 1, 'b': 2, 'c': 3})
def test_defaults(self):
def f(a, b=3):
assert False
self.assertEqual(flatargs(f, (1, 2), {}), (1, 2))
self.assertEqual(flatargs(f, (1,), {'b': 2}), (1, 2))
self.assertEqual(flatargs(f, (), {'a': 1, 'b': 2}), (1, 2))
self.assertEqual(flatargs(f, (), {'a': 1}), (1, 3))
self.assertEqual(flatargs(f, (1,), {}), (1, 3))
self.assertRaises(TypeError, flatargs, f, (1, 2, 3), {})
self.assertRaises(TypeError, flatargs, f, (), {})
self.assertRaises(TypeError, flatargs, f, (1, 2), {'b': 3})
self.assertRaises(TypeError, flatargs, f, (), {'a': 1, 'b': 2, 'c': 3})
def test_all_the_above(self):
def f(a, b=3, *args, **kwargs):
assert False
self.assertEqual(flatargs(f, (1,), {}), (1, 3, (), {}))
self.assertEqual(flatargs(f, (1, 2), {}), (1, 2, (), {}))
self.assertEqual(flatargs(f, (1,), {'b': 2}), (1, 2, (), {}))
self.assertEqual(flatargs(f, (1, 2, 3, 4), {'d': 4}),
(1, 2, (3, 4), {'d': 4}))
self.assertRaises(TypeError, flatargs, f, (1, 2, 3), {'b': 2, 'd': 4})
if __name__ == '__main__':
unittest.main()
| 35.657895
| 79
| 0.515867
| 338
| 2,710
| 4.065089
| 0.142012
| 0.196507
| 0.131004
| 0.296943
| 0.804949
| 0.796943
| 0.774381
| 0.73508
| 0.631004
| 0.631004
| 0
| 0.047785
| 0.258672
| 2,710
| 76
| 80
| 35.657895
| 0.636137
| 0.007749
| 0
| 0.418182
| 0
| 0
| 0.011905
| 0
| 0
| 0
| 0
| 0
| 0.654545
| 1
| 0.218182
| false
| 0
| 0.054545
| 0
| 0.290909
| 0.018182
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
064d0084cebc0ae71da71647ee880e625a35ed5b
| 1,457
|
py
|
Python
|
amberpy/utilities.py
|
pacilab/amberpy
|
80767f9222243b7aafb61358c3397dc44e15da02
|
[
"MIT"
] | null | null | null |
amberpy/utilities.py
|
pacilab/amberpy
|
80767f9222243b7aafb61358c3397dc44e15da02
|
[
"MIT"
] | null | null | null |
amberpy/utilities.py
|
pacilab/amberpy
|
80767f9222243b7aafb61358c3397dc44e15da02
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed May 26
@author: bs15ansj
This module contains simple utility functions used by other modules of
amberpy.
get_name_from_file(file)
This function returns a name based on an input file. The name is the
basename of the file without the suffix. If the file is None, None will
be returned.
get_name_from_input_list(input_list)
Given a list of inputs (which may be names or file names), this function
returns the inputs as a string containing the names of the inputs separated
by '.'. If any of the inputs are None, these are not included in the
returned name string.
"""
import os
def get_name_from_file(file):
'''
This function returns a name based on an input file. The name is the
basename of the file without the suffix. If the file is None, None will
be returned.
'''
try:
return '.'.join(os.path.basename(file).split('.')[:-1])
except:
return None
def get_name_from_input_list(input_list):
'''
Given a list of inputs (which may be names or file names), this function
returns the inputs as a string containing the names of the inputs separated
by '.'. If any of the inputs are None, these are not included in the
returned name string.
'''
return '.'.join(filter(None.__ne__,[get_name_from_file(file) if get_name_from_file(file) != '' else file for file in input_list]))
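# Hedged usage sketch (editor's addition, not part of the original module), based on the
# docstrings above:
#   get_name_from_file('/home/user/complex.prmtop')           -> 'complex'
#   get_name_from_file(None)                                  -> None
#   get_name_from_input_list(['prot.pdb', None, 'lig.mol2'])  -> 'prot.lig'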
| 34.690476
| 134
| 0.696637
| 237
| 1,457
| 4.168776
| 0.320675
| 0.04251
| 0.066802
| 0.060729
| 0.755061
| 0.716599
| 0.716599
| 0.716599
| 0.716599
| 0.716599
| 0
| 0.006206
| 0.225806
| 1,457
| 42
| 134
| 34.690476
| 0.869681
| 0.728209
| 0
| 0
| 0
| 0
| 0.008902
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.125
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
2332c2eb0625e035d5d561f276ec7ca621e35fcb
| 173
|
py
|
Python
|
tests/parser/detcons.2.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/detcons.2.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/detcons.2.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
c | d.
a | c.
a | z.
:- not a.
:- not z.
x | w.
z | x.
"""
output = """
c | d.
a | c.
a | z.
:- not a.
:- not z.
x | w.
z | x.
"""
| 6.407407
| 12
| 0.271676
| 30
| 173
| 1.566667
| 0.3
| 0.170213
| 0.12766
| 0.170213
| 0.765957
| 0.765957
| 0.765957
| 0.765957
| 0.765957
| 0.765957
| 0
| 0
| 0.462428
| 173
| 26
| 13
| 6.653846
| 0.505376
| 0
| 0
| 0.888889
| 0
| 0
| 0.794702
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
23539a9308dbb3af66570f63fbbaa8c62dd431db
| 159
|
py
|
Python
|
admin_views/compat.py
|
estyxx/django-admin-views
|
f370a69a658301647033f778c472e044283d82e7
|
[
"BSD-3-Clause"
] | 85
|
2015-01-18T05:52:43.000Z
|
2021-06-21T15:58:59.000Z
|
admin_views/compat.py
|
estyxx/django-admin-views
|
f370a69a658301647033f778c472e044283d82e7
|
[
"BSD-3-Clause"
] | 23
|
2015-02-01T16:53:59.000Z
|
2021-12-02T15:41:36.000Z
|
admin_views/compat.py
|
estyxx/django-admin-views
|
f370a69a658301647033f778c472e044283d82e7
|
[
"BSD-3-Clause"
] | 26
|
2015-07-26T20:54:55.000Z
|
2020-12-23T11:48:11.000Z
|
try:
from django.utils.module_loading import import_string
except ImportError:
from django.utils.module_loading import import_by_path as import_string
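# Hedged usage sketch (editor's addition): whichever import wins, the resulting callable
# resolves a dotted path to the object it names, e.g.
#   Paginator = import_string('django.core.paginator.Paginator')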
| 31.8
| 75
| 0.830189
| 23
| 159
| 5.478261
| 0.565217
| 0.15873
| 0.238095
| 0.333333
| 0.634921
| 0.634921
| 0.634921
| 0
| 0
| 0
| 0
| 0
| 0.132075
| 159
| 4
| 76
| 39.75
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
236af32b7243736528e0fbbfa1ae265e7624aae3
| 16,652
|
py
|
Python
|
netests/converters/vrf/iosxr/nc.py
|
Netests/netests
|
1a48bda461761c4ec854d6fa0c38629049009a4a
|
[
"MIT"
] | 14
|
2020-06-08T07:34:59.000Z
|
2022-03-14T08:52:03.000Z
|
netests/converters/vrf/iosxr/nc.py
|
Netests/netests
|
1a48bda461761c4ec854d6fa0c38629049009a4a
|
[
"MIT"
] | null | null | null |
netests/converters/vrf/iosxr/nc.py
|
Netests/netests
|
1a48bda461761c4ec854d6fa0c38629049009a4a
|
[
"MIT"
] | 3
|
2020-06-19T03:57:05.000Z
|
2020-06-22T22:46:42.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from netests.constants import NOT_SET
from netests.tools.nc import format_xml_output
from netests.protocols.vrf import VRF, ListVRF
def _iosxr_vrf_nc_converter(
hostname: str(),
cmd_output,
options={}
) -> ListVRF:
if cmd_output.get('VRF') is not None:
cmd_output['VRF'] = format_xml_output(cmd_output.get('VRF'))
if cmd_output.get('BGP') is not None:
cmd_output['BGP'] = format_xml_output(cmd_output.get('BGP'))
vrf_list = ListVRF(vrf_lst=list())
vrf_list.vrf_lst.append(
VRF(
vrf_name="default",
vrf_id=NOT_SET,
vrf_type=NOT_SET,
l3_vni=NOT_SET,
rd=NOT_SET,
rt_imp=NOT_SET,
rt_exp=NOT_SET,
imp_targ=NOT_SET,
exp_targ=NOT_SET,
options=options
)
)
if (
'VRF' in cmd_output.keys() and
'data' in cmd_output.get('VRF').keys() and
'vrfs' in cmd_output.get('VRF').get('data').keys() and
'vrf' in cmd_output.get('VRF').get('data').get('vrfs').keys()
):
if isinstance(
cmd_output.get('VRF').get('data').get('vrfs').get('vrf'),
dict
):
rias = None
riin = None
reas = None
rein = None
v = cmd_output.get('VRF').get('data').get('vrfs').get('vrf')
if (
'afs' in v.keys() and
'af' in v.get('afs').keys() and
'bgp' in v.get('afs').get('af').keys()
):
rias = v.get('afs').get('af').get('bgp') \
.get('import-route-targets') \
.get('route-targets') \
.get('route-target') \
.get('as-or-four-byte-as') \
.get('as')
riin = v.get('afs').get('af').get('bgp') \
.get('import-route-targets') \
.get('route-targets') \
.get('route-target') \
.get('as-or-four-byte-as') \
.get('as-index')
reas = v.get('afs').get('af').get('bgp') \
.get('export-route-targets') \
.get('route-targets') \
.get('route-target') \
.get('as-or-four-byte-as') \
.get('as')
rein = v.get('afs').get('af').get('bgp') \
.get('export-route-targets') \
.get('route-targets') \
.get('route-target') \
.get('as-or-four-byte-as') \
.get('as-index')
rd = NOT_SET
if (
cmd_output.get('BGP') is not None and
'data' in cmd_output.get('BGP').keys() and
'bgp' in cmd_output.get('BGP')
.get('data').keys() and
'instance' in cmd_output.get('BGP')
.get('data')
.get('bgp').keys() and
'instance-as' in cmd_output.get('BGP')
.get('data')
.get('bgp')
.get('instance').keys() and
'four-byte-as' in cmd_output.get('BGP')
.get('data')
.get('bgp')
.get('instance')
.get('instance-as').keys() and
'vrfs' in cmd_output.get('BGP')
.get('data')
.get('bgp')
.get('instance')
.get('instance-as')
.get('four-byte-as').keys() and
'vrf' in cmd_output.get('BGP')
.get('data')
.get('bgp')
.get('instance')
.get('instance-as')
.get('four-byte-as')
.get('vrfs').keys()
):
if isinstance(cmd_output.get('BGP')
.get('data')
.get('bgp')
.get('instance')
.get('instance-as')
.get('four-byte-as')
.get('vrfs')
.get('vrf'), dict):
vrf = cmd_output.get('BGP') \
.get('data') \
.get('bgp') \
.get('instance') \
.get('instance-as') \
.get('four-byte-as') \
.get('vrfs') \
.get('vrf')
if vrf.get('vrf-name') == v.get('vrf-name'):
if (
'vrf-global' in vrf.keys() and
'route-distinguisher' in vrf.get('vrf-global')
.keys()
):
rd = vrf.get('vrf-global') \
.get('route-distinguisher') \
.get('as') + ":" + \
vrf.get('vrf-global') \
.get('route-distinguisher') \
.get('as-index')
elif isinstance(cmd_output.get('BGP')
.get('data')
.get('bgp')
.get('instance')
.get('instance-as')
.get('four-byte-as')
.get('vrfs')
.get('vrf'), list):
for vrf in cmd_output.get('BGP') \
.get('data') \
.get('bgp') \
.get('instance') \
.get('instance-as') \
.get('four-byte-as') \
.get('vrfs') \
.get('vrf'):
if vrf.get('vrf-name') == v.get('vrf-name'):
if (
'vrf-global' in vrf.keys() and
'route-distinguisher' in
vrf.get('vrf-global').keys()
):
rd = vrf.get('vrf-global') \
.get('route-distinguisher') \
.get('as') + ":" + \
vrf.get('vrf-global') \
.get('route-distinguisher') \
.get('as-index')
vrf_list.vrf_lst.append(
VRF(
vrf_name=v.get('vrf-name'),
vrf_id=NOT_SET,
vrf_type=NOT_SET,
l3_vni=NOT_SET,
rd=rd,
rt_imp=f"{rias}:{riin}" if rias is not None else NOT_SET,
rt_exp=f"{reas}:{rein}" if reas is not None else NOT_SET,
imp_targ=NOT_SET,
exp_targ=NOT_SET,
options=options
)
)
elif isinstance(
cmd_output.get('VRF').get('data').get('vrfs').get('vrf'),
list
):
for v in cmd_output.get('VRF').get('data').get('vrfs').get('vrf'):
rias = None
riin = None
reas = None
rein = None
if (
'afs' in v.keys() and 'af' in v.get('afs').keys() and
'bgp' in v.get('afs').get('af').keys()
):
rias = v.get('afs').get('af').get('bgp') \
.get('import-route-targets') \
.get('route-targets') \
.get('route-target') \
.get('as-or-four-byte-as') \
.get('as')
riin = v.get('afs').get('af').get('bgp') \
.get('import-route-targets') \
.get('route-targets') \
.get('route-target') \
.get('as-or-four-byte-as') \
.get('as-index')
reas = v.get('afs').get('af').get('bgp') \
.get('export-route-targets') \
.get('route-targets') \
.get('route-target') \
.get('as-or-four-byte-as') \
.get('as')
rein = v.get('afs').get('af').get('bgp') \
.get('export-route-targets') \
.get('route-targets') \
.get('route-target') \
.get('as-or-four-byte-as') \
.get('as-index')
rd = NOT_SET
if (
cmd_output.get('BGP') is not None and
'data' in cmd_output.get('BGP').keys() and
'bgp' in cmd_output.get('BGP')
.get('data').keys() and
'instance' in cmd_output.get('BGP')
.get('data')
.get('bgp').keys() and
'instance-as' in cmd_output.get('BGP')
.get('data')
.get('bgp')
.get('instance').keys() and
'four-byte-as' in cmd_output.get('BGP')
.get('data')
.get('bgp')
.get('instance')
.get('instance-as').keys() and
'vrfs' in cmd_output.get('BGP')
.get('data')
.get('bgp')
.get('instance')
.get('instance-as')
.get('four-byte-as').keys() and
'vrf' in cmd_output.get('BGP')
.get('data')
.get('bgp')
.get('instance')
.get('instance-as')
.get('four-byte-as')
.get('vrfs').keys()
):
if isinstance(cmd_output.get('BGP')
.get('data')
.get('bgp')
.get('instance')
.get('instance-as')
.get('four-byte-as')
.get('vrfs')
.get('vrf'), dict):
vrf = cmd_output.get('BGP') \
.get('data') \
.get('bgp') \
.get('instance') \
.get('instance-as') \
.get('four-byte-as') \
.get('vrfs') \
.get('vrf')
if vrf.get('vrf-name') == v.get('vrf-name'):
if (
'vrf-global' in vrf.keys() and
'route-distinguisher' in vrf.get('vrf-global')
.keys()
):
rd = vrf.get('vrf-global') \
.get('route-distinguisher') \
.get('as') + ":" + \
vrf.get('vrf-global') \
.get('route-distinguisher') \
.get('as-index')
elif isinstance(cmd_output.get('BGP')
.get('data')
.get('bgp')
.get('instance')
.get('instance-as')
.get('four-byte-as')
.get('vrfs')
.get('vrf'), list):
for vrf in cmd_output.get('BGP') \
.get('data') \
.get('bgp') \
.get('instance') \
.get('instance-as') \
.get('four-byte-as') \
.get('vrfs') \
.get('vrf'):
if vrf.get('vrf-name') == v.get('vrf-name'):
if (
'vrf-global' in vrf.keys() and
'route-distinguisher' in
vrf.get('vrf-global').keys()
):
rd = vrf.get('vrf-global') \
.get('route-distinguisher') \
.get('as') + ":" + \
vrf.get('vrf-global') \
.get('route-distinguisher') \
.get('as-index')
vrf_list.vrf_lst.append(
VRF(
vrf_name=v.get('vrf-name'),
vrf_id=NOT_SET,
vrf_type=NOT_SET,
l3_vni=NOT_SET,
rd=rd,
rt_imp=f"{rias}:{riin}"
if rias is not None else NOT_SET,
rt_exp=f"{reas}:{rein}"
if reas is not None else NOT_SET,
imp_targ=NOT_SET,
exp_targ=NOT_SET,
options=options
)
)
return vrf_list
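# Hedged usage sketch (editor's addition): with an empty NETCONF reply the converter still
# returns the implicit "default" VRF.
#   vrfs = _iosxr_vrf_nc_converter(hostname='iosxr1', cmd_output={}, options={})
#   assert vrfs.vrf_lst[0].vrf_name == "default"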
| 48.976471
| 79
| 0.27276
| 1,208
| 16,652
| 3.671358
| 0.06457
| 0.070349
| 0.08929
| 0.087937
| 0.930327
| 0.921759
| 0.903269
| 0.893123
| 0.864938
| 0.857497
| 0
| 0.00077
| 0.609957
| 16,652
| 339
| 80
| 49.120944
| 0.682063
| 0.002582
| 0
| 0.863492
| 0
| 0
| 0.126814
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003175
| false
| 0
| 0.022222
| 0
| 0.028571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
88e3f98c6bd9cdaea8c618d746a24f58a4b07d3d
| 2,398
|
py
|
Python
|
rlberry/agents/ucbvi/utils.py
|
antoine-moulin/rlberry
|
676af9d1bb9094a6790a9aa3ff7e67b13584a183
|
[
"MIT"
] | null | null | null |
rlberry/agents/ucbvi/utils.py
|
antoine-moulin/rlberry
|
676af9d1bb9094a6790a9aa3ff7e67b13584a183
|
[
"MIT"
] | null | null | null |
rlberry/agents/ucbvi/utils.py
|
antoine-moulin/rlberry
|
676af9d1bb9094a6790a9aa3ff7e67b13584a183
|
[
"MIT"
] | null | null | null |
from rlberry.utils.jit_setup import numba_jit
@numba_jit
def update_value_and_get_action(state,
hh,
V,
R_hat,
P_hat,
B_sa,
gamma,
v_max):
"""
state : int
hh : int
V : np.ndarray
shape (H, S)
R_hat : np.ndarray
shape (S, A)
P_hat : np.ndarray
shape (S, A, S)
B_sa : np.ndarray
shape (S, A)
gamma : double
v_max : np.ndarray
shape (H,)
"""
H = V.shape[0]
S, A = R_hat.shape[-2:]
best_action = 0
max_val = 0
previous_value = V[hh, state]
for aa in range(A):
q_aa = R_hat[state, aa] + B_sa[state, aa]
if hh < H-1:
for sn in range(S):
q_aa += gamma*P_hat[state, aa, sn]*V[hh+1, sn]
if aa == 0 or q_aa > max_val:
max_val = q_aa
best_action = aa
V[hh, state] = max_val
V[hh, state] = min(v_max[hh], V[hh, state])
V[hh, state] = min(previous_value, V[hh, state])
return best_action
@numba_jit
def update_value_and_get_action_sd(state,
hh,
V,
R_hat,
P_hat,
B_sa,
gamma,
v_max):
"""
state : int
hh : int
V : np.ndarray
shape (H, S)
R_hat : np.ndarray
shape (H, S, A)
P_hat : np.ndarray
shape (H, S, A, S)
B_sa : np.ndarray
shape (S, A)
gamma : double
v_max : np.ndarray
shape (H,)
"""
H = V.shape[0]
S, A = R_hat.shape[-2:]
best_action = 0
max_val = 0
previous_value = V[hh, state]
for aa in range(A):
q_aa = R_hat[hh, state, aa] + B_sa[hh, state, aa]
if hh < H-1:
for sn in range(S):
q_aa += gamma*P_hat[hh, state, aa, sn]*V[hh+1, sn]
if aa == 0 or q_aa > max_val:
max_val = q_aa
best_action = aa
V[hh, state] = max_val
V[hh, state] = min(v_max[hh], V[hh, state])
V[hh, state] = min(previous_value, V[hh, state])
return best_action
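# Hedged usage sketch (editor's addition); array shapes follow the docstrings above:
#   import numpy as np
#   H, S, A = 3, 4, 2
#   V = np.zeros((H, S))
#   R_hat = np.random.rand(S, A)
#   P_hat = np.full((S, A, S), 1.0 / S)
#   B_sa = np.zeros((S, A))
#   v_max = 10.0 * np.ones(H)
#   best_action = update_value_and_get_action(0, 0, V, R_hat, P_hat, B_sa, 0.99, v_max)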
| 24.469388
| 66
| 0.420767
| 332
| 2,398
| 2.843373
| 0.144578
| 0.111229
| 0.101695
| 0.095339
| 0.930085
| 0.930085
| 0.930085
| 0.885593
| 0.813559
| 0.813559
| 0
| 0.011076
| 0.472894
| 2,398
| 97
| 67
| 24.721649
| 0.735759
| 0.178899
| 0
| 0.867925
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037736
| false
| 0
| 0.018868
| 0
| 0.09434
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
001d2abe6a0a51b43e6eb3e5ba19f3aed19cce7b
| 183
|
py
|
Python
|
src/ebonite/runtime/server/__init__.py
|
koskotG/ebonite
|
9f9ae016b70fb24865d5edc99142afb8ab4ddc59
|
[
"Apache-2.0"
] | 270
|
2019-11-14T15:46:08.000Z
|
2021-09-17T16:43:03.000Z
|
src/ebonite/runtime/server/__init__.py
|
leepand/ebonite
|
b01b662c43709d152940f488574d78ff25f89ecf
|
[
"Apache-2.0"
] | 14
|
2019-11-29T11:49:39.000Z
|
2022-02-10T00:23:59.000Z
|
src/ebonite/runtime/server/__init__.py
|
leepand/ebonite
|
b01b662c43709d152940f488574d78ff25f89ecf
|
[
"Apache-2.0"
] | 18
|
2019-11-22T13:15:14.000Z
|
2021-09-01T13:36:12.000Z
|
from .base import BaseHTTPServer, HTTPServerConfig, MalformedHTTPRequestException, Server
__all__ = ['BaseHTTPServer', 'HTTPServerConfig', 'MalformedHTTPRequestException', 'Server']
| 45.75
| 91
| 0.825137
| 12
| 183
| 12.25
| 0.666667
| 0.408163
| 0.802721
| 0.884354
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076503
| 183
| 3
| 92
| 61
| 0.869822
| 0
| 0
| 0
| 0
| 0
| 0.355191
| 0.15847
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 1
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
005115d77068ab008c288cd89874347aaeda6764
| 25,218
|
py
|
Python
|
calaccess_processed_filings/migrations/0009_auto_20180427_0011.py
|
ryanvmenezes/django-calaccess-processed-data
|
966635c8438cda440a12f7765af7c79b5bcb3995
|
[
"MIT"
] | 10
|
2016-09-20T10:39:51.000Z
|
2022-01-31T03:28:43.000Z
|
calaccess_processed_filings/migrations/0009_auto_20180427_0011.py
|
ryanvmenezes/django-calaccess-processed-data
|
966635c8438cda440a12f7765af7c79b5bcb3995
|
[
"MIT"
] | 221
|
2016-02-14T23:08:00.000Z
|
2021-06-20T15:30:09.000Z
|
calaccess_processed_filings/migrations/0009_auto_20180427_0011.py
|
ryanvmenezes/django-calaccess-processed-data
|
966635c8438cda440a12f7765af7c79b5bcb3995
|
[
"MIT"
] | 44
|
2016-09-30T18:02:25.000Z
|
2022-01-14T02:25:55.000Z
|
# Generated by Django 2.0.4 on 2018-04-27 00:11
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('calaccess_processed_filings', '0008_auto_20180426_2358'),
]
operations = [
migrations.CreateModel(
name='Form461Part5Item',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('line_item', models.IntegerField(help_text='Line number of the filing form where the payment is itemized (from EXPN_CD.LINE_ITEM)', verbose_name='line item')),
('payee_code', models.CharField(blank=True, choices=[('BNM', "Ballot measure's name/title"), ('CAO', 'Candidate/officeholder'), ('COM', 'Committee'), ('IND', 'Individual'), ('MBR', 'Member of Associaton'), ('OFF', 'Officer'), ('OTH', 'Other'), ('PTY', 'Political Party'), ('RCP', 'Recipient committee'), ('SCC', 'Small Contributor Committee'), ('???', 'Unknown value')], help_text='Code describing the payee (from EXPN_CD.ENTITY_CD)', max_length=3, verbose_name='payee code')),
('payee_committee_id', models.CharField(blank=True, help_text="Payee's filer identification number, if it is a committee (from EXPN_CD.CMTE_ID)", max_length=9, verbose_name='payee committee id')),
('payee_title', models.CharField(blank=True, help_text='Name title of the payee (from EXPN_CD.PAYEE_NAMT)', max_length=10, verbose_name='payee title')),
('payee_lastname', models.CharField(blank=True, help_text='Last name of the payee or business name (from EXPN_CD.PAYEE_NAML)', max_length=200, verbose_name='payee lastname')),
('payee_firstname', models.CharField(help_text='First name of the payee (from EXPN_CD.PAYEE_NAMF)', max_length=45, verbose_name='payee firstname')),
('payee_name_suffix', models.CharField(blank=True, help_text='Name suffix of the payee (from EXPN_CD.PAYEE_NAMS)', max_length=10, verbose_name='payee name suffix')),
('payee_city', models.CharField(blank=True, help_text='City of the payee (from EXPN_CD.PAYEE_CITY)', max_length=30, verbose_name='payee city')),
('payee_state', models.CharField(blank=True, help_text='State of the payee (from EXPN_CD.PAYEE_ST)', max_length=2, verbose_name='payee state')),
('payee_zip', models.CharField(blank=True, help_text='Zip code (usually zip5, sometimes zip9) of the payee (from EXPN_CD.PAYEE_ZIP4)', max_length=10, verbose_name='payee zip')),
('treasurer_title', models.CharField(blank=True, help_text="Name title of the payee committee's treasurer (from EXPN_CD.TRES_NAMT)", max_length=10, verbose_name='treasurer title')),
('treasurer_lastname', models.CharField(blank=True, help_text="Last name of the payee committee's treasurer (from EXPN_CD.TRES_NAML)", max_length=200, verbose_name='treasurer lastname')),
('treasurer_firstname', models.CharField(help_text="First name of the payee committee's treasurer (from EXPN_CD.TRES_NAMF)", max_length=45, verbose_name='treasurer firstname')),
('treasurer_name_suffix', models.CharField(blank=True, help_text="Name suffix of the payee committee's treasurer (from EXPN_CD.TRES_NAMS)", max_length=10, verbose_name='treasurer name suffix')),
('treasurer_city', models.CharField(blank=True, help_text="City of the payee committee's treasurer (from EXPN_CD.TRES_CITY)", max_length=30, verbose_name='treasurer city')),
('treasurer_state', models.CharField(blank=True, help_text="State of the payee committee's treasurer (from EXPN_CD.TRES_ST)", max_length=2, verbose_name='treasurer state')),
('treasurer_zip', models.CharField(blank=True, help_text="Zip code (usually zip5, sometimes zip9) of the payee committee's treasurer (from EXPN_CD.TRES_ZIP4)", max_length=10, verbose_name='treasurer zip')),
('payment_code', models.CharField(blank=True, choices=[('CMP', 'Campaign paraphernalia/miscellaneous'), ('CNS', 'Campaign consultants'), ('CTB', 'Contribution (if nonmonetary, explain)*'), ('CVC', 'Civic donations'), ('FIL', 'Candidate filing/ballot feeds'), ('FND', 'Fundraising events'), ('IKD', 'In-kind contribution (nonmonetary)'), ('IND', 'Independent expenditure supporting/opposing others (explain)*'), ('LEG', 'Legal defense'), ('LIT', 'Campaign literature and mailings'), ('LON', 'Loan'), ('MBR', 'Member communications'), ('MON', 'Monetary contribution'), ('MTG', 'Meetings and appearances'), ('OFC', 'Office expenses'), ('PET', 'Petition circulating'), ('PHO', 'Phone banks'), ('POL', 'Polling and survey research'), ('POS', 'Postage, delivery and messenger services'), ('PRO', 'Professional services (legal, accounting)'), ('PRT', 'Print ads'), ('RAD', 'Radio airtime and production costs'), ('RFD', 'Returned contributions'), ('SAL', 'Campaign workers salaries'), ('TEL', 'T.V. or cable airtime and production costs'), ('TRC', 'Candidate travel, lodging and meals (explain)'), ('TRS', 'Staff/spouse travel, lodging and meals (explain)'), ('TSF', 'Transfer between committees of the same candidate/sponsor'), ('VOT', 'Voter registration'), ('WEB', 'Information technology costs (internet, e-mail)'), ('???', 'Unknown value')], help_text='Code describing the payment (from EXPN_CD.EXPN_CODE)', max_length=3, verbose_name='payment code')),
('payment_description', models.CharField(blank=True, help_text='Purpose of payment and/or description/explanation (from EXPN_CD.EXPN_DSCR)', max_length=400, verbose_name='payment description')),
('amount', models.DecimalField(decimal_places=2, help_text='Amount paid to the payee in the period covered by the filing (from EXPN_CD.AMOUNT)', max_digits=14, verbose_name='amount')),
('cumulative_ytd_amount', models.DecimalField(decimal_places=2, help_text="Cumulative year-to-date amount given or spent by the filer in support or opposition of the candidate or ballot measure as of the Form 460's filing date (from EXPN_CD.CUM_YTD)", max_digits=14, null=True)),
('expense_date', models.DateField(help_text='Date or expense (from EXPN_CD.EXPN_DATE)', null=True, verbose_name='expense date')),
('check_number', models.CharField(blank=True, help_text='Optional check number for the payment made by the campaign filer (from EXPN_CD.EXPN_CHKNO)', max_length=20, verbose_name='expense check number')),
('support_oppose_code', models.CharField(blank=True, choices=[('S', 'Support'), ('O', 'Oppose'), ('?', 'Unknown value')], help_text='If applicable, code indicating whether the payment went toward supporting or opposing a candidate/ballot measure (from EXPN_CD.SUP_OPP_CD)', max_length=1, verbose_name='support oppose code')),
('ballot_measure_jurisdiction', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a ballot measure, the jurisdiction subject to the ballot measure (from EXPN_CD.BAL_JURIS)', max_length=40, verbose_name='ballot measure jurisdiction')),
('ballot_measure_name', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a ballot measure, name of the ballot measure (from EXPN_CD.BAL_NAME or EXPN_CD.CAND_NAML)', max_length=200, verbose_name='ballot measure name')),
('ballot_measure_num', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a ballot measure, ballot number or letter (from EXPN_CD.BAL_NUM)', max_length=7, verbose_name='ballot measure number')),
('candidate_title', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a candidate, name title of the candidate (from EXPN_CD.CAND_NAMT)', max_length=10, verbose_name='candidate title')),
('candidate_lastname', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a candidate, last name of the candidate or business name (from EXPN_CD.CAND_NAML)', max_length=200, verbose_name='candidate lastname')),
('candidate_firstname', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a candidate, first name of the candidate (from EXPN_CD.CAND_NAMF)', max_length=45, verbose_name='candidate firstname')),
('candidate_name_suffix', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a candidate, name suffix of the candidate (from EXPN_CD.CAND_NAMS)', max_length=10, verbose_name='candidate name suffix')),
('candidate_jurisdiction_code', models.CharField(blank=True, choices=[('ASM', 'Assembly District'), ('BOE', 'Board of Equalization District'), ('CIT', 'City'), ('CTY', 'County'), ('LOC', 'Local'), ('OTH', 'Other'), ('SEN', 'Senate District'), ('STW', 'Statewide'), ('???', 'Statewide')], help_text='If the payment went toward supporting/opposing a candidate,code indicating the jurisdiction of the office (from EXPN_CD.JURIS_CD)', max_length=3, verbose_name='candidate jurisdiction')),
('candidate_jurisdiction_description', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a county, city or local candidate, full description of the office (from EXPN_CD.JURIS_DSCR)', max_length=40, verbose_name='candidate jurisdiction description')),
('candidate_district', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a candidate,for state senate, assembly or local board of education, thedistrict of the office (from EXPN_CD.DIST_NO)', max_length=3, verbose_name='candidate district')),
('office_sought_held', models.CharField(blank=True, choices=[('S', 'SOUGHT'), ('H', 'HELD')], help_text='If the payment went toward supporting/opposing a candidate,code indicating if the candidate is seeking or currently holds the office (from EXPN_CD.OFF_S_H_CD)', max_length=1, verbose_name='office sought or held')),
('office_code', models.CharField(blank=True, choices=[('APP', 'State Appellate Court Justice'), ('ASM', 'State Assembly Person'), ('ASR', 'Assessor'), ('ATT', 'Attorney General'), ('BED', 'Board of Education'), ('BOE', 'Board of Equalization Member'), ('BSU', 'Board of Supervisors'), ('CAT', 'City Attorney'), ('CCB', 'Community College Board'), ('CCM', 'City Council Member'), ('CON', 'State Controller'), ('COU', 'County Counsel'), ('CSU', 'County Supervisor'), ('CTR', 'Local Controller'), ('DAT', 'District Attorney'), ('GOV', 'Governor'), ('INS', 'Insurance Commissioner'), ('LTG', 'Lieutenant Governor'), ('MAY', 'Mayor'), ('OTH', 'Other'), ('PDR', 'Public Defender'), ('PER', 'Public Employees Retirement System'), ('PLN', 'Planning Commissioner'), ('SCJ', 'Superior Court Judge'), ('SEN', 'State Senator'), ('SHC', 'Sheriff-Coroner'), ('SOS', 'Secretary of State'), ('SPM', 'Supreme Court Justice'), ('SUP', 'Superintendent of Public Instruction'), ('TRE', 'State Treasurer'), ('TRS', 'Local Treasurer'), ('???', 'Unknown value')], help_text='If the payment went toward supporting/opposing a candidate,code describing the office (from EXPN_CD.OFFICE_CD)', max_length=3, verbose_name='office code')),
('office_description', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a candidate,description of the office (from EXPN_CD.OFFIC_DSCR)', max_length=40, verbose_name='office description')),
('transaction_id', models.CharField(help_text='Identifies a unique transaction across versions of the a given Form 460 filing (from EXPN_CD.TRAN_ID)', max_length=20, verbose_name='transaction id')),
('memo_reference_number', models.CharField(blank=True, help_text="A value assigned by the filer which refers to the item'sfootnote in the TEXT_MEMO_CD table (from EXPN_CD.MEMO_REFNO)", max_length=20, verbose_name='memo reference number')),
('filing', models.ForeignKey(help_text='Foreign key referring to the Form 461 on which the payment was reported (from EXPN_CD.FILING_ID)', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='part_5_items', to='calaccess_processed_filings.Form461Filing')),
],
options={
'verbose_name': 'Form 461 (Campaign Disclosure) Part 5 item',
},
),
migrations.CreateModel(
name='Form461Part5ItemVersion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('line_item', models.IntegerField(help_text='Line number of the filing form where the payment is itemized (from EXPN_CD.LINE_ITEM)', verbose_name='line item')),
('payee_code', models.CharField(blank=True, choices=[('BNM', "Ballot measure's name/title"), ('CAO', 'Candidate/officeholder'), ('COM', 'Committee'), ('IND', 'Individual'), ('MBR', 'Member of Associaton'), ('OFF', 'Officer'), ('OTH', 'Other'), ('PTY', 'Political Party'), ('RCP', 'Recipient committee'), ('SCC', 'Small Contributor Committee'), ('???', 'Unknown value')], help_text='Code describing the payee (from EXPN_CD.ENTITY_CD)', max_length=3, verbose_name='payee code')),
('payee_committee_id', models.CharField(blank=True, help_text="Payee's filer identification number, if it is a committee (from EXPN_CD.CMTE_ID)", max_length=9, verbose_name='payee committee id')),
('payee_title', models.CharField(blank=True, help_text='Name title of the payee (from EXPN_CD.PAYEE_NAMT)', max_length=10, verbose_name='payee title')),
('payee_lastname', models.CharField(blank=True, help_text='Last name of the payee or business name (from EXPN_CD.PAYEE_NAML)', max_length=200, verbose_name='payee lastname')),
('payee_firstname', models.CharField(help_text='First name of the payee (from EXPN_CD.PAYEE_NAMF)', max_length=45, verbose_name='payee firstname')),
('payee_name_suffix', models.CharField(blank=True, help_text='Name suffix of the payee (from EXPN_CD.PAYEE_NAMS)', max_length=10, verbose_name='payee name suffix')),
('payee_city', models.CharField(blank=True, help_text='City of the payee (from EXPN_CD.PAYEE_CITY)', max_length=30, verbose_name='payee city')),
('payee_state', models.CharField(blank=True, help_text='State of the payee (from EXPN_CD.PAYEE_ST)', max_length=2, verbose_name='payee state')),
('payee_zip', models.CharField(blank=True, help_text='Zip code (usually zip5, sometimes zip9) of the payee (from EXPN_CD.PAYEE_ZIP4)', max_length=10, verbose_name='payee zip')),
('treasurer_title', models.CharField(blank=True, help_text="Name title of the payee committee's treasurer (from EXPN_CD.TRES_NAMT)", max_length=10, verbose_name='treasurer title')),
('treasurer_lastname', models.CharField(blank=True, help_text="Last name of the payee committee's treasurer (from EXPN_CD.TRES_NAML)", max_length=200, verbose_name='treasurer lastname')),
('treasurer_firstname', models.CharField(help_text="First name of the payee committee's treasurer (from EXPN_CD.TRES_NAMF)", max_length=45, verbose_name='treasurer firstname')),
('treasurer_name_suffix', models.CharField(blank=True, help_text="Name suffix of the payee committee's treasurer (from EXPN_CD.TRES_NAMS)", max_length=10, verbose_name='treasurer name suffix')),
('treasurer_city', models.CharField(blank=True, help_text="City of the payee committee's treasurer (from EXPN_CD.TRES_CITY)", max_length=30, verbose_name='treasurer city')),
('treasurer_state', models.CharField(blank=True, help_text="State of the payee committee's treasurer (from EXPN_CD.TRES_ST)", max_length=2, verbose_name='treasurer state')),
('treasurer_zip', models.CharField(blank=True, help_text="Zip code (usually zip5, sometimes zip9) of the payee committee's treasurer (from EXPN_CD.TRES_ZIP4)", max_length=10, verbose_name='treasurer zip')),
('payment_code', models.CharField(blank=True, choices=[('CMP', 'Campaign paraphernalia/miscellaneous'), ('CNS', 'Campaign consultants'), ('CTB', 'Contribution (if nonmonetary, explain)*'), ('CVC', 'Civic donations'), ('FIL', 'Candidate filing/ballot feeds'), ('FND', 'Fundraising events'), ('IKD', 'In-kind contribution (nonmonetary)'), ('IND', 'Independent expenditure supporting/opposing others (explain)*'), ('LEG', 'Legal defense'), ('LIT', 'Campaign literature and mailings'), ('LON', 'Loan'), ('MBR', 'Member communications'), ('MON', 'Monetary contribution'), ('MTG', 'Meetings and appearances'), ('OFC', 'Office expenses'), ('PET', 'Petition circulating'), ('PHO', 'Phone banks'), ('POL', 'Polling and survey research'), ('POS', 'Postage, delivery and messenger services'), ('PRO', 'Professional services (legal, accounting)'), ('PRT', 'Print ads'), ('RAD', 'Radio airtime and production costs'), ('RFD', 'Returned contributions'), ('SAL', 'Campaign workers salaries'), ('TEL', 'T.V. or cable airtime and production costs'), ('TRC', 'Candidate travel, lodging and meals (explain)'), ('TRS', 'Staff/spouse travel, lodging and meals (explain)'), ('TSF', 'Transfer between committees of the same candidate/sponsor'), ('VOT', 'Voter registration'), ('WEB', 'Information technology costs (internet, e-mail)'), ('???', 'Unknown value')], help_text='Code describing the payment (from EXPN_CD.EXPN_CODE)', max_length=3, verbose_name='payment code')),
('payment_description', models.CharField(blank=True, help_text='Purpose of payment and/or description/explanation (from EXPN_CD.EXPN_DSCR)', max_length=400, verbose_name='payment description')),
('amount', models.DecimalField(decimal_places=2, help_text='Amount paid to the payee in the period covered by the filing (from EXPN_CD.AMOUNT)', max_digits=14, verbose_name='amount')),
('cumulative_ytd_amount', models.DecimalField(decimal_places=2, help_text="Cumulative year-to-date amount given or spent by the filer in support or opposition of the candidate or ballot measure as of the Form 460's filing date (from EXPN_CD.CUM_YTD)", max_digits=14, null=True)),
('expense_date', models.DateField(help_text='Date or expense (from EXPN_CD.EXPN_DATE)', null=True, verbose_name='expense date')),
('check_number', models.CharField(blank=True, help_text='Optional check number for the payment made by the campaign filer (from EXPN_CD.EXPN_CHKNO)', max_length=20, verbose_name='expense check number')),
('support_oppose_code', models.CharField(blank=True, choices=[('S', 'Support'), ('O', 'Oppose'), ('?', 'Unknown value')], help_text='If applicable, code indicating whether the payment went toward supporting or opposing a candidate/ballot measure (from EXPN_CD.SUP_OPP_CD)', max_length=1, verbose_name='support oppose code')),
('ballot_measure_jurisdiction', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a ballot measure, the jurisdiction subject to the ballot measure (from EXPN_CD.BAL_JURIS)', max_length=40, verbose_name='ballot measure jurisdiction')),
('ballot_measure_name', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a ballot measure, name of the ballot measure (from EXPN_CD.BAL_NAME or EXPN_CD.CAND_NAML)', max_length=200, verbose_name='ballot measure name')),
('ballot_measure_num', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a ballot measure, ballot number or letter (from EXPN_CD.BAL_NUM)', max_length=7, verbose_name='ballot measure number')),
('candidate_title', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a candidate, name title of the candidate (from EXPN_CD.CAND_NAMT)', max_length=10, verbose_name='candidate title')),
('candidate_lastname', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a candidate, last name of the candidate or business name (from EXPN_CD.CAND_NAML)', max_length=200, verbose_name='candidate lastname')),
('candidate_firstname', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a candidate, first name of the candidate (from EXPN_CD.CAND_NAMF)', max_length=45, verbose_name='candidate firstname')),
('candidate_name_suffix', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a candidate, name suffix of the candidate (from EXPN_CD.CAND_NAMS)', max_length=10, verbose_name='candidate name suffix')),
('candidate_jurisdiction_code', models.CharField(blank=True, choices=[('ASM', 'Assembly District'), ('BOE', 'Board of Equalization District'), ('CIT', 'City'), ('CTY', 'County'), ('LOC', 'Local'), ('OTH', 'Other'), ('SEN', 'Senate District'), ('STW', 'Statewide'), ('???', 'Statewide')], help_text='If the payment went toward supporting/opposing a candidate,code indicating the jurisdiction of the office (from EXPN_CD.JURIS_CD)', max_length=3, verbose_name='candidate jurisdiction')),
('candidate_jurisdiction_description', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a county, city or local candidate, full description of the office (from EXPN_CD.JURIS_DSCR)', max_length=40, verbose_name='candidate jurisdiction description')),
('candidate_district', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a candidate,for state senate, assembly or local board of education, thedistrict of the office (from EXPN_CD.DIST_NO)', max_length=3, verbose_name='candidate district')),
('office_sought_held', models.CharField(blank=True, choices=[('S', 'SOUGHT'), ('H', 'HELD')], help_text='If the payment went toward supporting/opposing a candidate,code indicating if the candidate is seeking or currently holds the office (from EXPN_CD.OFF_S_H_CD)', max_length=1, verbose_name='office sought or held')),
('office_code', models.CharField(blank=True, choices=[('APP', 'State Appellate Court Justice'), ('ASM', 'State Assembly Person'), ('ASR', 'Assessor'), ('ATT', 'Attorney General'), ('BED', 'Board of Education'), ('BOE', 'Board of Equalization Member'), ('BSU', 'Board of Supervisors'), ('CAT', 'City Attorney'), ('CCB', 'Community College Board'), ('CCM', 'City Council Member'), ('CON', 'State Controller'), ('COU', 'County Counsel'), ('CSU', 'County Supervisor'), ('CTR', 'Local Controller'), ('DAT', 'District Attorney'), ('GOV', 'Governor'), ('INS', 'Insurance Commissioner'), ('LTG', 'Lieutenant Governor'), ('MAY', 'Mayor'), ('OTH', 'Other'), ('PDR', 'Public Defender'), ('PER', 'Public Employees Retirement System'), ('PLN', 'Planning Commissioner'), ('SCJ', 'Superior Court Judge'), ('SEN', 'State Senator'), ('SHC', 'Sheriff-Coroner'), ('SOS', 'Secretary of State'), ('SPM', 'Supreme Court Justice'), ('SUP', 'Superintendent of Public Instruction'), ('TRE', 'State Treasurer'), ('TRS', 'Local Treasurer'), ('???', 'Unknown value')], help_text='If the payment went toward supporting/opposing a candidate,code describing the office (from EXPN_CD.OFFICE_CD)', max_length=3, verbose_name='office code')),
('office_description', models.CharField(blank=True, help_text='If the payment went toward supporting/opposing a candidate,description of the office (from EXPN_CD.OFFIC_DSCR)', max_length=40, verbose_name='office description')),
('transaction_id', models.CharField(help_text='Identifies a unique transaction across versions of the a given Form 460 filing (from EXPN_CD.TRAN_ID)', max_length=20, verbose_name='transaction id')),
('memo_reference_number', models.CharField(blank=True, help_text="A value assigned by the filer which refers to the item'sfootnote in the TEXT_MEMO_CD table (from EXPN_CD.MEMO_REFNO)", max_length=20, verbose_name='memo reference number')),
('filing_version', models.ForeignKey(help_text='Foreign key referring to the version of the Form 461 that includes the payment made', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='part_5_items', to='calaccess_processed_filings.Form461FilingVersion')),
],
options={
'verbose_name': 'Form 461 (Campaign Disclosure) Part 5 item version',
},
),
migrations.AlterUniqueTogether(
name='form461part5itemversion',
unique_together={('filing_version', 'line_item')},
),
migrations.AlterIndexTogether(
name='form461part5itemversion',
index_together={('filing_version', 'line_item')},
),
migrations.AlterUniqueTogether(
name='form461part5item',
unique_together={('filing', 'line_item')},
),
]
| 201.744
| 1,466
| 0.70644
| 3,328
| 25,218
| 5.190204
| 0.123498
| 0.028136
| 0.045736
| 0.088925
| 0.964743
| 0.964743
| 0.960227
| 0.960227
| 0.960227
| 0.954669
| 0
| 0.011045
| 0.1491
| 25,218
| 124
| 1,467
| 203.370968
| 0.793923
| 0.001784
| 0
| 0.822034
| 1
| 0.288136
| 0.573994
| 0.029319
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016949
| 0
| 0.042373
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
cc3ecabff931c1cc74fb3c97734ef7b7d36d347d
| 190
|
py
|
Python
|
message_passing_nn/__init__.py
|
kovanostra/message-passing-nn
|
6617a4753173c8fffc60140b9d8d0f497b33aed4
|
[
"MIT"
] | 12
|
2020-06-14T03:00:16.000Z
|
2022-01-05T09:51:07.000Z
|
message_passing_nn/__init__.py
|
kovanostra/message-passing-nn
|
6617a4753173c8fffc60140b9d8d0f497b33aed4
|
[
"MIT"
] | null | null | null |
message_passing_nn/__init__.py
|
kovanostra/message-passing-nn
|
6617a4753173c8fffc60140b9d8d0f497b33aed4
|
[
"MIT"
] | 1
|
2020-11-21T13:04:56.000Z
|
2020-11-21T13:04:56.000Z
|
from message_passing_nn.create_message_passing_nn import create_grid_search
from message_passing_nn.create_message_passing_nn import create_inference
from message_passing_nn.cli import main
| 47.5
| 75
| 0.921053
| 30
| 190
| 5.333333
| 0.366667
| 0.4375
| 0.5
| 0.375
| 0.675
| 0.675
| 0.675
| 0.675
| 0.675
| 0.675
| 0
| 0
| 0.063158
| 190
| 3
| 76
| 63.333333
| 0.898876
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 9
|
ccd4492c0bc697e20043fad0612bc1dba8fc7fbf
| 38
|
py
|
Python
|
hale_hub/frontend/__init__.py
|
tantinlala/hale-hub
|
da2e6d24e3869ee533d2e272ce87b9e7eede9a79
|
[
"MIT"
] | null | null | null |
hale_hub/frontend/__init__.py
|
tantinlala/hale-hub
|
da2e6d24e3869ee533d2e272ce87b9e7eede9a79
|
[
"MIT"
] | null | null | null |
hale_hub/frontend/__init__.py
|
tantinlala/hale-hub
|
da2e6d24e3869ee533d2e272ce87b9e7eede9a79
|
[
"MIT"
] | null | null | null |
from .views import frontend_blueprint
| 19
| 37
| 0.868421
| 5
| 38
| 6.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 38
| 1
| 38
| 38
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
|
0
| 7
|
aed7c4bacb43c4b0c579d2ff7b5d5debe953bd9c
| 145
|
py
|
Python
|
src/extended_webdrivers/edge.py
|
dillonm197/extended-webdrivers
|
9cb4cdb75f37c66ee1ac7fa13b947ae3bcb17863
|
[
"MIT"
] | null | null | null |
src/extended_webdrivers/edge.py
|
dillonm197/extended-webdrivers
|
9cb4cdb75f37c66ee1ac7fa13b947ae3bcb17863
|
[
"MIT"
] | null | null | null |
src/extended_webdrivers/edge.py
|
dillonm197/extended-webdrivers
|
9cb4cdb75f37c66ee1ac7fa13b947ae3bcb17863
|
[
"MIT"
] | 1
|
2019-08-07T01:48:36.000Z
|
2019-08-07T01:48:36.000Z
|
from selenium.webdriver import Edge as _Edge
from .extended_webdriver import ExtendedWebdriver
class Edge(ExtendedWebdriver, _Edge):
pass
| 18.125
| 49
| 0.813793
| 17
| 145
| 6.764706
| 0.588235
| 0.26087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144828
| 145
| 7
| 50
| 20.714286
| 0.927419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
9d7e0976c3c20024f6af9f0595d928d4f3b57103
| 100
|
py
|
Python
|
ips/ip/i2c_slave_subad/__init__.py
|
zld012739/zldrepository
|
5635b78a168956091676ef4dd99fa564be0e5ba0
|
[
"MIT"
] | null | null | null |
ips/ip/i2c_slave_subad/__init__.py
|
zld012739/zldrepository
|
5635b78a168956091676ef4dd99fa564be0e5ba0
|
[
"MIT"
] | null | null | null |
ips/ip/i2c_slave_subad/__init__.py
|
zld012739/zldrepository
|
5635b78a168956091676ef4dd99fa564be0e5ba0
|
[
"MIT"
] | null | null | null |
from i2c_slave_subad_partial import get_ip_name
from i2c_slave_subad_partial import I2C_SLAVE_SUBAD
| 33.333333
| 51
| 0.92
| 18
| 100
| 4.555556
| 0.5
| 0.292683
| 0.47561
| 0.414634
| 0.731707
| 0.731707
| 0
| 0
| 0
| 0
| 0
| 0.032609
| 0.08
| 100
| 2
| 52
| 50
| 0.858696
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
9db86cd5d1f088029840302a9a06f1a757cc580e
| 14,578
|
py
|
Python
|
sdk/python/pulumi_azure/apimanagement/gateway.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/apimanagement/gateway.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/apimanagement/gateway.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['GatewayArgs', 'Gateway']
@pulumi.input_type
class GatewayArgs:
def __init__(__self__, *,
api_management_id: pulumi.Input[str],
location_data: pulumi.Input['GatewayLocationDataArgs'],
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Gateway resource.
:param pulumi.Input['GatewayLocationDataArgs'] location_data: A `location_data` block as documented below.
:param pulumi.Input[str] description: The description of the API Management Gateway.
:param pulumi.Input[str] name: The name which should be used for the API Management Gateway. Changing this forces a new API Management Gateway to be created.
"""
pulumi.set(__self__, "api_management_id", api_management_id)
pulumi.set(__self__, "location_data", location_data)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="apiManagementId")
def api_management_id(self) -> pulumi.Input[str]:
return pulumi.get(self, "api_management_id")
@api_management_id.setter
def api_management_id(self, value: pulumi.Input[str]):
pulumi.set(self, "api_management_id", value)
@property
@pulumi.getter(name="locationData")
def location_data(self) -> pulumi.Input['GatewayLocationDataArgs']:
"""
A `location_data` block as documented below.
"""
return pulumi.get(self, "location_data")
@location_data.setter
def location_data(self, value: pulumi.Input['GatewayLocationDataArgs']):
pulumi.set(self, "location_data", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the API Management Gateway.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name which should be used for the API Management Gateway. Changing this forces a new API Management Gateway to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class _GatewayState:
def __init__(__self__, *,
api_management_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
location_data: Optional[pulumi.Input['GatewayLocationDataArgs']] = None,
name: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Gateway resources.
:param pulumi.Input[str] description: The description of the API Management Gateway.
:param pulumi.Input['GatewayLocationDataArgs'] location_data: A `location_data` block as documented below.
:param pulumi.Input[str] name: The name which should be used for the API Management Gateway. Changing this forces a new API Management Gateway to be created.
"""
if api_management_id is not None:
pulumi.set(__self__, "api_management_id", api_management_id)
if description is not None:
pulumi.set(__self__, "description", description)
if location_data is not None:
pulumi.set(__self__, "location_data", location_data)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="apiManagementId")
def api_management_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "api_management_id")
@api_management_id.setter
def api_management_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_management_id", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the API Management Gateway.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="locationData")
def location_data(self) -> Optional[pulumi.Input['GatewayLocationDataArgs']]:
"""
A `location_data` block as documented below.
"""
return pulumi.get(self, "location_data")
@location_data.setter
def location_data(self, value: Optional[pulumi.Input['GatewayLocationDataArgs']]):
pulumi.set(self, "location_data", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name which should be used for the API Management Gateway. Changing this forces a new API Management Gateway to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
class Gateway(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
api_management_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
location_data: Optional[pulumi.Input[pulumi.InputType['GatewayLocationDataArgs']]] = None,
name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Manages an API Management Gateway.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_service = azure.apimanagement.Service("exampleService",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
publisher_name="pub1",
publisher_email="pub1@email.com",
sku_name="Consumption_0")
example_gateway = azure.apimanagement.Gateway("exampleGateway",
api_management_id=example_service.id,
description="Example API Management gateway",
location_data=azure.apimanagement.GatewayLocationDataArgs(
name="example name",
city="example city",
district="example district",
region="example region",
))
```
## Import
API Management Gateways can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:apimanagement/gateway:Gateway example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.ApiManagement/service/service1/gateways/gateway1
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: The description of the API Management Gateway.
:param pulumi.Input[pulumi.InputType['GatewayLocationDataArgs']] location_data: A `location_data` block as documented below.
:param pulumi.Input[str] name: The name which should be used for the API Management Gateway. Changing this forces a new API Management Gateway to be created.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: GatewayArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages an API Management Gateway.
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.ResourceGroup("exampleResourceGroup", location="West Europe")
example_service = azure.apimanagement.Service("exampleService",
location=example_resource_group.location,
resource_group_name=example_resource_group.name,
publisher_name="pub1",
publisher_email="pub1@email.com",
sku_name="Consumption_0")
example_gateway = azure.apimanagement.Gateway("exampleGateway",
api_management_id=example_service.id,
description="Example API Management gateway",
location_data=azure.apimanagement.GatewayLocationDataArgs(
name="example name",
city="example city",
district="example district",
region="example region",
))
```
## Import
API Management Gateways can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:apimanagement/gateway:Gateway example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.ApiManagement/service/service1/gateways/gateway1
```
:param str resource_name: The name of the resource.
:param GatewayArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(GatewayArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
api_management_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
location_data: Optional[pulumi.Input[pulumi.InputType['GatewayLocationDataArgs']]] = None,
name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = GatewayArgs.__new__(GatewayArgs)
if api_management_id is None and not opts.urn:
raise TypeError("Missing required property 'api_management_id'")
__props__.__dict__["api_management_id"] = api_management_id
__props__.__dict__["description"] = description
if location_data is None and not opts.urn:
raise TypeError("Missing required property 'location_data'")
__props__.__dict__["location_data"] = location_data
__props__.__dict__["name"] = name
super(Gateway, __self__).__init__(
'azure:apimanagement/gateway:Gateway',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
api_management_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
location_data: Optional[pulumi.Input[pulumi.InputType['GatewayLocationDataArgs']]] = None,
name: Optional[pulumi.Input[str]] = None) -> 'Gateway':
"""
Get an existing Gateway resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] description: The description of the API Management Gateway.
:param pulumi.Input[pulumi.InputType['GatewayLocationDataArgs']] location_data: A `location_data` block as documented below.
:param pulumi.Input[str] name: The name which should be used for the API Management Gateway. Changing this forces a new API Management Gateway to be created.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _GatewayState.__new__(_GatewayState)
__props__.__dict__["api_management_id"] = api_management_id
__props__.__dict__["description"] = description
__props__.__dict__["location_data"] = location_data
__props__.__dict__["name"] = name
return Gateway(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="apiManagementId")
def api_management_id(self) -> pulumi.Output[str]:
return pulumi.get(self, "api_management_id")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
The description of the API Management Gateway.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="locationData")
def location_data(self) -> pulumi.Output['outputs.GatewayLocationData']:
"""
A `location_data` block as documented below.
"""
return pulumi.get(self, "location_data")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name which should be used for the API Management Gateway. Changing this forces a new API Management Gateway to be created.
"""
return pulumi.get(self, "name")
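# A minimal usage sketch (not part of the generated SDK above): referencing an
# already-provisioned API Management Gateway through Gateway.get(). The resource
# ID below reuses the placeholder from the import example; substitute a real ID.
import pulumi
import pulumi_azure as azure

existing_gateway = azure.apimanagement.Gateway.get(
    "existingGateway",
    id="/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.ApiManagement/service/service1/gateways/gateway1")
pulumi.export("existingGatewayName", existing_gateway.name)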
| 42.377907
| 211
| 0.652147
| 1,612
| 14,578
| 5.665012
| 0.114764
| 0.081143
| 0.056724
| 0.057819
| 0.81855
| 0.796211
| 0.776172
| 0.763798
| 0.739816
| 0.729523
| 0
| 0.007054
| 0.2512
| 14,578
| 343
| 212
| 42.501458
| 0.829516
| 0.362738
| 0
| 0.644068
| 1
| 0
| 0.122076
| 0.0321
| 0
| 0
| 0
| 0
| 0
| 1
| 0.152542
| false
| 0.00565
| 0.039548
| 0.016949
| 0.282486
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9dc9331d0b2bd8370cf695f96ddcfd13628a9c27
| 2,304
|
py
|
Python
|
run.py
|
80085/professorspelet
|
7a36961358673002d88671c31b61731c9e934994
|
[
"MIT"
] | null | null | null |
run.py
|
80085/professorspelet
|
7a36961358673002d88671c31b61731c9e934994
|
[
"MIT"
] | null | null | null |
run.py
|
80085/professorspelet
|
7a36961358673002d88671c31b61731c9e934994
|
[
"MIT"
] | null | null | null |
from professorspelet.professorspelet import Professor, Tile, Puzzle
if __name__ == '__main__':
tiles = [
Tile([Professor('lower', 'green'), Professor('lower', 'brown'), Professor('upper', 'purple'), Professor('upper', 'blue')]),
Tile([Professor('lower', 'brown'), Professor('lower', 'green'), Professor('upper', 'blue'), Professor('upper', 'purple')]),
Tile([Professor('lower', 'brown'), Professor('lower', 'purple'), Professor('upper', 'green'), Professor('upper', 'brown')]),
Tile([Professor('lower', 'brown'), Professor('lower', 'green'), Professor('upper', 'green'), Professor('upper', 'purple')]),
Tile([Professor('lower', 'blue'), Professor('lower', 'green'), Professor('upper', 'brown'), Professor('upper', 'purple')]),
Tile([Professor('lower', 'brown'), Professor('lower', 'purple'), Professor('upper', 'green'), Professor('upper', 'blue')]),
Tile([Professor('lower', 'green'), Professor('lower', 'blue'), Professor('upper', 'green'), Professor('upper', 'purple')]),
Tile([Professor('lower', 'green'), Professor('lower', 'blue'), Professor('upper', 'brown'), Professor('upper', 'purple')]),
Tile([Professor('lower', 'blue'), Professor('lower', 'brown'), Professor('upper', 'brown'), Professor('upper', 'blue')]),
Tile([Professor('lower', 'green'), Professor('lower', 'purple'), Professor('upper', 'blue'), Professor('upper', 'brown')]),
Tile([Professor('lower', 'green'), Professor('lower', 'green'), Professor('upper', 'brown'), Professor('upper', 'purple')]),
Tile([Professor('lower', 'green'), Professor('lower', 'purple'), Professor('upper', 'brown'), Professor('upper', 'blue')]),
Tile([Professor('lower', 'brown'), Professor('lower', 'purple'), Professor('upper', 'green'), Professor('upper', 'blue')]),
Tile([Professor('lower', 'brown'), Professor('lower', 'blue'), Professor('upper', 'green'), Professor('upper', 'green')]),
Tile([Professor('lower', 'brown'), Professor('lower', 'purple'), Professor('upper', 'brown'), Professor('upper', 'blue')]),
Tile([Professor('lower', 'brown'), Professor('lower', 'blue'), Professor('upper', 'purple'), Professor('upper', 'green')])
]
puzzle = Puzzle(tiles)
for s in puzzle.solution():
print(*s)
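# Context note (assumptions only, the professorspelet package itself is not shown
# here): each Tile appears to hold four Professor halves ('upper' or 'lower' body
# in one of four colours), and Puzzle.solution() is expected to yield printable
# rows of a tile arrangement in which adjacent halves pair into complete figures.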
| 92.16
| 132
| 0.609809
| 229
| 2,304
| 6.100437
| 0.091703
| 0.320687
| 0.206156
| 0.200429
| 0.909091
| 0.867573
| 0.807445
| 0.807445
| 0.793844
| 0.452398
| 0
| 0
| 0.13151
| 2,304
| 24
| 133
| 96
| 0.698151
| 0
| 0
| 0.086957
| 0
| 0
| 0.28125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.043478
| 0
| 0.043478
| 0.043478
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d1ba5bb2c91de6a7fef18386644e91b67188984e
| 78
|
py
|
Python
|
python/testData/resolve/multiFile/dunderAllConflict/DunderAllConflict.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/resolve/multiFile/dunderAllConflict/DunderAllConflict.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/resolve/multiFile/dunderAllConflict/DunderAllConflict.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
from mypackage1 import do_stuff
from mypackage2 import *
do_stuff()
# <ref>
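# Hypothetical illustration of the conflict this fixture exercises (mypackage1 and
# mypackage2 are not part of this file): if mypackage2 lists a do_stuff of its own
# in __all__, the wildcard import above rebinds the name imported explicitly from
# mypackage1, e.g.
#
#   # mypackage2/__init__.py (assumed for illustration only)
#   __all__ = ["do_stuff"]
#   def do_stuff():
#       print("mypackage2.do_stuff shadows mypackage1.do_stuff")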
| 13
| 31
| 0.75641
| 11
| 78
| 5.181818
| 0.636364
| 0.280702
| 0.45614
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030769
| 0.166667
| 78
| 5
| 32
| 15.6
| 0.846154
| 0.064103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d1d65dc2638adcdfbf63791b45078b6b4f52bf4d
| 288
|
py
|
Python
|
src/pybel_tools/selection/__init__.py
|
cthoyt/pybel-tools
|
0bc9f9267178d164dab210f24255793cf4e32ab0
|
[
"MIT"
] | 6
|
2017-03-09T14:28:19.000Z
|
2019-11-21T00:00:15.000Z
|
src/pybel_tools/selection/__init__.py
|
pybel/pybel-tools
|
0bc9f9267178d164dab210f24255793cf4e32ab0
|
[
"MIT"
] | 134
|
2016-11-24T11:10:03.000Z
|
2020-09-10T17:03:13.000Z
|
src/pybel_tools/selection/__init__.py
|
cthoyt/pybel-tools
|
0bc9f9267178d164dab210f24255793cf4e32ab0
|
[
"MIT"
] | 5
|
2017-03-08T13:28:02.000Z
|
2020-04-05T02:23:17.000Z
|
# -*- coding: utf-8 -*-
"""Functions to help select data from networks."""
from .group_nodes import * # noqa: F401,F403
from .metapaths import * # noqa: F401,F403
from .paths import * # noqa: F401,F403
from .search import * # noqa: F401,F403
from .utils import * # noqa: F401,F403
| 28.8
| 50
| 0.670139
| 41
| 288
| 4.682927
| 0.487805
| 0.260417
| 0.364583
| 0.46875
| 0.458333
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132479
| 0.1875
| 288
| 9
| 51
| 32
| 0.688034
| 0.510417
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ae08c7f151b01a4a80b27ca76bab423b1c53297d
| 150
|
py
|
Python
|
models/oauth/microsoft/__init__.py
|
Lulla-App/data_access
|
c8ebe473465d91d70f70ed2151e3ec7fa4ac5b0c
|
[
"MIT"
] | null | null | null |
models/oauth/microsoft/__init__.py
|
Lulla-App/data_access
|
c8ebe473465d91d70f70ed2151e3ec7fa4ac5b0c
|
[
"MIT"
] | null | null | null |
models/oauth/microsoft/__init__.py
|
Lulla-App/data_access
|
c8ebe473465d91d70f70ed2151e3ec7fa4ac5b0c
|
[
"MIT"
] | null | null | null |
from .microsoft_oauth_token import MicrosoftOAuthToken
from .microsoft_scope import MicrosoftScope
from .microsoft_scope_uri import MicrosoftScopeUri
| 37.5
| 54
| 0.9
| 17
| 150
| 7.647059
| 0.588235
| 0.3
| 0.276923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 150
| 3
| 55
| 50
| 0.942029
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ae209afeb4625ab335c51c2df3e2fa47807cfc1c
| 35,625
|
py
|
Python
|
CalculoMatriz/hog/svm_results.py
|
Corlobin/Bibliometria-Mestrado
|
c9063bc685fde40aeb59e19feb7d6ec22ba91310
|
[
"Apache-2.0"
] | null | null | null |
CalculoMatriz/hog/svm_results.py
|
Corlobin/Bibliometria-Mestrado
|
c9063bc685fde40aeb59e19feb7d6ec22ba91310
|
[
"Apache-2.0"
] | null | null | null |
CalculoMatriz/hog/svm_results.py
|
Corlobin/Bibliometria-Mestrado
|
c9063bc685fde40aeb59e19feb7d6ec22ba91310
|
[
"Apache-2.0"
] | null | null | null |
from pycm import *
from util.funcoes import *
def calculaGaussiano():
ypred = ['01', '01', '04', '13', '04', '13', '14', '13', '03', '14', '03', '23', '14', '13', '13', '04', '01', '03',
'04', '03', '01', '13', '04', '04', '04', '04', '13', '03', '04', '13', '11', '03', '29', '03', '01', '03',
'13', '02', '28', '13', '13', '13', '13', '13', '13', '29', '04', '13', '13', '13', '04', '03', '04', '13',
'13', '14', '09', '13', '23', '17', '04', '04', '13', '13', '04', '04', '04', '13', '04', '04', '04', '04',
'04', '04', '13', '13', '13', '04', '04', '04', '03', '13', '04', '13', '04', '13', '04', '04', '04', '04',
'04', '04', '04', '04', '04', '04', '04', '04', '13', '04', '13', '04', '13', '04', '04', '13', '04', '04',
'04', '04', '04', '04', '13', '13', '04', '04', '04', '04', '13', '04', '05', '22', '05', '03', '11', '13',
'04', '13', '15', '20', '24', '12', '13', '01', '05', '03', '13', '13', '13', '05', '13', '04', '13', '14',
'08', '29', '03', '23', '07', '04', '06', '13', '06', '04', '13', '04', '12', '04', '13', '12', '04', '13',
'13', '06', '12', '13', '12', '13', '13', '13', '04', '13', '13', '13', '04', '04', '19', '13', '13', '13',
'15', '13', '23', '22', '12', '15', '04', '23', '04', '03', '04', '03', '28', '14', '13', '15', '04', '03',
'09', '02', '03', '03', '13', '13', '03', '15', '04', '04', '13', '13', '24', '08', '24', '24', '16', '04',
'29', '08', '24', '24', '12', '13', '24', '04', '04', '16', '08', '21', '08', '08', '04', '13', '03', '24',
'01', '08', '24', '04', '29', '16', '09', '04', '19', '15', '13', '12', '07', '03', '09', '05', '13', '04',
'04', '04', '09', '12', '07', '19', '04', '04', '09', '16', '09', '09', '09', '02', '25', '09', '09', '04',
'02', '13', '13', '03', '03', '11', '13', '29', '13', '13', '04', '07', '09', '13', '04', '12', '13', '04',
'04', '04', '04', '04', '04', '19', '14', '04', '04', '03', '14', '15', '08', '03', '23', '17', '13', '03',
'11', '13', '16', '14', '03', '01', '04', '03', '03', '03', '13', '08', '23', '11', '04', '13', '15', '12',
'14', '04', '04', '27', '01', '13', '09', '10', '13', '08', '13', '02', '12', '13', '12', '14', '04', '04',
'02', '24', '07', '04', '04', '14', '04', '10', '13', '08', '03', '14', '12', '03', '12', '13', '12', '14',
'13', '04', '04', '04', '04', '04', '04', '04', '13', '04', '04', '13', '04', '04', '04', '04', '13', '04',
'04', '04', '13', '04', '04', '03', '04', '04', '04', '04', '13', '13', '01', '13', '13', '13', '04', '13',
'03', '04', '04', '04', '13', '14', '04', '02', '04', '04', '13', '04', '24', '03', '14', '04', '04', '17',
'13', '13', '04', '04', '13', '13', '13', '04', '13', '04', '23', '15', '13', '04', '04', '04', '14', '04',
'13', '04', '04', '04', '13', '04', '09', '09', '07', '04', '04', '13', '04', '13', '13', '04', '13', '13',
'16', '16', '24', '16', '24', '13', '13', '16', '16', '08', '16', '13', '16', '16', '12', '19', '02', '09',
'08', '04', '16', '12', '13', '03', '14', '13', '16', '05', '16', '15', '04', '02', '13', '03', '04', '13',
'14', '11', '04', '05', '25', '04', '04', '13', '04', '13', '13', '04', '14', '13', '13', '17', '04', '17',
'03', '04', '13', '04', '04', '13', '18', '04', '18', '18', '18', '14', '13', '09', '13', '09', '01', '11',
'02', '09', '03', '04', '18', '11', '17', '13', '12', '04', '13', '13', '13', '01', '13', '14', '01', '13',
'24', '24', '19', '12', '14', '03', '13', '03', '03', '13', '08', '29', '04', '13', '13', '16', '08', '21',
'13', '04', '04', '04', '03', '24', '19', '19', '03', '16', '03', '05', '24', '20', '03', '20', '28', '02',
'03', '04', '20', '02', '20', '13', '08', '03', '13', '04', '04', '12', '15', '13', '13', '04', '13', '04',
'13', '03', '04', '13', '04', '13', '03', '22', '01', '02', '19', '02', '13', '23', '28', '13', '04', '02',
'21', '27', '13', '08', '01', '03', '23', '15', '02', '21', '29', '02', '28', '03', '14', '21', '14', '04',
'04', '13', '04', '13', '04', '13', '04', '04', '03', '04', '04', '04', '04', '04', '17', '04', '03', '04',
'04', '04', '04', '04', '04', '04', '04', '13', '04', '13', '04', '04', '03', '04', '13', '13', '04', '04',
'15', '13', '04', '04', '13', '03', '04', '04', '04', '13', '04', '11', '04', '04', '04', '04', '04', '04',
'13', '04', '04', '13', '04', '04', '08', '02', '10', '24', '24', '08', '08', '13', '12', '04', '24', '03',
'13', '04', '02', '08', '24', '03', '04', '09', '19', '24', '24', '24', '13', '13', '12', '04', '12', '24',
'04', '13', '10', '13', '03', '25', '13', '13', '13', '03', '13', '04', '04', '04', '03', '03', '13', '13',
'03', '13', '13', '03', '13', '13', '13', '17', '13', '13', '13', '04', '13', '03', '13', '04', '13', '03',
'04', '04', '04', '04', '04', '13', '03', '04', '04', '04', '13', '04', '13', '13', '13', '13', '04', '08',
'13', '04', '13', '04', '04', '13', '04', '13', '14', '04', '13', '15', '13', '13', '04', '04', '04', '07',
'03', '04', '04', '13', '04', '13', '13', '04', '04', '04', '04', '13', '03', '13', '13', '02', '13', '13',
'04', '13', '03', '13', '03', '28', '04', '13', '03', '13', '14', '13', '03', '04', '04', '28', '14', '13',
'03', '13', '04', '13', '04', '13', '04', '13', '07', '28', '04', '15', '04', '03', '04', '04', '04', '04',
'04', '04', '04', '04', '25', '04', '11', '23', '14', '13', '13', '14', '13', '11', '04', '17', '03', '04',
'04', '04', '11', '16', '08', '04']
ytest = ['01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01',
'01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '02', '02', '02', '02', '02', '02',
'02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02',
'02', '02', '02', '02', '02', '02', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03',
'03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03',
'04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04',
'04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '05', '05', '05', '05', '05', '05',
'05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05',
'05', '05', '05', '05', '05', '05', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06',
'06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06',
'07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07',
'07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '08', '08', '08', '08', '08', '08',
'08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08',
'08', '08', '08', '08', '08', '08', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09',
'09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09',
'10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10',
'10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '11', '11', '11', '11', '11', '11',
'11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11',
'11', '11', '11', '11', '11', '11', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12',
'12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12',
'13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13',
'13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '14', '14', '14', '14', '14', '14',
'14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14',
'14', '14', '14', '14', '14', '14', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15',
'15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15',
'16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16',
'16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '17', '17', '17', '17', '17', '17',
'17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17',
'17', '17', '17', '17', '17', '17', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18',
'18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18',
'19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19',
'19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '20', '20', '20', '20', '20', '20',
'20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20',
'20', '20', '20', '20', '20', '20', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21',
'21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21',
'22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22',
'22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '23', '23', '23', '23', '23', '23',
'23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23',
'23', '23', '23', '23', '23', '23', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24',
'24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24',
'25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25',
'25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '26', '26', '26', '26', '26', '26',
'26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26',
'26', '26', '26', '26', '26', '26', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27',
'27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27',
'28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28',
'28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '29', '29', '29', '29', '29', '29',
'29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29',
'29', '29', '29', '29', '29', '29']
cm = ConfusionMatrix(actual_vector=ytest, predict_vector=ypred)
print(cm)
print(calculaMediaMicroTPR(cm))
def calculaPolynomial():
ypred = ['21', '02', '01', '26', '25', '25', '19', '03', '17', '01', '28', '16', '01', '26', '17', '22', '02', '01',
'29', '01', '02', '14', '04', '04', '27', '13', '01', '14', '26', '04', '10', '02', '11', '19', '11', '14',
'14', '21', '29', '01', '04', '01', '03', '25', '10', '11', '03', '01', '13', '02', '11', '13', '22', '02',
'02', '01', '12', '20', '29', '02', '03', '04', '17', '25', '25', '04', '03', '22', '22', '04', '03', '04',
'04', '13', '02', '03', '04', '25', '13', '03', '19', '01', '02', '17', '03', '04', '03', '13', '14', '01',
'04', '26', '04', '14', '04', '04', '15', '04', '04', '04', '13', '04', '20', '16', '04', '04', '04', '04',
'26', '04', '26', '26', '04', '04', '04', '04', '13', '04', '13', '13', '05', '10', '12', '17', '05', '07',
'26', '28', '05', '20', '24', '24', '13', '05', '05', '16', '15', '25', '01', '05', '14', '22', '01', '27',
'08', '16', '25', '05', '16', '15', '06', '15', '06', '04', '06', '29', '12', '23', '13', '12', '06', '23',
'06', '16', '24', '06', '24', '13', '06', '16', '13', '23', '26', '06', '06', '06', '08', '06', '06', '06',
'02', '15', '09', '11', '12', '29', '14', '09', '22', '20', '28', '20', '09', '28', '09', '01', '25', '16',
'24', '02', '18', '18', '16', '11', '09', '02', '18', '15', '11', '28', '08', '08', '08', '08', '20', '23',
'16', '08', '24', '24', '08', '04', '08', '18', '23', '16', '08', '08', '08', '08', '27', '26', '03', '08',
'19', '08', '24', '23', '08', '16', '20', '11', '19', '10', '05', '12', '12', '10', '19', '05', '19', '23',
'23', '18', '12', '12', '07', '19', '18', '18', '08', '16', '24', '09', '09', '18', '17', '09', '09', '25',
'07', '17', '15', '14', '15', '24', '05', '21', '15', '25', '17', '20', '29', '17', '27', '12', '13', '25',
'22', '13', '13', '07', '13', '19', '25', '10', '03', '01', '29', '17', '08', '14', '09', '11', '02', '14',
'21', '02', '16', '10', '27', '08', '11', '25', '29', '07', '23', '08', '27', '16', '29', '03', '11', '12',
'02', '03', '15', '07', '12', '28', '09', '10', '04', '08', '13', '20', '12', '14', '12', '07', '15', '02',
'16', '08', '12', '29', '22', '18', '01', '19', '17', '08', '20', '26', '12', '07', '09', '26', '12', '07',
'11', '13', '26', '15', '13', '14', '13', '13', '15', '04', '13', '29', '27', '17', '13', '17', '16', '03',
'13', '23', '15', '13', '23', '20', '14', '15', '04', '13', '04', '14', '07', '13', '15', '26', '13', '14',
'20', '26', '04', '01', '26', '01', '14', '02', '15', '18', '14', '14', '24', '01', '01', '17', '22', '09',
'15', '02', '01', '01', '14', '12', '04', '13', '13', '15', '10', '07', '28', '25', '03', '22', '28', '25',
'13', '15', '13', '10', '05', '13', '09', '09', '12', '15', '27', '15', '09', '26', '25', '29', '14', '04',
'16', '16', '24', '16', '24', '23', '23', '16', '16', '08', '16', '27', '24', '16', '20', '19', '20', '16',
'08', '03', '16', '12', '16', '07', '22', '28', '16', '24', '16', '26', '01', '02', '23', '25', '25', '26',
'11', '09', '13', '05', '21', '23', '17', '13', '27', '13', '04', '22', '02', '20', '01', '07', '15', '05',
'20', '18', '28', '04', '07', '17', '20', '18', '18', '18', '20', '18', '13', '19', '20', '09', '20', '11',
'18', '12', '20', '22', '16', '19', '09', '06', '12', '18', '26', '16', '14', '18', '16', '14', '19', '10',
'24', '08', '19', '24', '19', '19', '13', '16', '21', '09', '08', '24', '07', '10', '02', '08', '08', '16',
'25', '10', '17', '09', '19', '24', '19', '19', '25', '16', '26', '24', '08', '12', '15', '20', '07', '07',
'15', '17', '20', '12', '20', '16', '08', '25', '14', '13', '27', '24', '20', '28', '26', '04', '10', '10',
'14', '18', '11', '22', '04', '13', '25', '17', '29', '08', '21', '21', '17', '21', '28', '29', '04', '21',
'08', '11', '10', '08', '19', '19', '21', '18', '12', '24', '21', '21', '12', '02', '28', '08', '18', '23',
'13', '03', '11', '02', '03', '04', '13', '14', '17', '22', '13', '22', '29', '22', '21', '13', '17', '13',
'22', '22', '22', '13', '28', '22', '22', '01', '29', '04', '22', '13', '15', '17', '13', '13', '23', '23',
'15', '04', '13', '04', '13', '23', '23', '26', '04', '23', '23', '11', '23', '23', '23', '23', '23', '23',
'26', '23', '13', '06', '03', '27', '08', '08', '19', '24', '24', '08', '08', '04', '12', '13', '24', '22',
'16', '25', '21', '24', '24', '16', '18', '24', '19', '24', '24', '24', '13', '16', '24', '13', '24', '24',
'04', '25', '12', '04', '20', '07', '17', '03', '13', '13', '15', '25', '04', '13', '01', '25', '04', '01',
'25', '26', '01', '01', '25', '03', '04', '07', '25', '04', '05', '25', '04', '17', '26', '17', '26', '02',
'04', '26', '14', '17', '15', '26', '26', '04', '04', '15', '17', '04', '26', '04', '13', '01', '03', '08',
'17', '04', '04', '14', '14', '04', '17', '26', '02', '11', '27', '02', '02', '01', '27', '28', '22', '11',
'29', '29', '02', '11', '29', '01', '17', '27', '22', '27', '27', '27', '07', '15', '15', '02', '27', '13',
'10', '28', '07', '14', '20', '28', '28', '15', '29', '13', '15', '22', '10', '04', '15', '10', '28', '04',
'14', '04', '26', '10', '13', '25', '04', '25', '12', '12', '03', '20', '23', '02', '13', '18', '23', '09',
'13', '13', '04', '17', '12', '11', '21', '16', '21', '14', '23', '14', '13', '29', '27', '29', '29', '07',
'27', '12', '19', '16', '24', '02']
ytest = ['01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01',
'01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '02', '02', '02', '02', '02', '02',
'02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02',
'02', '02', '02', '02', '02', '02', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03',
'03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03',
'04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04',
'04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '05', '05', '05', '05', '05', '05',
'05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05',
'05', '05', '05', '05', '05', '05', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06',
'06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06',
'07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07',
'07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '08', '08', '08', '08', '08', '08',
'08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08',
'08', '08', '08', '08', '08', '08', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09',
'09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09',
'10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10',
'10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '11', '11', '11', '11', '11', '11',
'11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11',
'11', '11', '11', '11', '11', '11', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12',
'12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12',
'13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13',
'13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '14', '14', '14', '14', '14', '14',
'14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14',
'14', '14', '14', '14', '14', '14', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15',
'15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15',
'16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16',
'16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '17', '17', '17', '17', '17', '17',
'17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17',
'17', '17', '17', '17', '17', '17', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18',
'18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18',
'19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19',
'19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '20', '20', '20', '20', '20', '20',
'20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20',
'20', '20', '20', '20', '20', '20', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21',
'21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21',
'22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22',
'22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '23', '23', '23', '23', '23', '23',
'23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23',
'23', '23', '23', '23', '23', '23', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24',
'24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24',
'25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25',
'25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '26', '26', '26', '26', '26', '26',
'26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26',
'26', '26', '26', '26', '26', '26', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27',
'27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27',
'28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28',
'28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '29', '29', '29', '29', '29', '29',
'29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29',
'29', '29', '29', '29', '29', '29']
cm = ConfusionMatrix(actual_vector=ytest, predict_vector=ypred)
print(cm)
print(calculaMediaMicroTPR(cm))
def calculaRBF():
ypred = ['01', '01', '04', '13', '04', '13', '14', '13', '03', '14', '03', '23', '14', '13', '13', '04', '01', '03',
'04', '03', '01', '13', '04', '04', '04', '04', '13', '03', '04', '13', '11', '03', '29', '03', '01', '03',
'13', '02', '28', '13', '13', '13', '13', '13', '13', '29', '04', '13', '13', '13', '04', '03', '04', '13',
'13', '14', '09', '13', '23', '17', '04', '04', '13', '13', '04', '04', '04', '13', '04', '04', '04', '04',
'04', '04', '13', '13', '13', '04', '04', '04', '03', '13', '04', '13', '04', '13', '04', '04', '04', '04',
'04', '04', '04', '04', '04', '04', '04', '04', '13', '04', '13', '04', '13', '04', '04', '13', '04', '04',
'04', '04', '04', '04', '13', '13', '04', '04', '04', '04', '13', '04', '05', '22', '05', '03', '11', '13',
'04', '13', '15', '20', '24', '12', '13', '01', '05', '03', '13', '13', '13', '05', '13', '04', '13', '14',
'08', '29', '03', '23', '07', '04', '06', '13', '06', '04', '13', '04', '12', '04', '13', '12', '04', '13',
'13', '06', '12', '13', '12', '13', '13', '13', '04', '13', '13', '13', '04', '04', '19', '13', '13', '13',
'15', '13', '23', '22', '12', '15', '04', '23', '04', '03', '04', '03', '28', '14', '13', '15', '04', '03',
'09', '02', '03', '03', '13', '13', '03', '15', '04', '04', '13', '13', '24', '08', '24', '24', '16', '04',
'29', '08', '24', '24', '12', '13', '24', '04', '04', '16', '08', '21', '08', '08', '04', '13', '03', '24',
'01', '08', '24', '04', '29', '16', '09', '04', '19', '15', '13', '12', '07', '03', '09', '05', '13', '04',
'04', '04', '09', '12', '07', '19', '04', '04', '09', '16', '09', '09', '09', '02', '25', '09', '09', '04',
'02', '13', '13', '03', '03', '11', '13', '29', '13', '13', '04', '07', '09', '13', '04', '12', '13', '04',
'04', '04', '04', '04', '04', '19', '14', '04', '04', '03', '14', '15', '08', '03', '23', '17', '13', '03',
'11', '13', '16', '14', '03', '01', '04', '03', '03', '03', '13', '08', '23', '11', '04', '13', '15', '12',
'14', '04', '04', '27', '01', '13', '09', '10', '13', '08', '13', '02', '12', '13', '12', '14', '04', '04',
'02', '24', '07', '04', '04', '14', '04', '10', '13', '08', '03', '14', '12', '03', '12', '13', '12', '14',
'13', '04', '04', '04', '04', '04', '04', '04', '13', '04', '04', '13', '04', '04', '04', '04', '13', '04',
'04', '04', '13', '04', '04', '03', '04', '04', '04', '04', '13', '13', '01', '13', '13', '13', '04', '13',
'03', '04', '04', '04', '13', '14', '04', '02', '04', '04', '13', '04', '24', '03', '14', '04', '04', '17',
'13', '13', '04', '04', '13', '13', '13', '04', '13', '04', '23', '15', '13', '04', '04', '04', '14', '04',
'13', '04', '04', '04', '13', '04', '09', '09', '07', '04', '04', '13', '04', '13', '13', '04', '13', '13',
'16', '16', '24', '16', '24', '13', '13', '16', '16', '08', '16', '13', '16', '16', '12', '19', '02', '09',
'08', '04', '16', '12', '13', '03', '14', '13', '16', '05', '16', '15', '04', '02', '13', '03', '04', '13',
'14', '11', '04', '05', '25', '04', '04', '13', '04', '13', '13', '04', '14', '13', '13', '17', '04', '17',
'03', '04', '13', '04', '04', '13', '18', '04', '18', '18', '18', '14', '13', '09', '13', '09', '01', '11',
'02', '09', '03', '04', '18', '11', '17', '13', '12', '04', '13', '13', '13', '01', '13', '14', '01', '13',
'24', '24', '19', '12', '14', '03', '13', '03', '03', '13', '08', '29', '04', '13', '13', '16', '08', '21',
'13', '04', '04', '04', '03', '24', '19', '19', '03', '16', '03', '05', '24', '20', '03', '20', '28', '02',
'03', '04', '20', '02', '20', '13', '08', '03', '13', '04', '04', '12', '15', '13', '13', '04', '13', '04',
'13', '03', '04', '13', '04', '13', '03', '22', '01', '02', '19', '02', '13', '23', '28', '13', '04', '02',
'21', '27', '13', '08', '01', '03', '23', '15', '02', '21', '29', '02', '28', '03', '14', '21', '14', '04',
'04', '13', '04', '13', '04', '13', '04', '04', '03', '04', '04', '04', '04', '04', '17', '04', '03', '04',
'04', '04', '04', '04', '04', '04', '04', '13', '04', '13', '04', '04', '03', '04', '13', '13', '04', '04',
'15', '13', '04', '04', '13', '03', '04', '04', '04', '13', '04', '11', '04', '04', '04', '04', '04', '04',
'13', '04', '04', '13', '04', '04', '08', '02', '10', '24', '24', '08', '08', '13', '12', '04', '24', '03',
'13', '04', '02', '08', '24', '03', '04', '09', '19', '24', '24', '24', '13', '13', '12', '04', '12', '24',
'04', '13', '10', '13', '03', '25', '13', '13', '13', '03', '13', '04', '04', '04', '03', '03', '13', '13',
'03', '13', '13', '03', '13', '13', '13', '17', '13', '13', '13', '04', '13', '03', '13', '04', '13', '03',
'04', '04', '04', '04', '04', '13', '03', '04', '04', '04', '13', '04', '13', '13', '13', '13', '04', '08',
'13', '04', '13', '04', '04', '13', '04', '13', '14', '04', '13', '15', '13', '13', '04', '04', '04', '07',
'03', '04', '04', '13', '04', '13', '13', '04', '04', '04', '04', '13', '03', '13', '13', '02', '13', '13',
'04', '13', '03', '13', '03', '28', '04', '13', '03', '13', '14', '13', '03', '04', '04', '28', '14', '13',
'03', '13', '04', '13', '04', '13', '04', '13', '07', '28', '04', '15', '04', '03', '04', '04', '04', '04',
'04', '04', '04', '04', '25', '04', '11', '23', '14', '13', '13', '14', '13', '11', '04', '17', '03', '04',
'04', '04', '11', '16', '08', '04']
ytest = ['01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01',
'01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '01', '02', '02', '02', '02', '02', '02',
'02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02', '02',
'02', '02', '02', '02', '02', '02', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03',
'03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03', '03',
'04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04',
'04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '04', '05', '05', '05', '05', '05', '05',
'05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05', '05',
'05', '05', '05', '05', '05', '05', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06',
'06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06', '06',
'07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07',
'07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '07', '08', '08', '08', '08', '08', '08',
'08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08', '08',
'08', '08', '08', '08', '08', '08', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09',
'09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09', '09',
'10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10',
'10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '10', '11', '11', '11', '11', '11', '11',
'11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11', '11',
'11', '11', '11', '11', '11', '11', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12',
'12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12', '12',
'13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13',
'13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '13', '14', '14', '14', '14', '14', '14',
'14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14', '14',
'14', '14', '14', '14', '14', '14', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15',
'15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15', '15',
'16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16',
'16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '16', '17', '17', '17', '17', '17', '17',
'17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17', '17',
'17', '17', '17', '17', '17', '17', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18',
'18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18', '18',
'19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19',
'19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '19', '20', '20', '20', '20', '20', '20',
'20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20', '20',
'20', '20', '20', '20', '20', '20', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21',
'21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21', '21',
'22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22',
'22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '22', '23', '23', '23', '23', '23', '23',
'23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23', '23',
'23', '23', '23', '23', '23', '23', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24',
'24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24', '24',
'25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25',
'25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '25', '26', '26', '26', '26', '26', '26',
'26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26', '26',
'26', '26', '26', '26', '26', '26', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27',
'27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27', '27',
'28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28',
'28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '28', '29', '29', '29', '29', '29', '29',
'29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29', '29',
'29', '29', '29', '29', '29', '29']
cm = ConfusionMatrix(actual_vector=ytest, predict_vector=ypred)
print(cm)
print(calculaMediaMicroTPR(cm))
calculaRBF()
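# calculaMediaMicroTPR is imported from util.funcoes, which is not shown here.
# A hypothetical sketch of a micro-averaged TPR over label vectors like the ones
# above (for single-label multiclass data this equals overall accuracy):
def micro_tpr_sketch(actual, predicted):
    hits = sum(1 for a, p in zip(actual, predicted) if a == p)
    return hits / len(actual)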
| 113.817891
| 120
| 0.304028
| 5,279
| 35,625
| 2.050578
| 0.008903
| 0.132286
| 0.129146
| 0.126374
| 0.835751
| 0.832979
| 0.831871
| 0.830393
| 0.830393
| 0.830393
| 0
| 0.393176
| 0.254653
| 35,625
| 313
| 121
| 113.817891
| 0.014499
| 0
| 0
| 0.822006
| 0
| 0
| 0.293044
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009709
| false
| 0
| 0.006472
| 0
| 0.016181
| 0.019417
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
ae2a727ebd3d5c914b4dab6463d1d2b316b63491
| 2,494
|
py
|
Python
|
Selenita/licencaApple.py
|
wakeupmh/pythonProjects
|
a0e5a3be27115b18d0cef5141741f584447dbd07
|
[
"MIT"
] | 1
|
2019-05-14T13:03:20.000Z
|
2019-05-14T13:03:20.000Z
|
Selenita/licencaApple.py
|
wakeupmh/pythonProjects
|
a0e5a3be27115b18d0cef5141741f584447dbd07
|
[
"MIT"
] | null | null | null |
Selenita/licencaApple.py
|
wakeupmh/pythonProjects
|
a0e5a3be27115b18d0cef5141741f584447dbd07
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import acesso
global usr, passw
def inputs():
print("\n\n################ BEM VINDO AO GERADOR DE LICENÇAS APPLE ###############\n")
usr = input("Digite o e-mail da sua conta Apple Developer:\n")
if(usr.strip() == ''):
print("#ERROR00 - Preencha todos os campos\n")
usr = input("Digite o e-mail da sua conta Apple Developer:\n")
passw = input("Digite a senha da sua conta Apple Developer:\n")
if(passw.strip() == ''):
print("#ERROR00 - Preencha todos os campos\n")
passw = input("Digite a senha da sua conta Apple Developer:\n")
optd = input("Escolha uma opção:\n 1 - Desenvolvimento\n 2 - Produção\n 3 - Gerar ID\n")
if(optd.strip() == ''):
print("#ERROR00 - Preencha todos os campos\n")
optd = input("Escolha uma opção:\n 1 - Desenvolvimento\n 2 - Produção\n 3 - Gerar ID\n")
if(auth(usr, optd) > 0):
inputs()
else:
if(optd == '1' or optd =='2'):
alias = input("Digite o alias do seu app: \n")
nCert = input("Digite o nome que deseja que seu Provisioning File tenha:\n")
else:
alias = input("Digite o alias do seu app (ex: com.desenvolvedor.nomeApp):\n")
nCert = input("Digite o nome que deseja que seu App ID tenha:\n")
acesso.acesso(usr, passw, optd, alias, nCert)
def inputsSemCred(usr, passw):
optd = input("Escolha uma opção:\n 1 - Desenvolvimento\n 2 - Produção\n 3 - Gerar ID\n")
if (optd.strip() == ''):
print("#ERROR00 - Preencha todos os campos\n")
optd = input("Escolha uma opção:\n 1 - Desenvolvimento\n 2 - Produção\n 3 - Gerar ID\n")
if (auth(usr, optd) > 0):
inputs()
else:
if(optd == '1' or optd =='2'):
alias = input("Digite o alias do seu app: \n")
nCert = input("Digite o nome que deseja que seu Provisioning File tenha:\n")
else:
alias = input("Digite o alias do seu app (ex: com.desenvolvedor.nomeApp):\n")
nCert = input("Digite o nome que deseja que seu App ID tenha:\n")
acesso.acesso(usr, passw, optd, alias, nCert)
def auth(usr, optd):
    # Returns the number of validation errors found in the e-mail and menu option.
    error = 0
    if usr.find('@') < 0 and usr.find('.') < 0:
        print('#ERROR01 - Digite um e-mail válido!!!\n')
        error += 1
    if not optd.isdigit():
        print('#ERROR02 - Digite um valor válido!!!\n')
        error += 1
    return error
def main():
inputs()
if __name__ == '__main__':
main()
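# Summary (English): this script prompts for Apple Developer credentials and an
# option (1 - development provisioning, 2 - production provisioning, 3 - generate
# an App ID), re-prompts on empty input, validates the values via auth(), and then
# delegates the actual work to acesso.acesso() (the acesso module is not shown here).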
| 40.885246
| 96
| 0.57498
| 359
| 2,494
| 3.972145
| 0.225627
| 0.092567
| 0.084151
| 0.042076
| 0.801543
| 0.801543
| 0.801543
| 0.798738
| 0.745442
| 0.745442
| 0
| 0.019694
| 0.267041
| 2,494
| 61
| 97
| 40.885246
| 0.760394
| 0.00842
| 0
| 0.685185
| 0
| 0.074074
| 0.47856
| 0.023463
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0.12963
| 0.018519
| 0
| 0.111111
| 0.12963
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
ae5af3c072cf92e69c243558304a25b09f0bb990
| 7,470
|
py
|
Python
|
tests/1000_interfaces/02_po.py
|
sveetch/Optimus
|
983aebeccd2ada7a5a0ab96f9296d4bba1112022
|
[
"MIT"
] | 2
|
2019-05-31T00:23:15.000Z
|
2021-04-26T07:26:16.000Z
|
tests/1000_interfaces/02_po.py
|
sveetch/Optimus
|
983aebeccd2ada7a5a0ab96f9296d4bba1112022
|
[
"MIT"
] | 27
|
2015-04-21T14:43:26.000Z
|
2022-01-29T00:42:53.000Z
|
tests/1000_interfaces/02_po.py
|
sveetch/Optimus
|
983aebeccd2ada7a5a0ab96f9296d4bba1112022
|
[
"MIT"
] | 1
|
2017-05-21T17:32:28.000Z
|
2017-05-21T17:32:28.000Z
|
# -*- coding: utf-8 -*-
import os
import shutil
from optimus.interfaces.po import po_interface
from optimus.interfaces.starter import starter_interface
from optimus.logs import set_loggers_level
def test_po_interface_init(tmpdir, fixtures_settings, starter_basic_settings):
"""
Init mode should create the POT file and the enabled languages structure with their
PO files.
"""
# Mute all other loggers from cookiecutter and its dependencies
set_loggers_level(["poyo", "cookiecutter", "binaryornot"])
basedir = tmpdir
sample_name = "basic"
destination = os.path.join(basedir, sample_name)
template_path = os.path.join(fixtures_settings.starters_path, sample_name)
project_path = os.path.join(destination, "project")
localedir_path = os.path.join(project_path, "locale")
settings = starter_basic_settings(project_path)
starter_interface(template_path, sample_name, basedir)
# Remove existing locale directory for test needs
shutil.rmtree(localedir_path)
# Start catalog
po_interface(settings, init=True)
# Expected directories and files
assert os.path.exists(localedir_path) is True
assert os.path.exists(os.path.join(localedir_path, "messages.pot")) is True
assert (
os.path.exists(
os.path.join(
localedir_path,
"en_US",
"LC_MESSAGES",
"messages.po",
)
)
is True
)
assert (
os.path.exists(
os.path.join(
localedir_path,
"en_US",
"LC_MESSAGES",
"messages.mo",
)
)
is False
)
assert (
os.path.exists(
os.path.join(
localedir_path,
"fr_FR",
"LC_MESSAGES",
"messages.po",
)
)
is True
)
assert (
os.path.exists(
os.path.join(
localedir_path,
"fr_FR",
"LC_MESSAGES",
"messages.mo",
)
)
is False
)
def test_po_interface_update(tmpdir, fixtures_settings, starter_basic_settings):
"""
Update mode should just update (or create them again if missing) the PO files for
all enabled languages.
"""
# Mute all other loggers from cookiecutter and its dependencies
set_loggers_level(["poyo", "cookiecutter", "binaryornot"])
basedir = tmpdir
sample_name = "basic"
destination = os.path.join(basedir, sample_name)
template_path = os.path.join(fixtures_settings.starters_path, sample_name)
project_path = os.path.join(destination, "project")
localedir_path = os.path.join(project_path, "locale")
settings = starter_basic_settings(project_path)
starter_interface(template_path, sample_name, basedir)
# Remove catalog files from sample
os.remove(os.path.join(localedir_path, "en_US/LC_MESSAGES/messages.po"))
os.remove(os.path.join(localedir_path, "en_US/LC_MESSAGES/messages.mo"))
os.remove(os.path.join(localedir_path, "fr_FR/LC_MESSAGES/messages.po"))
os.remove(os.path.join(localedir_path, "fr_FR/LC_MESSAGES/messages.mo"))
# Update catalog (it should create the PO files again, which will be used for assertions)
po_interface(settings, update=True)
# Expected directories and files
assert os.path.exists(localedir_path) is True
assert (
os.path.exists(
os.path.join(
localedir_path,
"en_US",
"LC_MESSAGES",
"messages.po",
)
)
is True
)
assert (
os.path.exists(
os.path.join(
localedir_path,
"fr_FR",
"LC_MESSAGES",
"messages.po",
)
)
is True
)
def test_po_interface_compile(tmpdir, fixtures_settings, starter_basic_settings):
"""
Compile mode should compile the PO files to MO files.
"""
# Mute all other loggers from cookiecutter and its dependencies
set_loggers_level(["poyo", "cookiecutter", "binaryornot"])
basedir = tmpdir
sample_name = "basic"
destination = os.path.join(basedir, sample_name)
template_path = os.path.join(fixtures_settings.starters_path, sample_name)
project_path = os.path.join(destination, "project")
localedir_path = os.path.join(project_path, "locale")
settings = starter_basic_settings(project_path)
starter_interface(template_path, sample_name, basedir)
# Remove compiled files from sample
os.remove(os.path.join(localedir_path, "en_US/LC_MESSAGES/messages.mo"))
os.remove(os.path.join(localedir_path, "fr_FR/LC_MESSAGES/messages.mo"))
# Compile MO files
po_interface(settings, compile_opt=True)
# Expected directories and files
assert os.path.exists(localedir_path) is True
assert (
os.path.exists(
os.path.join(
localedir_path,
"en_US",
"LC_MESSAGES",
"messages.mo",
)
)
is True
)
assert (
os.path.exists(
os.path.join(
localedir_path,
"fr_FR",
"LC_MESSAGES",
"messages.mo",
)
)
is True
)
def test_po_interface_all(tmpdir, fixtures_settings, starter_basic_settings):
"""
All modes combined should create the POT and languages structure, then update it and
compile the MO files.
Note that this is not really useful, since the compile and update modes always involve
initialization first.
"""
# Mute all other loggers from cookiecutter and its dependencies
set_loggers_level(["poyo", "cookiecutter", "binaryornot"])
basedir = tmpdir
sample_name = "basic"
destination = os.path.join(basedir, sample_name)
template_path = os.path.join(fixtures_settings.starters_path, sample_name)
project_path = os.path.join(destination, "project")
localedir_path = os.path.join(project_path, "locale")
settings = starter_basic_settings(project_path)
starter_interface(template_path, sample_name, basedir)
# Remove existing locale directory for test needs
shutil.rmtree(localedir_path)
# Compile MO files
po_interface(settings, init=True, update=True, compile_opt=True)
# Expected directories and files
assert os.path.exists(localedir_path) is True
assert (
os.path.exists(
os.path.join(
localedir_path,
"en_US",
"LC_MESSAGES",
"messages.po",
)
)
is True
)
assert (
os.path.exists(
os.path.join(
localedir_path,
"en_US",
"LC_MESSAGES",
"messages.mo",
)
)
is True
)
assert (
os.path.exists(
os.path.join(
localedir_path,
"fr_FR",
"LC_MESSAGES",
"messages.po",
)
)
is True
)
assert (
os.path.exists(
os.path.join(
localedir_path,
"fr_FR",
"LC_MESSAGES",
"messages.mo",
)
)
is True
)
| 28.403042
| 87
| 0.591165
| 835
| 7,470
| 5.093413
| 0.128144
| 0.07336
| 0.082295
| 0.084881
| 0.833294
| 0.826005
| 0.760169
| 0.760169
| 0.760169
| 0.760169
| 0
| 0.000196
| 0.31834
| 7,470
| 262
| 88
| 28.51145
| 0.835035
| 0.15261
| 0
| 0.731959
| 0
| 0
| 0.110701
| 0.027916
| 0
| 0
| 0
| 0
| 0.087629
| 1
| 0.020619
| false
| 0
| 0.025773
| 0
| 0.046392
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
882b1842b298522477635d60fa3a851a3bd4d65b
| 137
|
py
|
Python
|
001_HelloWorld/simple_math.py
|
ufoscout/python_examples
|
efeab0e4c220e76ef4131ff6f76c4c09ee99f4b3
|
[
"MIT"
] | null | null | null |
001_HelloWorld/simple_math.py
|
ufoscout/python_examples
|
efeab0e4c220e76ef4131ff6f76c4c09ee99f4b3
|
[
"MIT"
] | null | null | null |
001_HelloWorld/simple_math.py
|
ufoscout/python_examples
|
efeab0e4c220e76ef4131ff6f76c4c09ee99f4b3
|
[
"MIT"
] | null | null | null |
print "2 + 3 =", 2+3, "!"
print "9/2 =", 9/2
print "9.0/2 =", 9.0/2
print "9%2 =", 9%2
print "5 > -2?", 5 > -2
print "5 <= -2?", 5 <= -2
| 19.571429
| 25
| 0.408759
| 32
| 137
| 1.75
| 0.1875
| 0.142857
| 0.25
| 0.285714
| 0.732143
| 0.732143
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.240876
| 137
| 6
| 26
| 22.833333
| 0.288462
| 0
| 0
| 0
| 0
| 0
| 0.291971
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 0
| 0
| 1
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
8846e4540343419d2dfd93d0921ef581be7e41b1
| 187
|
py
|
Python
|
sumo_rl/exploration/__init__.py
|
joaovitorblabres/sumo-rl
|
ec9d178cd0289366ba0a8648da52972d31d1026e
|
[
"MIT"
] | null | null | null |
sumo_rl/exploration/__init__.py
|
joaovitorblabres/sumo-rl
|
ec9d178cd0289366ba0a8648da52972d31d1026e
|
[
"MIT"
] | null | null | null |
sumo_rl/exploration/__init__.py
|
joaovitorblabres/sumo-rl
|
ec9d178cd0289366ba0a8648da52972d31d1026e
|
[
"MIT"
] | null | null | null |
from sumo_rl.exploration.epsilon_greedy import EpsilonGreedy
from sumo_rl.exploration.epsilon_greedy import EpsilonGreedyGroups
from sumo_rl.exploration.epsilon_greedy import MOSelection
| 46.75
| 66
| 0.903743
| 24
| 187
| 6.791667
| 0.416667
| 0.147239
| 0.184049
| 0.386503
| 0.736196
| 0.736196
| 0.736196
| 0
| 0
| 0
| 0
| 0
| 0.064171
| 187
| 3
| 67
| 62.333333
| 0.931429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
ee33a44021dd0b98a4185c7188b41e9876d25b7d
| 163
|
py
|
Python
|
moto/cloudwatch/utils.py
|
jonnangle/moto-1
|
40b4e299abb732aad7f56cc0f680c0a272a46594
|
[
"Apache-2.0"
] | 3
|
2020-08-04T20:29:41.000Z
|
2020-11-09T09:28:19.000Z
|
moto/cloudwatch/utils.py
|
jonnangle/moto-1
|
40b4e299abb732aad7f56cc0f680c0a272a46594
|
[
"Apache-2.0"
] | 17
|
2020-08-28T12:53:56.000Z
|
2020-11-10T01:04:46.000Z
|
moto/cloudwatch/utils.py
|
jonnangle/moto-1
|
40b4e299abb732aad7f56cc0f680c0a272a46594
|
[
"Apache-2.0"
] | 12
|
2017-09-06T22:11:15.000Z
|
2021-05-28T17:22:31.000Z
|
from __future__ import unicode_literals
def make_arn_for_dashboard(account_id, name):
return "arn:aws:cloudwatch::{0}dashboard/{1}".format(account_id, name)
| 27.166667
| 74
| 0.785276
| 24
| 163
| 4.916667
| 0.791667
| 0.152542
| 0.220339
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013605
| 0.09816
| 163
| 5
| 75
| 32.6
| 0.789116
| 0
| 0
| 0
| 0
| 0
| 0.220859
| 0.220859
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
c98a99354aaed083fd5b475a4d82c8e0170bcb01
| 12,692
|
py
|
Python
|
iqs_client/api/mms_repository_api.py
|
thomas-bc/mms-autocref
|
1db6697f929a1c782c902923209389e337ec6961
|
[
"Apache-2.0"
] | null | null | null |
iqs_client/api/mms_repository_api.py
|
thomas-bc/mms-autocref
|
1db6697f929a1c782c902923209389e337ec6961
|
[
"Apache-2.0"
] | null | null | null |
iqs_client/api/mms_repository_api.py
|
thomas-bc/mms-autocref
|
1db6697f929a1c782c902923209389e337ec6961
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
IncQuery Server
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: 0.12.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from iqs_client.api_client import ApiClient
class MmsRepositoryApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_mms_repository_info(self, **kwargs): # noqa: E501
"""Get repository structure (orgs, projects, refs, commits) # noqa: E501
Returns the repository structure including orgs, projects, refs and commits with identifiers and names where applicable. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_mms_repository_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool return_as_list_of_descriptors: If set to true, commits are returned as descriptors
:return: MMSRepositoryInfoResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_mms_repository_info_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_mms_repository_info_with_http_info(**kwargs) # noqa: E501
return data
def get_mms_repository_info_with_http_info(self, **kwargs): # noqa: E501
"""Get repository structure (orgs, projects, refs, commits) # noqa: E501
Returns the repository structure including orgs, projects, refs and commits with identifiers and names where applicable. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_mms_repository_info_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool return_as_list_of_descriptors: If set to true, commits are returned as descriptors
:return: MMSRepositoryInfoResponse
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['return_as_list_of_descriptors'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_mms_repository_info" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'return_as_list_of_descriptors' in local_var_params:
query_params.append(('returnAsListOfDescriptors', local_var_params['return_as_list_of_descriptors'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/mms-repository.info', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MMSRepositoryInfoResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_repository_compartment_details(self, model_compartment, **kwargs): # noqa: E501
"""Retrieve detailed repository structure information for an MMS commit. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_repository_compartment_details(model_compartment, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ModelCompartment model_compartment: Model compartment descriptor. (required)
:return: MMSCommitDetails
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_repository_compartment_details_with_http_info(model_compartment, **kwargs) # noqa: E501
else:
(data) = self.get_repository_compartment_details_with_http_info(model_compartment, **kwargs) # noqa: E501
return data
def get_repository_compartment_details_with_http_info(self, model_compartment, **kwargs): # noqa: E501
"""Retrieve detailed repository structure information for an MMS commit. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_repository_compartment_details_with_http_info(model_compartment, async_req=True)
>>> result = thread.get()
:param async_req bool
:param ModelCompartment model_compartment: Model compartment descriptor. (required)
:return: MMSCommitDetails
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['model_compartment'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_repository_compartment_details" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'model_compartment' is set
if ('model_compartment' not in local_var_params or
local_var_params['model_compartment'] is None):
raise ValueError("Missing the required parameter `model_compartment` when calling `get_repository_compartment_details`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'model_compartment' in local_var_params:
body_params = local_var_params['model_compartment']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/mms-repository.details', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='MMSCommitDetails', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def update_mms_repository(self, **kwargs): # noqa: E501
"""Update repository structure from MMS # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_mms_repository(async_req=True)
>>> result = thread.get()
:param async_req bool
:param RepositoryUpdate repository_update: Repository update request.
:return: RepositoryUpdateResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_mms_repository_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.update_mms_repository_with_http_info(**kwargs) # noqa: E501
return data
def update_mms_repository_with_http_info(self, **kwargs): # noqa: E501
"""Update repository structure from MMS # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_mms_repository_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param RepositoryUpdate repository_update: Repository update request.
:return: RepositoryUpdateResponse
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['repository_update'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_mms_repository" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'repository_update' in local_var_params:
body_params = local_var_params['repository_update']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/mms-repository.update', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='RepositoryUpdateResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
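# Hedged usage sketch, following the docstrings above (connection details such as the
# server URL and credentials are omitted; the import path mirrors this module's location):
from iqs_client.api_client import ApiClient
from iqs_client.api.mms_repository_api import MmsRepositoryApi
api = MmsRepositoryApi(ApiClient())
# Synchronous call: blocks until the repository structure is returned.
info = api.get_mms_repository_info(return_as_list_of_descriptors=True)
# Asynchronous call: returns a request thread immediately; .get() waits for the result.
thread = api.get_mms_repository_info(async_req=True)
info = thread.get()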
| 39.91195
| 146
| 0.644185
| 1,440
| 12,692
| 5.369444
| 0.120833
| 0.043456
| 0.057941
| 0.027936
| 0.875841
| 0.850879
| 0.848422
| 0.834972
| 0.826694
| 0.809364
| 0
| 0.014776
| 0.274819
| 12,692
| 317
| 147
| 40.037855
| 0.825293
| 0.345257
| 0
| 0.723926
| 1
| 0
| 0.178497
| 0.067343
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042945
| false
| 0
| 0.02454
| 0
| 0.128834
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c9b93578cb58e1ac13d528c067957804f7cdd503
| 7,616
|
py
|
Python
|
Cifar10/models/Bin_VGG.py
|
coooorn/Pytorch-XNOR-Net
|
5605c4546017c2cdeb7214b0295efa89c1a9f1f3
|
[
"BSD-3-Clause"
] | 71
|
2018-05-13T04:27:41.000Z
|
2022-02-09T03:26:22.000Z
|
Cifar10/models/Bin_VGG.py
|
coooorn/Pytorch-XNOR-Net
|
5605c4546017c2cdeb7214b0295efa89c1a9f1f3
|
[
"BSD-3-Clause"
] | 11
|
2018-06-12T13:40:10.000Z
|
2021-02-07T12:47:25.000Z
|
Cifar10/models/Bin_VGG.py
|
coooorn/Pytorch-XNOR-Net
|
5605c4546017c2cdeb7214b0295efa89c1a9f1f3
|
[
"BSD-3-Clause"
] | 23
|
2018-07-03T12:33:41.000Z
|
2021-08-25T04:26:24.000Z
|
import torch
import math
import torch.nn as nn
from torch.autograd import Variable
import torch.nn.functional as F
from collections import OrderedDict
import sys
sys.path.append("..")
from util import BinLinear
from util import BinConv2d
cfg = {
'VGG11': ['M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
'VGG13': [64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
'VGG16': [64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'],
'VGG19': [64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'],
}
class Bin_VGG_train(nn.Module):
def __init__(self, vgg_name):
super(Bin_VGG_train, self).__init__()
self.features = self._make_layers(cfg[vgg_name])
self.classifier = nn.Linear(512, 10)
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2./n))
m.bias.data.zero_()
def forward(self, x):
out = self.features(x)
out = out.view(out.size(0), -1)
out = self.classifier(out)
return out
def _make_layers(self, cfg):
layers = OrderedDict([
('conv0', nn.Conv2d(3, 64, kernel_size=3, padding=1)),
('bn0', nn.BatchNorm2d(64)),
('relu0', nn.ReLU(inplace=True))
])
in_channels = 64
cnt = 1
for x in cfg:
if x == 'M':
layers['pool'+str(cnt)] = nn.MaxPool2d(kernel_size=2, stride=2)
cnt += 1
else:
layers['conv'+str(cnt)] = BinConv2d(in_channels=in_channels, out_channels=x, kernel_size=3, padding=1, istrain=True)
cnt += 1
layers['bn'+str(cnt)] = nn.BatchNorm2d(x)
cnt += 1
layers['relu'+str(cnt)] = nn.ReLU(inplace=True)
cnt += 1
in_channels = x
layers['pool'+str(cnt)] = nn.AvgPool2d(kernel_size=1, stride=1)
return nn.Sequential(layers)
class Bin_VGG_test(nn.Module):
def __init__(self, vgg_name):
super(Bin_VGG_test, self).__init__()
self.features = self._make_layers(cfg[vgg_name])
self.classifier = nn.Linear(512, 10)
def forward(self, x):
out = self.features(x)
out = out.view(out.size(0), -1)
out = self.classifier(out)
return out
def _make_layers(self, cfg):
layers = OrderedDict([
('conv0', nn.Conv2d(3, 64, kernel_size=3, padding=1)),
('bn0', nn.BatchNorm2d(64)),
('relu0', nn.ReLU(inplace=True))
])
in_channels = 64
cnt = 1
for x in cfg:
if x == 'M':
layers['pool'+str(cnt)] = nn.MaxPool2d(kernel_size=2, stride=2)
cnt += 1
else:
layers['conv'+str(cnt)] = BinConv2d(in_channels=in_channels, out_channels=x, kernel_size=3, padding=1, istrain=False)
cnt += 1
layers['bn'+str(cnt)] = nn.BatchNorm2d(x)
cnt += 1
layers['relu'+str(cnt)] = nn.ReLU(inplace=True)
cnt += 1
in_channels = x
layers['pool'+str(cnt)] = nn.AvgPool2d(kernel_size=1, stride=1)
return nn.Sequential(layers)
class NIN_train(nn.Module):
def __init__(self):
super(NIN_train, self).__init__()
self.conv1 = nn.Conv2d(3, 192, kernel_size=5, stride=1, padding=2)
self.bn1 = nn.BatchNorm2d(192, eps=1e-4, momentum=0.1, affine=False)
self.conv2 = BinConv2d(192, 160, kernel_size=1, stride=1, padding=0, istrain=True)
self.conv3 = BinConv2d(160, 96, kernel_size=1, stride=1, padding=0, istrain=True)
self.pool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.conv4 = BinConv2d(96, 192, kernel_size=5, stride=1, padding=2, istrain=True, drop=0.5)
self.conv5 = BinConv2d(192, 192, kernel_size=1, stride=1, padding=0, istrain=True)
self.conv6 = BinConv2d(192, 192, kernel_size=1, stride=1, padding=0, istrain=True)
self.pool2 = nn.AvgPool2d(kernel_size=3, stride=2, padding=1)
self.conv7 = BinConv2d(192, 192, kernel_size=3, stride=1, padding=1, istrain=True, drop=0.5)
self.conv8 = BinConv2d(192, 192, kernel_size=1, stride=1, padding=0, istrain=True)
self.bn2 = nn.BatchNorm2d(192, eps=1e-4, momentum=0.1, affine=False)
self.conv9 = nn.Conv2d(192, 10, kernel_size=1, stride=1, padding=0)
self.pool3 = nn.AvgPool2d(kernel_size=8, stride=1, padding=0)
for m in self.modules():
if isinstance(m, nn.Conv2d):
m.weight.data.normal_(0, 0.05)
m.bias.data.zero_()
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = F.relu(x, inplace=True)
x = self.conv2(x)
x = F.relu(x, inplace=True)
x = self.conv3(x)
x = F.relu(x, inplace=True)
x = self.pool1(x)
x = self.conv4(x)
x = F.relu(x, inplace=True)
x = self.conv5(x)
x = F.relu(x, inplace=True)
x = self.conv6(x)
x = F.relu(x, inplace=True)
x = self.pool2(x)
x = self.conv7(x)
x = F.relu(x, inplace=True)
x = self.conv8(x)
x = F.relu(x, inplace=True)
x = self.bn2(x)
x = self.conv9(x)
x = F.relu(x, inplace=True)
x = self.pool3(x)
return x.view(x.size(0), 10)
class NIN_test(nn.Module):
def __init__(self):
super(NIN_test, self).__init__()
self.conv1 = nn.Conv2d(3, 192, kernel_size=5, stride=1, padding=2)
self.bn1 = nn.BatchNorm2d(192, eps=1e-4, momentum=0.1, affine=False)
self.conv2 = BinConv2d(192, 160, kernel_size=1, stride=1, padding=0, istrain=False)
self.conv3 = BinConv2d(160, 96, kernel_size=1, stride=1, padding=0, istrain=False)
self.pool1 = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.conv4 = BinConv2d(96, 192, kernel_size=5, stride=1, padding=2, istrain=False, drop=0.5)
self.conv5 = BinConv2d(192, 192, kernel_size=1, stride=1, padding=0, istrain=False)
self.conv6 = BinConv2d(192, 192, kernel_size=1, stride=1, padding=0, istrain=False)
self.pool2 = nn.AvgPool2d(kernel_size=3, stride=2, padding=1)
self.conv7 = BinConv2d(192, 192, kernel_size=3, stride=1, padding=1, istrain=False, drop=0.5)
self.conv8 = BinConv2d(192, 192, kernel_size=1, stride=1, padding=0, istrain=False)
self.bn2 = nn.BatchNorm2d(192, eps=1e-4, momentum=0.1, affine=False)
self.conv9 = nn.Conv2d(192, 10, kernel_size=1, stride=1, padding=0)
self.pool3 = nn.AvgPool2d(kernel_size=8, stride=1, padding=0)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = F.relu(x, inplace=True)
x = self.conv2(x)
x = F.relu(x, inplace=True)
x = self.conv3(x)
x = F.relu(x, inplace=True)
x = self.pool1(x)
x = self.conv4(x)
x = F.relu(x, inplace=True)
x = self.conv5(x)
x = F.relu(x, inplace=True)
x = self.conv6(x)
x = F.relu(x, inplace=True)
x = self.pool2(x)
x = self.conv7(x)
x = F.relu(x, inplace=True)
x = self.conv8(x)
x = F.relu(x, inplace=True)
x = self.bn2(x)
x = self.conv9(x)
x = F.relu(x, inplace=True)
x = self.pool3(x)
return x.view(x.size(0), 10)
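# Hedged usage sketch (not part of the original repository file): with the classes
# defined above in scope, a forward pass on a dummy CIFAR-10 sized batch looks like this.
model = Bin_VGG_train('VGG11')
x = torch.randn(4, 3, 32, 32)   # four 3x32x32 images, as in CIFAR-10
logits = model(x)               # expected shape: (4, 10), one score per class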
| 40.296296
| 133
| 0.562369
| 1,131
| 7,616
| 3.694076
| 0.103448
| 0.081379
| 0.067018
| 0.030158
| 0.922451
| 0.913595
| 0.904021
| 0.87865
| 0.863332
| 0.863332
| 0
| 0.093973
| 0.28322
| 7,616
| 188
| 134
| 40.510638
| 0.671368
| 0
| 0
| 0.751445
| 0
| 0
| 0.01392
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057803
| false
| 0
| 0.052023
| 0
| 0.16763
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4e8a202f67c8f6dc8b7e3894d4707cea708d1568
| 190
|
py
|
Python
|
experiments.py
|
peterhieuvu/musicxml-to-chords
|
40c68febed0b718def54a9bbcf98b6b485e08bc9
|
[
"MIT"
] | null | null | null |
experiments.py
|
peterhieuvu/musicxml-to-chords
|
40c68febed0b718def54a9bbcf98b6b485e08bc9
|
[
"MIT"
] | null | null | null |
experiments.py
|
peterhieuvu/musicxml-to-chords
|
40c68febed0b718def54a9bbcf98b6b485e08bc9
|
[
"MIT"
] | null | null | null |
import chord
import spelling
# experimental stuff
print(chord.intToText(chord.toInt("Bb", "G", "dominant")))
print(chord.intToText(chord.textToInt("Dm7(b5)/G")))
print(chord.intToText(24))
| 23.75
| 58
| 0.747368
| 26
| 190
| 5.461538
| 0.576923
| 0.211268
| 0.401408
| 0.338028
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022599
| 0.068421
| 190
| 7
| 59
| 27.142857
| 0.779661
| 0.094737
| 0
| 0
| 0
| 0
| 0.117647
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0.6
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 8
|
14f532c8bd09d187af7724bf3a98a4125f55a4b4
| 10,447
|
py
|
Python
|
tests/modules/users/test_permissions.py
|
IsmaelJS/test-github-actions
|
97223df261e9736c46875f590c9593dbac0d417b
|
[
"MIT"
] | 1,420
|
2015-11-20T01:25:14.000Z
|
2022-03-22T03:51:33.000Z
|
tests/modules/users/test_permissions.py
|
IsmaelJS/test-github-actions
|
97223df261e9736c46875f590c9593dbac0d417b
|
[
"MIT"
] | 151
|
2016-01-07T09:11:42.000Z
|
2020-11-17T08:37:07.000Z
|
tests/modules/users/test_permissions.py
|
IsmaelJS/test-github-actions
|
97223df261e9736c46875f590c9593dbac0d417b
|
[
"MIT"
] | 389
|
2015-11-23T01:14:31.000Z
|
2022-02-07T08:23:11.000Z
|
# encoding: utf-8
# pylint: disable=invalid-name,missing-docstring
from mock import Mock
import pytest
from werkzeug.exceptions import HTTPException
from app.modules.users import permissions
def test_DenyAbortMixin():
with pytest.raises(HTTPException):
permissions.rules.DenyAbortMixin().deny()
def test_WriteAccessRule_authenticated_user(authenticated_user_instance):
authenticated_user_instance.is_regular_user = True
assert permissions.rules.WriteAccessRule().check() is True
authenticated_user_instance.is_regular_user = False
assert permissions.rules.WriteAccessRule().check() is False
def test_ActiveUserRoleRule_anonymous(anonymous_user_instance):
# pylint: disable=unused-argument
assert permissions.rules.ActiveUserRoleRule().check() is False
def test_ActiveUserRoleRule_authenticated_user(authenticated_user_instance):
authenticated_user_instance.is_active = True
assert permissions.rules.ActiveUserRoleRule().check() is True
authenticated_user_instance.is_active = False
assert permissions.rules.ActiveUserRoleRule().check() is False
def test_PasswordRequiredRule(authenticated_user_instance):
authenticated_user_instance.password = "correct_password"
assert permissions.rules.PasswordRequiredRule(password="correct_password").check() is True
assert permissions.rules.PasswordRequiredRule(password="wrong_password").check() is False
def test_AdminRoleRule_authenticated_user(authenticated_user_instance):
authenticated_user_instance.is_admin = True
assert permissions.rules.AdminRoleRule().check() is True
authenticated_user_instance.is_admin = False
assert permissions.rules.AdminRoleRule().check() is False
def test_SupervisorRoleRule_authenticated_user(authenticated_user_instance):
obj = Mock()
del obj.check_supervisor
assert permissions.rules.SupervisorRoleRule(obj).check() is False
obj.check_supervisor = lambda user: user == authenticated_user_instance
assert permissions.rules.SupervisorRoleRule(obj).check() is True
obj.check_supervisor = lambda user: False
assert permissions.rules.SupervisorRoleRule(obj).check() is False
def test_OwnerRoleRule_authenticated_user(authenticated_user_instance):
obj = Mock()
del obj.check_owner
assert permissions.rules.OwnerRoleRule(obj).check() is False
obj.check_owner = lambda user: user == authenticated_user_instance
assert permissions.rules.OwnerRoleRule(obj).check() is True
obj.check_owner = lambda user: False
assert permissions.rules.OwnerRoleRule(obj).check() is False
def test_PartialPermissionDeniedRule():
with pytest.raises(RuntimeError):
permissions.rules.PartialPermissionDeniedRule().check()
def test_PasswordRequiredPermissionMixin():
mixin = permissions.PasswordRequiredPermissionMixin(
password_required=False
)
with pytest.raises(AttributeError):
mixin.rule()
def test_WriteAccessPermission_authenticated_user(authenticated_user_instance):
authenticated_user_instance.is_regular_user = True
with permissions.WriteAccessPermission():
pass
authenticated_user_instance.is_regular_user = False
with pytest.raises(HTTPException):
with permissions.WriteAccessPermission():
pass
def test_RolePermission():
with permissions.RolePermission():
pass
with pytest.raises(RuntimeError):
with permissions.RolePermission(partial=True):
pass
def test_ActiveUserRolePermission_anonymous_user(anonymous_user_instance):
# pylint: disable=unused-argument
with pytest.raises(HTTPException):
with permissions.ActiveUserRolePermission():
pass
def test_ActiveUserRolePermission_authenticated_user(authenticated_user_instance):
authenticated_user_instance.is_active = True
with permissions.ActiveUserRolePermission():
pass
authenticated_user_instance.is_active = False
with pytest.raises(HTTPException):
with permissions.ActiveUserRolePermission():
pass
def test_AdminRolePermission_anonymous_user(anonymous_user_instance):
# pylint: disable=unused-argument
with pytest.raises(HTTPException):
with permissions.AdminRolePermission():
pass
def test_AdminRolePermission_authenticated_user(authenticated_user_instance):
authenticated_user_instance.is_admin = True
with permissions.AdminRolePermission():
pass
authenticated_user_instance.is_admin = False
with pytest.raises(HTTPException):
with permissions.AdminRolePermission():
pass
def test_AdminRolePermission_anonymous_user_with_password(anonymous_user_instance):
# pylint: disable=unused-argument
with pytest.raises(HTTPException):
with permissions.AdminRolePermission(password_required=True, password="any_password"):
pass
def test_AdminRolePermission_authenticated_user_with_password_is_admin(
authenticated_user_instance
):
authenticated_user_instance.password = "correct_password"
authenticated_user_instance.is_admin = True
with permissions.AdminRolePermission(password_required=True, password="correct_password"):
pass
with pytest.raises(HTTPException):
with permissions.AdminRolePermission(password_required=True, password="wrong_password"):
pass
def test_AdminRolePermission_authenticated_user_with_password_not_admin(
authenticated_user_instance
):
authenticated_user_instance.password = "correct_password"
authenticated_user_instance.is_admin = False
with pytest.raises(HTTPException):
with permissions.AdminRolePermission(password_required=True, password="correct_password"):
pass
with pytest.raises(HTTPException):
with permissions.AdminRolePermission(password_required=True, password="wrong_password"):
pass
def test_SupervisorRolePermission_anonymous_user(anonymous_user_instance):
# pylint: disable=unused-argument
with pytest.raises(HTTPException):
with permissions.SupervisorRolePermission():
pass
def test_SupervisorRolePermission_authenticated_user(authenticated_user_instance):
obj = Mock()
obj.check_supervisor = lambda user: user == authenticated_user_instance
with permissions.SupervisorRolePermission(obj=obj):
pass
del obj.check_supervisor
with pytest.raises(HTTPException):
with permissions.SupervisorRolePermission():
pass
def test_SupervisorRolePermission_anonymous_user_with_password(anonymous_user_instance):
# pylint: disable=unused-argument
obj = Mock()
obj.check_supervisor = lambda user: False
with pytest.raises(HTTPException):
with permissions.SupervisorRolePermission(
obj=obj,
password_required=True,
password="any_password"
):
pass
def test_SupervisorRolePermission_authenticated_user_with_password_with_check_supervisor(
authenticated_user_instance
):
authenticated_user_instance.password = "correct_password"
obj = Mock()
obj.check_supervisor = lambda user: user == authenticated_user_instance
with permissions.SupervisorRolePermission(
obj=obj,
password_required=True,
password="correct_password"
):
pass
with pytest.raises(HTTPException):
with permissions.SupervisorRolePermission(
obj=obj,
password_required=True,
password="wrong_password"
):
pass
def test_SupervisorRolePermission_authenticated_user_with_password_without_check_supervisor(
authenticated_user_instance
):
authenticated_user_instance.password = "correct_password"
obj = Mock()
del obj.check_supervisor
with pytest.raises(HTTPException):
with permissions.SupervisorRolePermission(
obj=obj,
password_required=True,
password="correct_password"
):
pass
with pytest.raises(HTTPException):
with permissions.SupervisorRolePermission(
obj=obj,
password_required=True,
password="wrong_password"
):
pass
def test_OwnerRolePermission_anonymous_user(anonymous_user_instance):
# pylint: disable=unused-argument
with pytest.raises(HTTPException):
with permissions.OwnerRolePermission():
pass
def test_OwnerRolePermission_authenticated_user(authenticated_user_instance):
obj = Mock()
obj.check_owner = lambda user: user == authenticated_user_instance
with permissions.OwnerRolePermission(obj=obj):
pass
del obj.check_owner
with pytest.raises(HTTPException):
with permissions.OwnerRolePermission():
pass
def test_OwnerRolePermission_anonymous_user_with_password(anonymous_user_instance):
# pylint: disable=unused-argument
obj = Mock()
obj.check_owner = lambda user: False
with pytest.raises(HTTPException):
with permissions.OwnerRolePermission(
obj=obj,
password_required=True,
password="any_password"
):
pass
def test_OwnerRolePermission_authenticated_user_with_password_with_check_owner(
authenticated_user_instance
):
authenticated_user_instance.password = "correct_password"
obj = Mock()
obj.check_owner = lambda user: user == authenticated_user_instance
with permissions.OwnerRolePermission(
obj=obj,
password_required=True,
password="correct_password"
):
pass
with pytest.raises(HTTPException):
with permissions.OwnerRolePermission(
obj=obj,
password_required=True,
password="wrong_password"
):
pass
def test_OwnerRolePermission_authenticated_user_with_password_without_check_owner(
authenticated_user_instance
):
authenticated_user_instance.password = "correct_password"
obj = Mock()
del obj.check_owner
with pytest.raises(HTTPException):
with permissions.OwnerRolePermission(
obj=obj,
password_required=True,
password="correct_password"
):
pass
with pytest.raises(HTTPException):
with permissions.OwnerRolePermission(
obj=obj,
password_required=True,
password="wrong_password"
):
pass
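# Hedged sketch of the pattern exercised by the tests above: permissions are used as
# context managers and raise an HTTPException when their rule check fails. The view
# function below is hypothetical and not part of the tested application.
def delete_user(user, current_password):
    with permissions.AdminRolePermission(
        password_required=True,
        password=current_password
    ):
        # Only reached when the current user is an admin and the password matches;
        # otherwise the permission aborts with an HTTPException.
        user.delete()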
| 36.915194
| 98
| 0.740308
| 1,026
| 10,447
| 7.25731
| 0.07115
| 0.136986
| 0.14773
| 0.085684
| 0.873892
| 0.854687
| 0.794655
| 0.756648
| 0.721327
| 0.617513
| 0
| 0.000118
| 0.190772
| 10,447
| 282
| 99
| 37.046099
| 0.880648
| 0.030439
| 0
| 0.770833
| 0
| 0
| 0.035382
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 1
| 0.120833
| false
| 0.325
| 0.016667
| 0
| 0.1375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
11e7840c0791ccaf2d99a44baf10d7c27c1b2321
| 6,871
|
py
|
Python
|
pycommon/test/rc_24_testreporter_ips.py
|
CESNET/Nemea-Framework
|
49877c3306aa570609b00b990b74633b29e5f331
|
[
"BSD-3-Clause"
] | 12
|
2015-10-16T09:22:50.000Z
|
2021-11-14T17:08:38.000Z
|
pycommon/test/rc_24_testreporter_ips.py
|
krkos/Nemea-Framework
|
f9c3265724649968cdc260c3c4c4fbd548b304ff
|
[
"BSD-3-Clause"
] | 160
|
2015-11-27T10:16:44.000Z
|
2021-12-29T16:57:37.000Z
|
pycommon/test/rc_24_testreporter_ips.py
|
krkos/Nemea-Framework
|
f9c3265724649968cdc260c3c4c4fbd548b304ff
|
[
"BSD-3-Clause"
] | 34
|
2015-11-27T14:46:07.000Z
|
2020-11-06T19:17:29.000Z
|
import unittest
import subprocess
import os
import re
import sys
import json
# Expected output:
EXPSTRING = """{"Category": ["Test"], "Node": [{"SW": ["Nemea", "test"], "Type": ["Flow", "Statistical"], "Name": "com.example.nemea.test"}], "EventTime": "2019-03-18T20:18:00Z", "Target": [{"Proto": ["tcp"]}], "Format": "IDEA0", "CeaseTime": "2019-03-18T20:18:00Z", "CreateTime": "2019-03-18T20:19:00Z", "Source": [{"IP4": ["192.168.0.1", "192.168.1.1", "10.0.0.1"], "Proto": ["tcp"]}], "DetectTime": "2019-03-18T20:18:00Z", "ID": "79fb01dd-135c-4f36-a88d-23f7dff697aa", "Description": "Test Message"}
{"Category": ["Test"], "Node": [{"SW": ["Nemea", "test"], "Type": ["Flow", "Statistical"], "Name": "com.example.nemea.test"}], "EventTime": "2019-03-18T20:18:00Z", "Target": [{"Proto": ["tcp"]}], "Format": "IDEA0", "CeaseTime": "2019-03-18T20:18:00Z", "CreateTime": "2019-03-18T20:19:00Z", "Source": [{"IP4": ["192.168.0.1", "192.168.1.1", "10.0.0.1"], "Proto": ["tcp"]}], "DetectTime": "2019-03-18T20:18:00Z", "ID": "79fb01dd-135c-4f36-a88d-23f7dff697aa", "Description": "Test Message"}
{"Category": ["Test"], "Node": [{"SW": ["Nemea", "test"], "Type": ["Flow", "Statistical"], "Name": "com.example.nemea.test"}], "EventTime": "2019-03-18T20:18:00Z", "Target": [{"Proto": ["tcp"]}], "Format": "IDEA0", "CeaseTime": "2019-03-18T20:18:00Z", "CreateTime": "2019-03-18T20:19:00Z", "Source": [{"IP4": ["192.168.0.1", "192.168.1.1", "10.0.0.1"], "Proto": ["tcp"]}], "DetectTime": "2019-03-18T20:18:00Z", "ID": "91301388-b2a0-44c1-9b98-9cd741d9dfa9", "Description": "Test Message"}
{"Category": ["Test"], "Node": [{"SW": ["Nemea", "test"], "Type": ["Flow", "Statistical"], "Name": "com.example.nemea.test"}], "EventTime": "2019-03-18T20:18:00Z", "Target": [{"Proto": ["tcp"]}], "Format": "IDEA0", "CeaseTime": "2019-03-18T20:18:00Z", "CreateTime": "2019-03-18T20:19:00Z", "Source": [{"IP4": ["192.168.0.1", "192.168.1.1", "10.0.0.1"], "Proto": ["tcp"]}], "DetectTime": "2019-03-18T20:18:00Z", "ID": "91301388-b2a0-44c1-9b98-9cd741d9dfa9", "Description": "Test Message"}
{"Category": ["Test"], "Node": [{"SW": ["Nemea", "test"], "Type": ["Flow", "Statistical"], "Name": "com.example.nemea.test"}], "EventTime": "2019-03-18T20:18:00Z", "Target": [{"Proto": ["tcp"]}], "Format": "IDEA0", "CeaseTime": "2019-03-18T20:18:00Z", "CreateTime": "2019-03-18T20:19:00Z", "Source": [{"IP4": ["192.168.0.1", "192.168.1.1", "10.0.0.1"], "Proto": ["tcp"]}], "DetectTime": "2019-03-18T20:18:00Z", "ID": "67d34bbd-ae90-4395-87be-2c74ca32f0d3", "Description": "Test Message"}
{"Category": ["Test"], "Node": [{"SW": ["Nemea", "test"], "Type": ["Flow", "Statistical"], "Name": "com.example.nemea.test"}], "EventTime": "2019-03-18T20:18:00Z", "Target": [{"Proto": ["tcp"]}], "Format": "IDEA0", "CeaseTime": "2019-03-18T20:18:00Z", "CreateTime": "2019-03-18T20:19:00Z", "Source": [{"IP4": ["192.168.0.1", "192.168.1.1", "10.0.0.1"], "Proto": ["tcp"]}], "DetectTime": "2019-03-18T20:18:00Z", "ID": "67d34bbd-ae90-4395-87be-2c74ca32f0d3", "Description": "Test Message"}
{"Category": ["Test"], "Node": [{"SW": ["Nemea", "test"], "Type": ["Flow", "Statistical"], "Name": "com.example.nemea.test"}], "EventTime": "2019-03-18T20:18:00Z", "Target": [{"Proto": ["tcp"]}], "Format": "IDEA0", "CeaseTime": "2019-03-18T20:18:00Z", "CreateTime": "2019-03-18T20:19:00Z", "Source": [{"IP4": ["192.168.0.1", "192.168.1.1", "10.0.0.1"], "Proto": ["tcp"]}], "DetectTime": "2019-03-18T20:18:00Z", "ID": "0ba775ff-420d-4714-a21b-b465278b1ccb", "Description": "Test Message"}
{"Category": ["Test"], "Node": [{"SW": ["Nemea", "test"], "Type": ["Flow", "Statistical"], "Name": "com.example.nemea.test"}], "EventTime": "2019-03-18T20:18:00Z", "Target": [{"Proto": ["tcp"]}], "Format": "IDEA0", "CeaseTime": "2019-03-18T20:18:00Z", "CreateTime": "2019-03-18T20:19:00Z", "Source": [{"IP4": ["192.168.0.1", "192.168.1.1", "10.0.0.1"], "Proto": ["tcp"]}], "DetectTime": "2019-03-18T20:18:00Z", "ID": "0ba775ff-420d-4714-a21b-b465278b1ccb", "Description": "Test Message"}
{"Category": ["Test"], "Node": [{"SW": ["Nemea", "test"], "Type": ["Flow", "Statistical"], "Name": "com.example.nemea.test"}], "EventTime": "2019-03-18T20:18:00Z", "Target": [{"Proto": ["tcp"]}], "Format": "IDEA0", "CeaseTime": "2019-03-18T20:18:00Z", "CreateTime": "2019-03-18T20:19:00Z", "Source": [{"IP4": ["192.168.0.1", "192.168.1.1", "10.0.0.1"], "Proto": ["tcp"]}], "DetectTime": "2019-03-18T20:18:00Z", "ID": "4735b74c-e65c-42f9-ae4c-6bec446ad500", "Description": "Test Message"}
{"Category": ["Test"], "Node": [{"SW": ["Nemea", "test"], "Type": ["Flow", "Statistical"], "Name": "com.example.nemea.test"}], "EventTime": "2019-03-18T20:18:00Z", "Target": [{"Proto": ["tcp"]}], "Format": "IDEA0", "CeaseTime": "2019-03-18T20:18:00Z", "CreateTime": "2019-03-18T20:19:00Z", "Source": [{"IP4": ["192.168.0.1", "192.168.1.1", "10.0.0.1"], "Proto": ["tcp"]}], "DetectTime": "2019-03-18T20:18:00Z", "ID": "4735b74c-e65c-42f9-ae4c-6bec446ad500", "Description": "Test Message"}
{"Category": ["Test"], "Node": [{"SW": ["Nemea", "test"], "Type": ["Flow", "Statistical"], "Name": "com.example.nemea.test"}], "EventTime": "2019-03-18T20:18:00Z", "Target": [{"Proto": ["tcp"]}], "Format": "IDEA0", "CeaseTime": "2019-03-18T20:18:00Z", "CreateTime": "2019-03-18T20:19:00Z", "Source": [{"IP4": ["192.168.0.1", "192.168.1.1", "10.0.0.1"], "Proto": ["tcp"]}], "DetectTime": "2019-03-18T20:18:00Z", "ID": "aeecc5a8-dedc-4bbf-96b7-fd64ab376863", "Description": "Test Message"}
{"Category": ["Test"], "Node": [{"SW": ["Nemea", "test"], "Type": ["Flow", "Statistical"], "Name": "com.example.nemea.test"}], "EventTime": "2019-03-18T20:18:00Z", "Target": [{"Proto": ["tcp"]}], "Format": "IDEA0", "CeaseTime": "2019-03-18T20:18:00Z", "CreateTime": "2019-03-18T20:19:00Z", "Source": [{"IP4": ["192.168.0.1", "192.168.1.1", "10.0.0.1"], "Proto": ["tcp"]}], "DetectTime": "2019-03-18T20:18:00Z", "ID": "aeecc5a8-dedc-4bbf-96b7-fd64ab376863", "Description": "Test Message"}
"""
idre = r'"ID": "[^"]*",?'
class RCReporterTest(unittest.TestCase):
def test_run_reporter(self):
d = os.path.dirname(__file__)
script = d + "/testiprange2idea.py"
data = d + "/test_data.trapcap"
config = d + "/rc_config/iprange_stdout.yaml"
output = subprocess.check_output(["python2" if sys.version_info[0] < 3 else "python3", script, "-D", "-i", "f:" + data, "-c", config], env={"PYTHONPATH": d + "/.."})
output = re.sub(idre, "", output.decode("utf-8")).split("\n")
expect = re.sub(idre, "", EXPSTRING).split("\n")
output = list(filter(lambda l: l != "", output))
self.assertEqual(len(output), 18)
for i in range(len(expect)):
if expect[i] and output[i]:
o = json.loads(output[i].replace("'", '"'))
e = json.loads(expect[i])
self.assertEqual(o, e)
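# Illustration of the ID-stripping normalization used above (the sample line is
# hypothetical): re.sub(idre, "", line) removes the randomly generated "ID" field so
# reporter output can be compared against EXPSTRING field by field.
line = '{"Format": "IDEA0", "ID": "79fb01dd-135c-4f36-a88d-23f7dff697aa", "Category": ["Test"]}'
print(re.sub(idre, "", line))
# -> {"Format": "IDEA0",  "Category": ["Test"]}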
| 156.159091
| 502
| 0.590162
| 943
| 6,871
| 4.288441
| 0.141039
| 0.071217
| 0.130564
| 0.115727
| 0.860534
| 0.860534
| 0.860534
| 0.860534
| 0.860534
| 0.860534
| 0
| 0.190722
| 0.099549
| 6,871
| 43
| 503
| 159.790698
| 0.462906
| 0.002329
| 0
| 0.277778
| 0
| 0.333333
| 0.873468
| 0.281086
| 0
| 0
| 0
| 0
| 0.055556
| 1
| 0.027778
| false
| 0
| 0.166667
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
11ef7525481d3666a87e96867d9ab98ef2660945
| 14,973
|
py
|
Python
|
app/utils/examples.py
|
ORANZINO/bouquet_server
|
2ce1bb59df15297878c555dd97e0f27b5202ed02
|
[
"MIT"
] | 7
|
2022-01-20T11:50:39.000Z
|
2022-01-27T09:39:27.000Z
|
app/utils/examples.py
|
ORANZINO/bouquet_server
|
2ce1bb59df15297878c555dd97e0f27b5202ed02
|
[
"MIT"
] | null | null | null |
app/utils/examples.py
|
ORANZINO/bouquet_server
|
2ce1bb59df15297878c555dd97e0f27b5202ed02
|
[
"MIT"
] | 1
|
2022-01-20T11:51:50.000Z
|
2022-01-20T11:51:50.000Z
|
update_user_requests = {
"both": {
"value": {
"name": "오태진",
"profile_img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg"
}
},
"name": {
"value": {
"name": "오태진"
}
},
"img": {
"value": {
"profile_img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg"
}
}
}
update_character_requests = {
"all": {
"value": {
"id": 1,
"name": "오란지",
"profile_img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg",
"birth": 19990601,
"job": "과일",
"nationality": "플로리다",
"intro": "상큼합니다.",
"tmi": "당도가 높은 편입니다.",
"likes": ["햇빛", "비옥한 토양", "해변가"],
"hates": ["비오는 곳", "낮은 당도", "사과(라이벌)"]
}
},
"part": {
"value": {
"id": 1,
"intro": "상큼합니다.",
"tmi": "당도가 높은 편입니다.",
"likes": ["햇빛", "비옥한 토양", "해변가"]
}
}
}
create_post_requests = {
"Plain": {
"value": {
"text": "이것이 포스팅이다.",
"template": {
"type": "None"
}
}
},
"Image": {
"value": {
"text": "orange pic",
"template": {
"type": "Image",
"img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg",
}
}
},
"Diary": {
"value": {
"text": "오늘은 일기를 썼다.",
"template": {
"type": "Diary",
"title": "오늘의 일기",
"weather": "맑음",
"img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg",
"date": "20210816",
"content": "오늘은 밥을 먹었다. 참 재미있었다."
}
}
},
"Album": {
"value": {
"text": "새 앨범이 나왔어용",
"template": {
"type": "Album",
"description": "열심히 준비한 앨범입니다!",
"title": "this is hiphop",
"img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg",
"release_date": "20210821",
"tracks": [{"title": "배신의 십자가", "lyric": "으아으아으아으아으아으아"}, {"title": "달콤한 오렌지", "lyric": "우와우와우와우와"}]
}
}
},
"List": {
"value": {
"text": "제가 좋아하는 것들입니당",
"template": {
"type": "List",
"title": "My Favorites",
"content": "these are what I like",
"components": [{"title": "Orange", "img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg", "content": "오렌지 좋아함 ㅎㅎ"}]
}
}
}
}
get_post_responses = {
200: {
"content": {
"application/json": {
"examples": {
"Plain": {
"value": {
"id": 1,
"created_at": "2021-09-02T15:25:46",
"updated_at": "2021-09-02T15:25:46",
"template": {
"type": "None"
},
"text": "이것이 포스팅이다.",
"num_sunshines": 0,
"character_info": {
"name": "오란지",
"profile_img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg"
},
"liked": False,
"comments": [
{
"id": 1,
"created_at": "2021-09-02T15:26:31",
"updated_at": "2021-09-02T15:26:31",
"comment": "이 노래를 불러보지만 내 진심이 닿을지 몰라",
"parent": 0,
"deleted": False,
"num_sunshines": 0,
"character_info": {
"name": "오란지",
"profile_img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg"
},
"liked": False,
"children": [
{
"id": 4,
"created_at": "2021-09-03T20:15:29",
"updated_at": "2021-09-03T20:15:29",
"comment": "Love I want",
"parent": 1,
"deleted": False,
"num_sunshines": 0,
"character_info": {
"name": "오란지",
"profile_img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg"
},
"liked": False
}
]
},
{
"id": 2,
"created_at": "2021-09-03T14:41:07",
"updated_at": "2021-09-03T14:41:07",
"comment": "이 노래를 불러보지만 내 진심이 닿을지 몰라",
"parent": 0,
"deleted": False,
"num_sunshines": 0,
"character_info": {
"name": "오란지",
"profile_img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg"
},
"liked": False,
"children": []
},
{
"id": 3,
"created_at": "2021-09-03T14:46:25",
"updated_at": "2021-09-03T14:46:25",
"comment": "이 노래를 불러보지만 내 진심이 닿을지 몰라",
"parent": 0,
"deleted": False,
"num_sunshines": 0,
"character_info": {
"name": "오란지",
"profile_img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg"
},
"liked": False,
"children": []
}
]
}
},
"Image": {
"value": {
"id": 2,
"created_at": "2021-09-02T15:25:58",
"updated_at": "2021-09-02T15:25:58",
"template": {
"type": "Image",
"img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg"
},
"text": "orange pic",
"num_sunshines": 0,
"character_info": {
"name": "오란지",
"profile_img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg"
},
"liked": False,
"comments": [
{
"id": 1,
"created_at": "2021-09-02T15:26:31",
"updated_at": "2021-09-02T15:26:31",
"comment": "이 노래를 불러보지만 내 진심이 닿을지 몰라",
"parent": 0,
"deleted": False,
"num_sunshines": 0,
"character_info": {
"name": "오란지",
"profile_img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg"
},
"liked": False,
"children": [
{
"id": 4,
"created_at": "2021-09-03T20:15:29",
"updated_at": "2021-09-03T20:15:29",
"comment": "Love I want",
"parent": 1,
"deleted": False,
"num_sunshines": 0,
"character_info": {
"name": "오란지",
"profile_img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg"
},
"liked": False
}
]
}
]
}
},
"Diary": {
"value": {
"id": 3,
"created_at": "2021-09-02T15:26:07",
"updated_at": "2021-09-02T15:26:07",
"template": {
"type": "Diary",
"title": "오늘의 일기",
"weather": "맑음",
"img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg",
"date": 20210816,
"content": "오늘은 밥을 먹었다. 참 재미있었다."
},
"text": "오늘은 일기를 썼다.",
"num_sunshines": 0,
"character_info": {
"name": "오란지",
"profile_img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg"
},
"liked": False,
"comments": []
}
},
"Album": {
"value": {
"id": 4,
"created_at": "2021-09-02T15:26:14",
"updated_at": "2021-09-02T15:26:14",
"template": {
"type": "Album",
"title": "this is hiphop",
"img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg",
"artist": "오란지",
"description": None,
"release_date": 20210821,
"tracks": [
{
"title": "배신의 십자가",
"lyric": "으아으아으아으아으아으아"
},
{
"title": "달콤한 오렌지",
"lyric": "우와우와우와우와"
}
]
},
"text": "새 앨범이 나왔어용",
"num_sunshines": 0,
"character_info": {
"name": "오란지",
"profile_img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg"
},
"liked": False,
"comments": []
}
},
"List": {
"value": {
"id": 5,
"created_at": "2021-09-02T15:26:21",
"updated_at": "2021-09-02T15:26:21",
"template": {
"type": "List",
"title": "My Favorites",
"content": "these are what I like",
"img": None,
"components": [
{
"title": "Orange",
"img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg",
"content": "오렌지 좋아함 ㅎㅎ"
}
]
},
"text": "제가 좋아하는 것들입니당",
"num_sunshines": 0,
"character_info": {
"name": "오란지",
"profile_img": "https://i.pinimg.com/736x/05/79/5a/05795a16b647118ffb6629390e995adb.jpg"
},
"liked": False,
"comments": []
}
}
}
}
}
},
404: {
"content": {
"application/json": {
"examples": {
"NO MATCH POST": {
"value": {
"msg": "NO_MATCH_POST"
}
}
}
}
}
}
}
| 44.168142
| 162
| 0.2779
| 879
| 14,973
| 4.654152
| 0.187713
| 0.043021
| 0.048399
| 0.080665
| 0.84527
| 0.841848
| 0.76485
| 0.759228
| 0.759228
| 0.759228
| 0
| 0.181416
| 0.60756
| 14,973
| 338
| 163
| 44.298817
| 0.514806
| 0
| 0
| 0.565868
| 0
| 0
| 0.291191
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ee928d58811e40163126c834cfdc50364106b9c5
| 4,765
|
py
|
Python
|
tests/parser/22-Knight-Tour-with-holes.asp.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/22-Knight-Tour-with-holes.asp.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/22-Knight-Tour-with-holes.asp.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
% Knight Tour
% Input:
% - size(N), if the chessboard is NxN
% - forbidden(X,Y), if X,Y cannot be reached by the knight.
% Output:
% - move(X1,Y1,X2,Y2), if the knight moves from X1,Y1 to X2,Y2.
% Define the chessboard.
number(X) :- size(X).
number(X) :- number(Y), X=Y-1, X>0.
cell(X,Y) :- number(X), number(Y).
% Guess the moves.
move(X1,Y1,X2,Y2) | non_move(X1,Y1,X2,Y2) :- valid(X1,Y1,X2,Y2), not forbidden(X1,Y1), not forbidden(X2,Y2).
% Compute all valid moves from each cell.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X1 = X2+2, Y1 = Y2+1.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X1 = X2+2, Y2 = Y1+1.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X2 = X1+2, Y1 = Y2+1.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X2 = X1+2, Y2 = Y1+1.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X1 = X2+1, Y1 = Y2+2.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X1 = X2+1, Y2 = Y1+2.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X2 = X1+1, Y1 = Y2+2.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X2 = X1+1, Y2 = Y1+2.
% Exactly one move entering to each cell.
:- cell(X,Y), not forbidden(X,Y), not exactlyOneMoveEntering(X,Y).
exactlyOneMoveEntering(X,Y) :- move(X1,Y1,X,Y), not atLeastTwoMovesEntering(X,Y).
atLeastTwoMovesEntering(X,Y) :- move(X1,Y1,X,Y), move(X2,Y2,X,Y), X1 != X2.
atLeastTwoMovesEntering(X,Y) :- move(X1,Y1,X,Y), move(X2,Y2,X,Y), Y1 != Y2.
% Exactly one move leaving each cell.
:- cell(X,Y), not forbidden(X,Y), not exactlyOneMoveLeaving(X,Y).
exactlyOneMoveLeaving(X,Y) :- move(X,Y,X1,Y1), not atLeastTwoMovesLeaving(X,Y).
atLeastTwoMovesLeaving(X,Y) :- move(X,Y,X1,Y1), move(X,Y,X2,Y2), X1 != X2.
atLeastTwoMovesLeaving(X,Y) :- move(X,Y,X1,Y1), move(X,Y,X2,Y2), Y1 != Y2.
% Each non-forbidden cell must be reached by the knight.
reached(X,Y) :- move(_,_,X,Y).
reached(X,Y) :- move(X,Y,_,_).
reached(X,Y) :- reached(X1,Y1), move(X1,Y1,X,Y).
:- cell(X,Y), not forbidden(X,Y), not reached(X,Y).
% Each forbidden cell must remain unreached.
:- forbidden(X,Y), reached(X,Y).
% Each solution must consist of a single tour performed by a single knight
connected(X,Y,X1,Y1) :- move(X,Y,X1,Y1).
connected(X,Y,X1,Y1) :- connected(X,Y,X2,Y2), move(X2,Y2,X1,Y1).
connected(X,Y,X,Y) :- cell(X,Y), not forbidden(X,Y).
:- cell(X1,Y1), cell(X2,Y2), not forbidden(X1,Y1), not forbidden(X2,Y2), not connected(X1,Y1,X2,Y2).
"""
output = """
% Knight Tour
% Input:
% - size(N), if the chessboard is NxN
% - forbidden(X,Y), if X,Y cannot be reached by the knight.
% Output:
% - move(X1,Y1,X2,Y2), if the knight moves from X1,Y1 to X2,Y2.
% Define the chessboard.
number(X) :- size(X).
number(X) :- number(Y), X=Y-1, X>0.
cell(X,Y) :- number(X), number(Y).
% Guess the moves.
move(X1,Y1,X2,Y2) | non_move(X1,Y1,X2,Y2) :- valid(X1,Y1,X2,Y2), not forbidden(X1,Y1), not forbidden(X2,Y2).
% Compute all valid moves from each cell.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X1 = X2+2, Y1 = Y2+1.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X1 = X2+2, Y2 = Y1+1.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X2 = X1+2, Y1 = Y2+1.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X2 = X1+2, Y2 = Y1+1.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X1 = X2+1, Y1 = Y2+2.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X1 = X2+1, Y2 = Y1+2.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X2 = X1+1, Y1 = Y2+2.
valid(X1,Y1,X2,Y2) :- cell(X1,Y1), cell(X2,Y2), X2 = X1+1, Y2 = Y1+2.
% Exactly one move entering to each cell.
:- cell(X,Y), not forbidden(X,Y), not exactlyOneMoveEntering(X,Y).
exactlyOneMoveEntering(X,Y) :- move(X1,Y1,X,Y), not atLeastTwoMovesEntering(X,Y).
atLeastTwoMovesEntering(X,Y) :- move(X1,Y1,X,Y), move(X2,Y2,X,Y), X1 != X2.
atLeastTwoMovesEntering(X,Y) :- move(X1,Y1,X,Y), move(X2,Y2,X,Y), Y1 != Y2.
% Exactly one move leaving each cell.
:- cell(X,Y), not forbidden(X,Y), not exactlyOneMoveLeaving(X,Y).
exactlyOneMoveLeaving(X,Y) :- move(X,Y,X1,Y1), not atLeastTwoMovesLeaving(X,Y).
atLeastTwoMovesLeaving(X,Y) :- move(X,Y,X1,Y1), move(X,Y,X2,Y2), X1 != X2.
atLeastTwoMovesLeaving(X,Y) :- move(X,Y,X1,Y1), move(X,Y,X2,Y2), Y1 != Y2.
% Each non-forbidden cell must be reached by the knight.
reached(X,Y) :- move(_,_,X,Y).
reached(X,Y) :- move(X,Y,_,_).
reached(X,Y) :- reached(X1,Y1), move(X1,Y1,X,Y).
:- cell(X,Y), not forbidden(X,Y), not reached(X,Y).
% Each forbidden cell must remain unreached.
:- forbidden(X,Y), reached(X,Y).
% Each solution must consist of a single tour performed by a single knight
connected(X,Y,X1,Y1) :- move(X,Y,X1,Y1).
connected(X,Y,X1,Y1) :- connected(X,Y,X2,Y2), move(X2,Y2,X1,Y1).
connected(X,Y,X,Y) :- cell(X,Y), not forbidden(X,Y).
:- cell(X1,Y1), cell(X2,Y2), not forbidden(X1,Y1), not forbidden(X2,Y2), not connected(X1,Y1,X2,Y2).
"""
| 40.726496
| 108
| 0.641553
| 954
| 4,765
| 3.19392
| 0.059748
| 0.0617
| 0.051198
| 0.068264
| 0.99639
| 0.99639
| 0.99639
| 0.99639
| 0.99639
| 0.99639
| 0
| 0.093471
| 0.128856
| 4,765
| 116
| 109
| 41.077586
| 0.640569
| 0
| 0
| 0.977778
| 0
| 0.533333
| 0.993494
| 0.142707
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
eed0708b691d32484dcd9c83cb24bc839d0a76a7
| 6,683
|
py
|
Python
|
loldib/getratings/models/NA/na_nidalee/na_nidalee_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_nidalee/na_nidalee_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_nidalee/na_nidalee_sup.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
class NA_Nidalee_Sup_Aatrox(Ratings):
pass
class NA_Nidalee_Sup_Ahri(Ratings):
pass
class NA_Nidalee_Sup_Akali(Ratings):
pass
class NA_Nidalee_Sup_Alistar(Ratings):
pass
class NA_Nidalee_Sup_Amumu(Ratings):
pass
class NA_Nidalee_Sup_Anivia(Ratings):
pass
class NA_Nidalee_Sup_Annie(Ratings):
pass
class NA_Nidalee_Sup_Ashe(Ratings):
pass
class NA_Nidalee_Sup_AurelionSol(Ratings):
pass
class NA_Nidalee_Sup_Azir(Ratings):
pass
class NA_Nidalee_Sup_Bard(Ratings):
pass
class NA_Nidalee_Sup_Blitzcrank(Ratings):
pass
class NA_Nidalee_Sup_Brand(Ratings):
pass
class NA_Nidalee_Sup_Braum(Ratings):
pass
class NA_Nidalee_Sup_Caitlyn(Ratings):
pass
class NA_Nidalee_Sup_Camille(Ratings):
pass
class NA_Nidalee_Sup_Cassiopeia(Ratings):
pass
class NA_Nidalee_Sup_Chogath(Ratings):
pass
class NA_Nidalee_Sup_Corki(Ratings):
pass
class NA_Nidalee_Sup_Darius(Ratings):
pass
class NA_Nidalee_Sup_Diana(Ratings):
pass
class NA_Nidalee_Sup_Draven(Ratings):
pass
class NA_Nidalee_Sup_DrMundo(Ratings):
pass
class NA_Nidalee_Sup_Ekko(Ratings):
pass
class NA_Nidalee_Sup_Elise(Ratings):
pass
class NA_Nidalee_Sup_Evelynn(Ratings):
pass
class NA_Nidalee_Sup_Ezreal(Ratings):
pass
class NA_Nidalee_Sup_Fiddlesticks(Ratings):
pass
class NA_Nidalee_Sup_Fiora(Ratings):
pass
class NA_Nidalee_Sup_Fizz(Ratings):
pass
class NA_Nidalee_Sup_Galio(Ratings):
pass
class NA_Nidalee_Sup_Gangplank(Ratings):
pass
class NA_Nidalee_Sup_Garen(Ratings):
pass
class NA_Nidalee_Sup_Gnar(Ratings):
pass
class NA_Nidalee_Sup_Gragas(Ratings):
pass
class NA_Nidalee_Sup_Graves(Ratings):
pass
class NA_Nidalee_Sup_Hecarim(Ratings):
pass
class NA_Nidalee_Sup_Heimerdinger(Ratings):
pass
class NA_Nidalee_Sup_Illaoi(Ratings):
pass
class NA_Nidalee_Sup_Irelia(Ratings):
pass
class NA_Nidalee_Sup_Ivern(Ratings):
pass
class NA_Nidalee_Sup_Janna(Ratings):
pass
class NA_Nidalee_Sup_JarvanIV(Ratings):
pass
class NA_Nidalee_Sup_Jax(Ratings):
pass
class NA_Nidalee_Sup_Jayce(Ratings):
pass
class NA_Nidalee_Sup_Jhin(Ratings):
pass
class NA_Nidalee_Sup_Jinx(Ratings):
pass
class NA_Nidalee_Sup_Kalista(Ratings):
pass
class NA_Nidalee_Sup_Karma(Ratings):
pass
class NA_Nidalee_Sup_Karthus(Ratings):
pass
class NA_Nidalee_Sup_Kassadin(Ratings):
pass
class NA_Nidalee_Sup_Katarina(Ratings):
pass
class NA_Nidalee_Sup_Kayle(Ratings):
pass
class NA_Nidalee_Sup_Kayn(Ratings):
pass
class NA_Nidalee_Sup_Kennen(Ratings):
pass
class NA_Nidalee_Sup_Khazix(Ratings):
pass
class NA_Nidalee_Sup_Kindred(Ratings):
pass
class NA_Nidalee_Sup_Kled(Ratings):
pass
class NA_Nidalee_Sup_KogMaw(Ratings):
pass
class NA_Nidalee_Sup_Leblanc(Ratings):
pass
class NA_Nidalee_Sup_LeeSin(Ratings):
pass
class NA_Nidalee_Sup_Leona(Ratings):
pass
class NA_Nidalee_Sup_Lissandra(Ratings):
pass
class NA_Nidalee_Sup_Lucian(Ratings):
pass
class NA_Nidalee_Sup_Lulu(Ratings):
pass
class NA_Nidalee_Sup_Lux(Ratings):
pass
class NA_Nidalee_Sup_Malphite(Ratings):
pass
class NA_Nidalee_Sup_Malzahar(Ratings):
pass
class NA_Nidalee_Sup_Maokai(Ratings):
pass
class NA_Nidalee_Sup_MasterYi(Ratings):
pass
class NA_Nidalee_Sup_MissFortune(Ratings):
pass
class NA_Nidalee_Sup_MonkeyKing(Ratings):
pass
class NA_Nidalee_Sup_Mordekaiser(Ratings):
pass
class NA_Nidalee_Sup_Morgana(Ratings):
pass
class NA_Nidalee_Sup_Nami(Ratings):
pass
class NA_Nidalee_Sup_Nasus(Ratings):
pass
class NA_Nidalee_Sup_Nautilus(Ratings):
pass
class NA_Nidalee_Sup_Nidalee(Ratings):
pass
class NA_Nidalee_Sup_Nocturne(Ratings):
pass
class NA_Nidalee_Sup_Nunu(Ratings):
pass
class NA_Nidalee_Sup_Olaf(Ratings):
pass
class NA_Nidalee_Sup_Orianna(Ratings):
pass
class NA_Nidalee_Sup_Ornn(Ratings):
pass
class NA_Nidalee_Sup_Pantheon(Ratings):
pass
class NA_Nidalee_Sup_Poppy(Ratings):
pass
class NA_Nidalee_Sup_Quinn(Ratings):
pass
class NA_Nidalee_Sup_Rakan(Ratings):
pass
class NA_Nidalee_Sup_Rammus(Ratings):
pass
class NA_Nidalee_Sup_RekSai(Ratings):
pass
class NA_Nidalee_Sup_Renekton(Ratings):
pass
class NA_Nidalee_Sup_Rengar(Ratings):
pass
class NA_Nidalee_Sup_Riven(Ratings):
pass
class NA_Nidalee_Sup_Rumble(Ratings):
pass
class NA_Nidalee_Sup_Ryze(Ratings):
pass
class NA_Nidalee_Sup_Sejuani(Ratings):
pass
class NA_Nidalee_Sup_Shaco(Ratings):
pass
class NA_Nidalee_Sup_Shen(Ratings):
pass
class NA_Nidalee_Sup_Shyvana(Ratings):
pass
class NA_Nidalee_Sup_Singed(Ratings):
pass
class NA_Nidalee_Sup_Sion(Ratings):
pass
class NA_Nidalee_Sup_Sivir(Ratings):
pass
class NA_Nidalee_Sup_Skarner(Ratings):
pass
class NA_Nidalee_Sup_Sona(Ratings):
pass
class NA_Nidalee_Sup_Soraka(Ratings):
pass
class NA_Nidalee_Sup_Swain(Ratings):
pass
class NA_Nidalee_Sup_Syndra(Ratings):
pass
class NA_Nidalee_Sup_TahmKench(Ratings):
pass
class NA_Nidalee_Sup_Taliyah(Ratings):
pass
class NA_Nidalee_Sup_Talon(Ratings):
pass
class NA_Nidalee_Sup_Taric(Ratings):
pass
class NA_Nidalee_Sup_Teemo(Ratings):
pass
class NA_Nidalee_Sup_Thresh(Ratings):
pass
class NA_Nidalee_Sup_Tristana(Ratings):
pass
class NA_Nidalee_Sup_Trundle(Ratings):
pass
class NA_Nidalee_Sup_Tryndamere(Ratings):
pass
class NA_Nidalee_Sup_TwistedFate(Ratings):
pass
class NA_Nidalee_Sup_Twitch(Ratings):
pass
class NA_Nidalee_Sup_Udyr(Ratings):
pass
class NA_Nidalee_Sup_Urgot(Ratings):
pass
class NA_Nidalee_Sup_Varus(Ratings):
pass
class NA_Nidalee_Sup_Vayne(Ratings):
pass
class NA_Nidalee_Sup_Veigar(Ratings):
pass
class NA_Nidalee_Sup_Velkoz(Ratings):
pass
class NA_Nidalee_Sup_Vi(Ratings):
pass
class NA_Nidalee_Sup_Viktor(Ratings):
pass
class NA_Nidalee_Sup_Vladimir(Ratings):
pass
class NA_Nidalee_Sup_Volibear(Ratings):
pass
class NA_Nidalee_Sup_Warwick(Ratings):
pass
class NA_Nidalee_Sup_Xayah(Ratings):
pass
class NA_Nidalee_Sup_Xerath(Ratings):
pass
class NA_Nidalee_Sup_XinZhao(Ratings):
pass
class NA_Nidalee_Sup_Yasuo(Ratings):
pass
class NA_Nidalee_Sup_Yorick(Ratings):
pass
class NA_Nidalee_Sup_Zac(Ratings):
pass
class NA_Nidalee_Sup_Zed(Ratings):
pass
class NA_Nidalee_Sup_Ziggs(Ratings):
pass
class NA_Nidalee_Sup_Zilean(Ratings):
pass
class NA_Nidalee_Sup_Zyra(Ratings):
pass
| 16.026379
| 46
| 0.77151
| 972
| 6,683
| 4.878601
| 0.151235
| 0.203712
| 0.407423
| 0.494728
| 0.808941
| 0.808941
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166243
| 6,683
| 416
| 47
| 16.064904
| 0.851041
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
eeea5bf4aa4cf9ca1a57860939418f72ff6d9116
| 10,335
|
py
|
Python
|
cliCube.py
|
SirSnuffles/Rubiks-Cube
|
1d4b275602fb90d8f768483bc71a8f4fdf0116a4
|
[
"Apache-2.0"
] | null | null | null |
cliCube.py
|
SirSnuffles/Rubiks-Cube
|
1d4b275602fb90d8f768483bc71a8f4fdf0116a4
|
[
"Apache-2.0"
] | null | null | null |
cliCube.py
|
SirSnuffles/Rubiks-Cube
|
1d4b275602fb90d8f768483bc71a8f4fdf0116a4
|
[
"Apache-2.0"
] | null | null | null |
class cliCube():
"""Creates a cli representation of a rubik's cube"""
def __init__(self):
"""initiate a 6x9 array of values"""
self.state = [
9*['yellow'],
9*['blue'],
9*['red'],
9*['green'],
9*['orange'],
9*['white']
]
    def __str__(self):
        """return a formatted perspective of state for clarity"""
        # Intended net layout (face numbers are the 1-based labels below):
        #
        #                       '#' '#' '#'
        #                       '#' 'y' '#'  1 Top
        #                       '#' '#' '#'
        # '#' '#' '#'          '#' '#' '#'           '#' '#' '#'           '#' '#' '#'
        # '#' 'b' '#' 3 Left   '#' 'r' '#' 2 Front   '#' 'g' '#' 4 Right   '#' 'o' '#' 6 Back
        # '#' '#' '#'          '#' '#' '#'           '#' '#' '#'           '#' '#' '#'
        #                       '#' '#' '#'
        #                       '#' 'w' '#'  5 Bottom
        #                       '#' '#' '#'
        # Face letters used by rotateLayer:
        # Top = T, Left = L, Right = R, Front = F, Bottom = B, Back = Ba
        lines = []
        lines.append("{:>37}{:>10}{:>10}".format(*self.state[0][0:3]))
        lines.append("{:>37}{:>10}{:>10}".format(*self.state[0][3:6]))
        lines.append("{:>37}{:>10}{:>10}".format(*self.state[0][6:9]))
        lines.append("")
        lines.append(" ".join(("{}{:>10}{:>10}".format(*self.state[2][0:3]),
                               "{:>10}{:>10}{:>10}".format(*self.state[1][0:3]),
                               "{:>10}{:>10}{:>10}".format(*self.state[3][0:3]))))
        lines.append(" ".join(("{}{:>11}{:>10}".format(*self.state[2][3:6]),
                               "{:>10}{:>10}{:>10}".format(*self.state[1][3:6]),
                               "{:>10}{:>10}{:>10}".format(*self.state[3][3:6]))))
        lines.append(" ".join(("{}{:>10}{:>10}".format(*self.state[2][6:9]),
                               "{:>10}{:>10}{:>10}".format(*self.state[1][6:9]),
                               "{:>10}{:>10}{:>10}".format(*self.state[3][6:9]))))
        lines.append("")
        lines.append("{:>37}{:>10}{:>10}".format(*self.state[4][0:3]))
        lines.append("{:>37}{:>10}{:>10}".format(*self.state[4][3:6]))
        lines.append("{:>37}{:>10}{:>10}".format(*self.state[4][6:9]))
        lines.append("")
        lines.append("{:>37}{:>10}{:>10}".format(*self.state[5][0:3]))
        lines.append("{:>37}{:>10}{:>10}".format(*self.state[5][3:6]))
        lines.append("{:>37}{:>10}{:>10}".format(*self.state[5][6:9]))
        return "\n".join(lines)
    def __repr__(self):
        """return the raw 6x9 state of the cube"""
        return repr(self.state)
    def rotateFace(self, dir, index):
        """rotate the face at the given state index 90 degrees in the given direction
        Sticker indices within a face are laid out as:
            0 1 2
            3 4 5
            6 7 8
        """
        if dir == "CW":
self.state[index][2], self.state[index][5], self.state[index][8], \
self.state[index][1], self.state[index][4], self.state[index][7], \
self.state[index][0], self.state[index][3], self.state[index][6] = \
self.state[index][0], self.state[index][1], self.state[index][2], \
self.state[index][3], self.state[index][4], self.state[index][5], \
self.state[index][6], self.state[index][7], self.state[index][8]
elif dir == "CCW":
self.state[index][6], self.state[index][3], self.state[index][0], \
self.state[index][7], self.state[index][4], self.state[index][1], \
self.state[index][8], self.state[index][5], self.state[index][2] = \
self.state[index][0], self.state[index][1], self.state[index][2], \
self.state[index][3], self.state[index][4], self.state[index][5], \
self.state[index][6], self.state[index][7], self.state[index][8]
else:
print("Can't rotate face values!")
def rotateTop(self, face, dir):
"""rotate the top layer"""
index = 0
self.rotateFace(dir, index)
if dir == 'CW':
self.state[4][0], self.state[4][1], self.state[4][2], \
self.state[1][0], self.state[1][1], self.state[1][2], \
self.state[2][0], self.state[2][1], self.state[2][2], \
self.state[3][0], self.state[3][1], self.state[3][2] = \
self.state[1][0], self.state[1][1], self.state[1][2], \
self.state[2][0], self.state[2][1], self.state[2][2], \
self.state[3][0], self.state[3][1], self.state[3][2], \
self.state[4][0], self.state[4][1], self.state[4][2]
elif dir == 'CCW':
self.state[1][0], self.state[1][1], self.state[1][2], \
self.state[2][0], self.state[2][1], self.state[2][2], \
self.state[3][0], self.state[3][1], self.state[3][2], \
self.state[4][0], self.state[4][1], self.state[4][2] = \
self.state[4][0], self.state[4][1], self.state[4][2], \
self.state[1][0], self.state[1][1], self.state[1][2], \
self.state[2][0], self.state[2][1], self.state[2][2], \
self.state[3][0], self.state[3][1], self.state[3][2]
def rotateFront(self, face, dir):
"""rotate the front layer"""
index = 2
self.rotateFace(dir, index)
if dir == 'CW':
self.state[0][8], self.state[0][7], self.state[0][6], \
self.state[1][2], self.state[1][5], self.state[1][8], \
self.state[3][0], self.state[3][3], self.state[3][6], \
self.state[5][2], self.state[5][1], self.state[5][0] = \
self.state[1][2], self.state[1][5], self.state[1][8], \
self.state[5][0], self.state[5][1], self.state[5][2], \
self.state[0][6], self.state[0][7], self.state[0][8], \
self.state[3][0], self.state[3][3], self.state[3][6]
elif dir == 'CCW':
self.state[1][2], self.state[1][5], self.state[1][8], \
self.state[5][0], self.state[5][1], self.state[5][2], \
self.state[0][6], self.state[0][7], self.state[0][8], \
self.state[3][0], self.state[3][3], self.state[3][6] = \
self.state[0][8], self.state[0][7], self.state[0][6], \
self.state[1][2], self.state[1][5], self.state[1][8], \
self.state[3][0], self.state[3][3], self.state[3][6], \
self.state[5][2], self.state[5][1], self.state[5][0]
def rotateLeft(self, face, dir):
"""rotate the left layer"""
index = 1
self.rotateFace(dir, index)
if dir == 'CW':
self.state[2][0], self.state[2][3], self.state[2][6], \
self.state[5][0], self.state[5][3], self.state[5][6], \
self.state[0][0], self.state[0][3], self.state[0][6], \
self.state[4][2], self.state[4][5], self.state[4][8] = \
self.state[0][0], self.state[0][3], self.state[0][6], \
self.state[2][0], self.state[2][3], self.state[2][6], \
self.state[4][8], self.state[4][5], self.state[4][2], \
self.state[5][6], self.state[5][3], self.state[5][0]
elif dir == 'CCW':
self.state[0][0], self.state[0][3], self.state[0][6], \
self.state[2][0], self.state[2][3], self.state[2][6], \
self.state[4][8], self.state[4][5], self.state[4][2], \
self.state[5][6], self.state[5][3], self.state[5][0] = \
self.state[2][0], self.state[2][3], self.state[2][6], \
self.state[5][0], self.state[5][3], self.state[5][6], \
self.state[0][0], self.state[0][3], self.state[0][6], \
self.state[4][2], self.state[4][5], self.state[4][8]
def rotateRight(self, face, dir):
"""rotate the right layer"""
index = 3
self.rotateFace(dir, index)
if dir == 'CW':
self.state[0][2], self.state[0][5], self.state[0][8], \
self.state[2][2], self.state[2][5], self.state[2][8], \
self.state[4][6], self.state[4][3], self.state[4][0], \
self.state[5][8], self.state[5][5], self.state[5][2] = \
self.state[2][2], self.state[2][5], self.state[2][8], \
self.state[5][2], self.state[5][5], self.state[5][8], \
self.state[0][2], self.state[0][5], self.state[0][8], \
self.state[4][0], self.state[4][3], self.state[4][6]
elif dir == 'CCW':
self.state[2][2], self.state[2][5], self.state[2][8], \
self.state[5][2], self.state[5][5], self.state[5][8], \
self.state[0][2], self.state[0][5], self.state[0][8], \
self.state[4][0], self.state[4][3], self.state[4][6] = \
self.state[0][2], self.state[0][5], self.state[0][8], \
self.state[2][2], self.state[2][5], self.state[2][8], \
self.state[4][6], self.state[4][3], self.state[4][0], \
self.state[5][8], self.state[5][5], self.state[5][2]
def rotateBottom(self, face, dir):
"""rotate the bottom layer"""
index = 5
self.rotateFace(dir, index)
if dir == 'CW':
self.state[2][6], self.state[2][7], self.state[2][8], \
self.state[3][6], self.state[3][7], self.state[3][8], \
self.state[4][6], self.state[4][7], self.state[4][8], \
self.state[1][6], self.state[1][7], self.state[1][8] = \
self.state[1][6], self.state[1][7], self.state[1][8], \
self.state[2][6], self.state[2][7], self.state[2][8], \
self.state[3][6], self.state[3][7], self.state[3][8], \
self.state[4][6], self.state[4][7], self.state[4][8]
elif dir == 'CCW':
self.state[1][6], self.state[1][7], self.state[1][8], \
self.state[2][6], self.state[2][7], self.state[2][8], \
self.state[3][6], self.state[3][7], self.state[3][8], \
self.state[4][6], self.state[4][7], self.state[4][8] = \
self.state[2][6], self.state[2][7], self.state[2][8], \
self.state[3][6], self.state[3][7], self.state[3][8], \
self.state[4][6], self.state[4][7], self.state[4][8], \
self.state[1][6], self.state[1][7], self.state[1][8]
def rotateBack(self, face, dir):
"""rotate the back layer"""
index = 4
self.rotateFace(dir, index)
if dir == 'CW':
self.state[0][0], self.state[0][1], self.state[0][2], \
self.state[1][6], self.state[1][3], self.state[1][0], \
self.state[3][2], self.state[3][5], self.state[3][8], \
self.state[5][8], self.state[5][7], self.state[5][6] = \
self.state[3][2], self.state[3][5], self.state[3][8], \
self.state[0][0], self.state[0][1], self.state[0][2], \
self.state[5][8], self.state[5][7], self.state[5][6], \
self.state[1][6], self.state[1][3], self.state[1][0]
elif dir == 'CCW':
self.state[3][2], self.state[3][5], self.state[3][8], \
self.state[0][0], self.state[0][1], self.state[0][2], \
self.state[5][8], self.state[5][7], self.state[5][6], \
self.state[1][6], self.state[1][3], self.state[1][0] = \
self.state[0][0], self.state[0][1], self.state[0][2], \
self.state[1][6], self.state[1][3], self.state[1][0], \
self.state[3][2], self.state[3][5], self.state[3][8], \
self.state[5][8], self.state[5][7], self.state[5][6]
    def rotateLayer(self, move):
        """rotate any layer from a move string: a face letter (T, F, L, R, B, Ba) followed by CW or CCW, e.g. 'TCW' or 'BaCCW'"""
if move[0:2] == "Ba":
face = "Ba"
direction = move[2:]
else:
face = move[0]
direction = move[1:]
if face == 'T': #Top
self.rotateTop(face, direction)
elif face == 'F': #Front
self.rotateFront(face, direction)
elif face == 'L': #Left
self.rotateLeft(face, direction)
elif face == 'R': #Right
self.rotateRight(face, direction)
elif face == 'B': #Bottom
self.rotateBottom(face, direction)
elif face == 'Ba': #Back
self.rotateBack(face, direction)
else:
print('ERROR! Unknown face name: input = "T", "F", "B", "Ba", "L", "R"')
return
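A minimal usage sketch (not part of the original cliCube.py) showing the move-string format that rotateLayer expects; the face letters and directions follow the comments in the class above.

if __name__ == '__main__':
    cube = cliCube()
    # apply a few moves: top clockwise, front counter-clockwise, right clockwise, back counter-clockwise
    for move in ('TCW', 'FCCW', 'RCW', 'BaCCW'):
        cube.rotateLayer(move)
    print(cube)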
| 40.371094
| 156
| 0.546105
| 1,808
| 10,335
| 3.115044
| 0.056416
| 0.549716
| 0.090554
| 0.054332
| 0.810014
| 0.787464
| 0.775036
| 0.721058
| 0.705966
| 0.658203
| 0
| 0.091076
| 0.157523
| 10,335
| 256
| 157
| 40.371094
| 0.55576
| 0.083986
| 0
| 0.517767
| 0
| 0.005076
| 0.050889
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055838
| false
| 0
| 0
| 0
| 0.06599
| 0.111675
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e1068e8b8df875ba01ca2acfdd3711df2979f81c
| 45
|
py
|
Python
|
datatree/_version.py
|
TomNicholas/datatree
|
df6925faa80bdc24a9f1b60cb37fef81d82f7846
|
[
"Apache-2.0"
] | 31
|
2021-08-24T17:17:34.000Z
|
2022-02-14T21:21:18.000Z
|
datatree/_version.py
|
TomNicholas/datatree
|
df6925faa80bdc24a9f1b60cb37fef81d82f7846
|
[
"Apache-2.0"
] | 31
|
2021-08-24T00:22:01.000Z
|
2022-02-18T00:35:27.000Z
|
datatree/_version.py
|
TomNicholas/datatree
|
df6925faa80bdc24a9f1b60cb37fef81d82f7846
|
[
"Apache-2.0"
] | 3
|
2021-08-24T21:01:07.000Z
|
2022-02-25T23:12:10.000Z
|
__version__ = "0.1.dev75+g977ffe2.d20210902"
| 22.5
| 44
| 0.777778
| 6
| 45
| 5.166667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.380952
| 0.066667
| 45
| 1
| 45
| 45
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0.622222
| 0.622222
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6d66e618958469e0dd670182fce99a5f1af7d90d
| 2,549
|
py
|
Python
|
MyDailyNews/src/dataGetter/funnyNews/joke.py
|
Jackal007/MyDailyNews
|
964a55abf4b4d1ca7a228a641b9dbd7c6b44e192
|
[
"Unlicense"
] | 2
|
2017-11-21T06:39:42.000Z
|
2020-05-05T08:40:07.000Z
|
MyDailyNews/src/dataGetter/funnyNews/joke.py
|
Jackal007/MyDailyNews
|
964a55abf4b4d1ca7a228a641b9dbd7c6b44e192
|
[
"Unlicense"
] | null | null | null |
MyDailyNews/src/dataGetter/funnyNews/joke.py
|
Jackal007/MyDailyNews
|
964a55abf4b4d1ca7a228a641b9dbd7c6b44e192
|
[
"Unlicense"
] | null | null | null |
'''
Created on October 3, 2017
@author: jack
'''
from lib.utils.spiderUtils import RequestsSpider
from lib.core.myTypes import News
class QSBKSpider(RequestsSpider):
def __init__(self):
RequestsSpider.__init__(self, 'https://www.qiushibaike.com/')
def get_QSs(self):
'''
        get many jokes from QSBK (qiushibaike.com)
'''
QS_list = []
_, page = self.get_page(self.baseURL)
rawQSs = page.select('div.article.block.untagged.mb15')
for rawQS in rawQSs:
QS_list.append(self.get_QS(rawQS))
return QS_list
def get_QS(self, rawQS):
'''
        get a single joke from QSBK
'''
text = self.get_text(rawQS)
        pictures = self.get_pictures(rawQS)
        QS = News(title=1, datetime=None, sumary=None,
                  content=text, pictures=pictures, url='none'
)
return QS
def get_text(self, rawQS):
text = rawQS.select('.content')[0].span.string
try:
return text.strip()
        except AttributeError:
            # text is None when the post has no plain-text content
return text
def get_pictures(self, rawQS):
picture_list = []
pictures = rawQS.select('img')
for p in pictures:
picture_list.append(p['src'])
return picture_list
class other(RequestsSpider):
def __init__(self):
RequestsSpider.__init__(self, 'https://www.qiushibaike.com/')
def get_QSs(self):
'''
        get many jokes from QSBK (qiushibaike.com)
'''
QS_list = []
_, page = self.get_page(self.baseURL)
rawQSs = page.select('div.article.block.untagged.mb15')
for rawQS in rawQSs:
QS_list.append(self.get_QS(rawQS))
return QS_list
def get_QS(self, rawQS):
'''
        get a single joke from QSBK
'''
text = self.get_text(rawQS)
        pictures = self.get_pictures(rawQS)
        QS = News(title=1, datetime=None, sumary=None,
                  content=text, pictures=pictures, url='none'
)
return QS
def get_text(self, rawQS):
text = rawQS.select('.content')[0].span.string
try:
return text.strip()
        except AttributeError:
            # text is None when the post has no plain-text content
return text
def get_pictures(self, rawQS):
picture_list = []
pictures = rawQS.select('img')
for p in pictures:
picture_list.append(p['src'])
return picture_list
if __name__ == '__main__':
t = QSBKSpider()
t.get_QSs()
| 23.385321
| 69
| 0.546881
| 289
| 2,549
| 4.633218
| 0.249135
| 0.035848
| 0.038835
| 0.037341
| 0.885736
| 0.885736
| 0.885736
| 0.885736
| 0.885736
| 0.885736
| 0
| 0.008966
| 0.343664
| 2,549
| 108
| 70
| 23.601852
| 0.791393
| 0.061985
| 0
| 0.857143
| 0
| 0
| 0.072809
| 0.027865
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15873
| false
| 0
| 0.031746
| 0
| 0.380952
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6d6a327fd42c80317ae927e22590c7d806635410
| 2,851
|
py
|
Python
|
fastapi/utils/http_util.py
|
zhangnian/fastapi
|
65eb49ec58041fb1212c3e867d19a405d7e40662
|
[
"MIT"
] | 33
|
2017-08-14T09:39:12.000Z
|
2021-09-11T14:54:28.000Z
|
fastapi/utils/http_util.py
|
zhangnian/fastapi
|
65eb49ec58041fb1212c3e867d19a405d7e40662
|
[
"MIT"
] | null | null | null |
fastapi/utils/http_util.py
|
zhangnian/fastapi
|
65eb49ec58041fb1212c3e867d19a405d7e40662
|
[
"MIT"
] | 9
|
2017-12-05T11:54:01.000Z
|
2020-11-10T08:03:35.000Z
|
from flask import request, jsonify
from fastapi.utils.error import APIError
def render_ok(data=None):
if data is None:
data = {}
return jsonify({'code': 0, 'msg': '', 'data': data})
def render_error(code, msg):
assert code != 0
return jsonify({'code': code, 'msg': msg, 'data': {}})
def get_qs_arg(name, parser=None, validator=None):
    val = request.args.get(name, None)
    if val is None:
        raise APIError(ret=1, msg='missing parameter: {}'.format(name))
    if parser and callable(parser):
        try:
            val = parser(val)
        except Exception as e:
            raise APIError(ret=1, msg='failed to convert parameter: {}'.format(name))
    if validator and callable(validator):
        if not validator(val):
            raise APIError(ret=1, msg='invalid parameter: {}'.format(name))
    return val
def get_qs_arg_default(name, default=None, parser=None, validator=None):
    val = request.args.get(name, None)
    if val is None:
        if default is not None:
            return default
        raise APIError(ret=1, msg='missing parameter: {}'.format(name))
    if parser and callable(parser):
        try:
            val = parser(val)
        except Exception as e:
            raise APIError(ret=1, msg='failed to convert parameter: {}'.format(name))
    if validator and callable(validator):
        if not validator(val):
            raise APIError(ret=1, msg='invalid parameter: {}'.format(name))
    return val
def get_json_arg(name, parser=None, validator=None):
    jdata = request.get_json(force=True, silent=True)
    if not request.is_json:
        raise APIError(ret=1, msg='invalid request data format')
    if jdata is None:
        raise APIError(ret=1, msg='invalid request data format')
    val = jdata.get(name, None)
    if val is None:
        raise APIError(ret=1, msg='missing parameter: {}'.format(name))
    if parser and callable(parser):
        try:
            val = parser(val)
        except Exception as e:
            raise APIError(ret=1, msg='failed to convert parameter: {}'.format(name))
    if validator and callable(validator):
        if not validator(val):
            raise APIError(ret=1, msg='invalid parameter: {}'.format(name))
    return val
def get_json_arg_default(name, default=None, parser=None, validator=None):
    jdata = request.get_json(force=True, silent=True)
    if not request.is_json:
        raise APIError(ret=1, msg='invalid request data format')
    if jdata is None:
        raise APIError(ret=1, msg='invalid request data format')
    val = jdata.get(name, None)
    if val is None:
        if default is not None:
            return default
        raise APIError(ret=1, msg='missing parameter: {}'.format(name))
    if parser and callable(parser):
        try:
            val = parser(val)
        except Exception as e:
            raise APIError(ret=1, msg='failed to convert parameter: {}'.format(name))
    if validator and callable(validator):
        if not validator(val):
            raise APIError(ret=1, msg='invalid parameter: {}'.format(name))
    return val
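A minimal sketch (not from the original repository) of how these helpers are typically wired into a Flask view; the app object and the /items route below are assumptions for illustration only. The parser converts the raw query-string value and the validator rejects out-of-range values before the handler body runs.

from flask import Flask

app = Flask(__name__)

@app.route('/items')
def list_items():
    # page must parse as an int and be >= 1, otherwise APIError propagates
    page = get_qs_arg_default('page', default=1, parser=int,
                              validator=lambda v: v >= 1)
    return render_ok({'page': page})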
| 27.413462
| 74
| 0.604349
| 395
| 2,851
| 4.321519
| 0.134177
| 0.121851
| 0.149971
| 0.159344
| 0.88225
| 0.88225
| 0.875806
| 0.875806
| 0.875806
| 0.848272
| 0
| 0.008596
| 0.265521
| 2,851
| 104
| 75
| 27.413462
| 0.80659
| 0
| 0
| 0.826667
| 0
| 0
| 0.052595
| 0
| 0
| 0
| 0
| 0
| 0.013333
| 1
| 0.08
| false
| 0
| 0.026667
| 0
| 0.213333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6d8f4b6c27c1ef7f203fdaa8cd066e1d6b734379
| 14,808
|
py
|
Python
|
tests/test_indexer.py
|
se7entyse7en/eviex
|
b1b3e9a50e6fdf2b86b680b0fb9c4ee7cb20d8ee
|
[
"Apache-2.0"
] | 2
|
2020-10-16T08:33:54.000Z
|
2020-10-19T15:56:42.000Z
|
tests/test_indexer.py
|
se7entyse7en/eviex
|
b1b3e9a50e6fdf2b86b680b0fb9c4ee7cb20d8ee
|
[
"Apache-2.0"
] | 4
|
2020-10-17T15:51:13.000Z
|
2020-10-17T16:11:19.000Z
|
tests/test_indexer.py
|
se7entyse7en/eviex
|
b1b3e9a50e6fdf2b86b680b0fb9c4ee7cb20d8ee
|
[
"Apache-2.0"
] | null | null | null |
import itertools
from datetime import datetime
from datetime import timezone
from typing import List
from typing import Tuple
import numpy as np
import pytest
from eviex.indexer import LayerLevel
from eviex.indexer import MemoryIndexer
mock_data_none_granularity = [
{
"timestamp": datetime(1970, 1, 1, 0, 0, 0, 0).replace(tzinfo=timezone.utc),
"values": ["a"],
},
{
"timestamp": datetime(1970, 1, 1, 0, 0, 0, 1).replace(tzinfo=timezone.utc),
"values": ["b"],
},
{
"timestamp": datetime(1970, 1, 1, 1, 0, 0, 2).replace(tzinfo=timezone.utc),
"values": ["c"],
},
{
"timestamp": datetime(1970, 1, 2, 0, 0, 0, 0).replace(tzinfo=timezone.utc),
"values": ["d"],
},
{
"timestamp": datetime(1970, 1, 2, 0, 0, 0, 1).replace(tzinfo=timezone.utc),
"values": ["e"],
},
{
"timestamp": datetime(1970, 1, 2, 0, 0, 0, 2).replace(tzinfo=timezone.utc),
"values": ["f"],
},
{
"timestamp": datetime(1971, 1, 1, 0, 0, 0, 0).replace(tzinfo=timezone.utc),
"values": ["g"],
},
{
"timestamp": datetime(1971, 1, 1, 0, 0, 0, 1).replace(tzinfo=timezone.utc),
"values": ["h"],
},
{
"timestamp": datetime(1971, 1, 1, 0, 0, 0, 2).replace(tzinfo=timezone.utc),
"values": ["i"],
},
]
mock_data_small_granularity = [
{
"timestamp": datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
"values": ["a"],
},
{
"timestamp": datetime(1970, 1, 1, 0, 45, 0).replace(tzinfo=timezone.utc),
"values": ["b"],
},
{
"timestamp": datetime(1970, 1, 1, 1, 15, 0).replace(tzinfo=timezone.utc),
"values": ["c"],
},
{
"timestamp": datetime(1970, 1, 1, 3, 0, 0).replace(tzinfo=timezone.utc),
"values": ["d"],
},
{
"timestamp": datetime(1970, 1, 1, 3, 15, 0).replace(tzinfo=timezone.utc),
"values": ["e"],
},
{
"timestamp": datetime(1970, 1, 1, 3, 30, 0).replace(tzinfo=timezone.utc),
"values": ["f"],
},
{
"timestamp": datetime(1970, 1, 1, 3, 45, 0).replace(tzinfo=timezone.utc),
"values": ["g"],
},
{
"timestamp": datetime(1970, 1, 1, 4, 0, 0).replace(tzinfo=timezone.utc),
"values": ["h"],
},
{
"timestamp": datetime(1970, 1, 1, 4, 45, 0).replace(tzinfo=timezone.utc),
"values": ["i"],
},
]
mock_data_big_granularity = [
{
"timestamp": datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
"values": ["a"],
},
{
"timestamp": datetime(1970, 2, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
"values": ["b"],
},
{
"timestamp": datetime(1970, 3, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
"values": ["c"],
},
{
"timestamp": datetime(1970, 4, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
"values": ["d"],
},
{
"timestamp": datetime(1970, 7, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
"values": ["e"],
},
{
"timestamp": datetime(1970, 9, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
"values": ["f"],
},
{
"timestamp": datetime(1971, 3, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
"values": ["g"],
},
{
"timestamp": datetime(1971, 11, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
"values": ["h"],
},
{
"timestamp": datetime(1971, 12, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
"values": ["i"],
},
]
def generate_levels_combinations(
*min_levels: LayerLevel,
) -> List[Tuple[LayerLevel, LayerLevel]]:
def gen(ml):
return [
(ml, LayerLevel.get(ll_value))
for ll_value in LayerLevel.levels().keys()
if ml.value < ll_value
]
return list(itertools.chain.from_iterable(gen(ml) for ml in min_levels))
@pytest.mark.asyncio
@pytest.mark.parametrize(
"min_level, max_level",
generate_levels_combinations(LayerLevel.NONE, LayerLevel.SECOND, LayerLevel.MINUTE),
)
@pytest.mark.parametrize(
"date_from, date_to, expected",
[
(
datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1970, 1, 1, 5, 0, 0).replace(tzinfo=timezone.utc),
np.array(["a", "b", "c", "d", "e", "f", "g", "h", "i"]),
),
(
datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1969, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1969, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1971, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1972, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1970, 1, 1, 0, 10, 0).replace(tzinfo=timezone.utc),
datetime(1970, 1, 1, 3, 40, 0).replace(tzinfo=timezone.utc),
np.array(["b", "c", "d", "e", "f"]),
),
(
datetime(1970, 1, 1, 0, 40, 0).replace(tzinfo=timezone.utc),
datetime(1970, 1, 1, 0, 50, 0).replace(tzinfo=timezone.utc),
np.array(["b"]),
),
(
datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1970, 1, 1, 4, 45, 0).replace(tzinfo=timezone.utc),
np.array(["a", "b", "c", "d", "e", "f", "g", "h"]),
),
],
)
async def test_get_with_no_precision_loss_small_granularity(
min_level,
max_level,
date_from,
date_to,
expected,
):
"""Test query with interval granularity not smaller than the mininum layer level.
In this case we don't have any precision loss.
"""
indexer = MemoryIndexer(min_level=min_level, max_level=max_level)
await indexer.load(mock_data_small_granularity)
actual = indexer.get(date_from, date_to)
np.testing.assert_array_equal(actual, expected)
@pytest.mark.asyncio
@pytest.mark.parametrize(
"min_level, max_level",
generate_levels_combinations(LayerLevel.HOUR, LayerLevel.DAY, LayerLevel.MONTH),
)
@pytest.mark.parametrize(
"date_from, date_to, expected",
[
(
datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1972, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array(["a", "b", "c", "d", "e", "f", "g", "h", "i"]),
),
(
datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1969, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1969, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1972, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1973, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1970, 2, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1970, 10, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array(["b", "c", "d", "e", "f"]),
),
(
datetime(1970, 2, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1970, 3, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array(["b"]),
),
(
datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1971, 12, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array(["a", "b", "c", "d", "e", "f", "g", "h"]),
),
],
)
async def test_get_with_no_precision_loss_big_granularity(
min_level,
max_level,
date_from,
date_to,
expected,
):
"""Test query with interval granularity not smaller than the mininum layer level.
In this case we don't have any precision loss.
"""
indexer = MemoryIndexer(min_level=min_level, max_level=max_level)
await indexer.load(mock_data_big_granularity)
actual = indexer.get(date_from, date_to)
np.testing.assert_array_equal(actual, expected)
@pytest.mark.asyncio
@pytest.mark.parametrize(
"min_level, max_level",
generate_levels_combinations(LayerLevel.HOUR),
)
@pytest.mark.parametrize(
"date_from, date_to, expected",
[
(
datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1970, 1, 1, 5, 0, 0).replace(tzinfo=timezone.utc),
np.array(["a", "b", "c", "d", "e", "f", "g", "h", "i"]),
),
(
datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1969, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1969, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1971, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1972, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1970, 1, 1, 0, 10, 0).replace(tzinfo=timezone.utc),
datetime(1970, 1, 1, 3, 40, 0).replace(tzinfo=timezone.utc),
np.array(["a", "b", "c"]),
),
(
datetime(1970, 1, 1, 0, 40, 0).replace(tzinfo=timezone.utc),
datetime(1970, 1, 1, 0, 50, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1970, 1, 1, 4, 45, 0).replace(tzinfo=timezone.utc),
np.array(["a", "b", "c", "d", "e", "f", "g"]),
),
],
)
async def test_get_with_query_precision_loss_small_granularity(
min_level,
max_level,
date_from,
date_to,
expected,
):
"""Test query with interval granularity smaller than the minimum layer level.
In this case we have a precision loss due to the `date_from` and `date_to` being
transformed to a less granular value corresponding to the minimum layer level.
"""
indexer = MemoryIndexer(min_level=min_level, max_level=max_level)
await indexer.load(mock_data_small_granularity)
actual = indexer.get(date_from, date_to)
np.testing.assert_array_equal(actual, expected)
@pytest.mark.asyncio
@pytest.mark.parametrize(
"min_level, max_level",
generate_levels_combinations(LayerLevel.DAY),
)
@pytest.mark.parametrize(
"date_from, date_to, expected",
[
(
datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1972, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array(["a", "b", "c", "d", "e", "f", "g", "h", "i"]),
),
(
datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1969, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1969, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1970, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1972, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1973, 1, 1, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1970, 2, 1, 1, 0, 0).replace(tzinfo=timezone.utc),
datetime(1970, 7, 1, 3, 0, 0).replace(tzinfo=timezone.utc),
np.array(["b", "c", "d"]),
),
(
datetime(1970, 8, 1, 0, 30, 0).replace(tzinfo=timezone.utc),
datetime(1970, 9, 1, 0, 50, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1970, 1, 1, 12, 0, 0).replace(tzinfo=timezone.utc),
datetime(1971, 11, 1, 6, 30, 0).replace(tzinfo=timezone.utc),
np.array(["a", "b", "c", "d", "e", "f", "g"]),
),
],
)
async def test_get_with_query_precision_loss_big_granularity(
min_level,
max_level,
date_from,
date_to,
expected,
):
"""Test query with interval granularity smaller than the minimum layer level.
In this case we have a precision loss due to the `date_from` and `date_to` being
transformed to a less granular value corresponding to the minimum layer level.
"""
indexer = MemoryIndexer(min_level=min_level, max_level=max_level)
await indexer.load(mock_data_big_granularity)
actual = indexer.get(date_from, date_to)
np.testing.assert_array_equal(actual, expected)
@pytest.mark.asyncio
@pytest.mark.parametrize(
"date_from, date_to, expected",
[
(
datetime(1970, 1, 1, 0, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1971, 1, 1, 0, 0, 0, 3).replace(tzinfo=timezone.utc),
np.array(["a", "b", "c", "d", "e", "f", "g", "h", "i"]),
),
(
datetime(1970, 1, 1, 0, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1969, 1, 1, 0, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1969, 1, 1, 0, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1970, 1, 1, 0, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1972, 1, 1, 0, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1973, 1, 1, 0, 0, 0, 0).replace(tzinfo=timezone.utc),
np.array([]),
),
(
datetime(1970, 1, 1, 0, 0, 0, 1).replace(tzinfo=timezone.utc),
datetime(1970, 1, 2, 0, 0, 0, 3).replace(tzinfo=timezone.utc),
np.array(["b", "c", "d", "e", "f"]),
),
(
datetime(1970, 1, 1, 0, 0, 0, 1).replace(tzinfo=timezone.utc),
datetime(1970, 1, 1, 0, 0, 0, 2).replace(tzinfo=timezone.utc),
np.array(["b"]),
),
(
datetime(1970, 1, 1, 0, 0, 0, 0).replace(tzinfo=timezone.utc),
datetime(1971, 1, 1, 0, 0, 0, 2).replace(tzinfo=timezone.utc),
np.array(["a", "b", "c", "d", "e", "f", "g", "h"]),
),
],
)
async def test_get_with_none_granularity(
date_from,
date_to,
expected,
):
"""Test query with data with LayerLevel.NONE granularity."""
indexer = MemoryIndexer(min_level=LayerLevel.NONE, max_level=LayerLevel.NONE)
await indexer.load(mock_data_none_granularity)
actual = indexer.get(date_from, date_to)
np.testing.assert_array_equal(actual, expected)
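A small sketch (not part of the original test module) that prints the level pairs produced for a single minimum level, using the same generate_levels_combinations helper the @pytest.mark.parametrize decorators above rely on.

if __name__ == '__main__':
    # every (min_level, max_level) pair whose maximum is strictly above LayerLevel.NONE
    for min_level, max_level in generate_levels_combinations(LayerLevel.NONE):
        print(min_level, max_level)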
| 32.833703
| 88
| 0.537885
| 1,925
| 14,808
| 4.05974
| 0.064935
| 0.040691
| 0.260653
| 0.297889
| 0.916571
| 0.913244
| 0.910173
| 0.904159
| 0.885605
| 0.851951
| 0
| 0.086715
| 0.282753
| 14,808
| 450
| 89
| 32.906667
| 0.649091
| 0
| 0
| 0.555283
| 0
| 0
| 0.054393
| 0
| 0
| 0
| 0
| 0
| 0.012285
| 1
| 0.004914
| false
| 0
| 0.022113
| 0.002457
| 0.031941
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6daef394d8a916c45977249639c9732be1c7a83f
| 47,007
|
py
|
Python
|
tests/test_ibis.py
|
kamratia1/ecdtools
|
3fa6f56f5b7e6ba4b68ab4e47e1ca6a4ed953c48
|
[
"MIT"
] | null | null | null |
tests/test_ibis.py
|
kamratia1/ecdtools
|
3fa6f56f5b7e6ba4b68ab4e47e1ca6a4ed953c48
|
[
"MIT"
] | null | null | null |
tests/test_ibis.py
|
kamratia1/ecdtools
|
3fa6f56f5b7e6ba4b68ab4e47e1ca6a4ed953c48
|
[
"MIT"
] | null | null | null |
import logging
import unittest
from decimal import Decimal
import ecdtools
from ecdtools import ibis
class IbisTest(unittest.TestCase):
maxDiff = None
def test_load_bushold(self):
ibis_file = ibis.load_file('tests/files/ibis/pybis/bushold.ibs')
# General information.
self.assertEqual(ibis_file.ibis_version, '3.2')
self.assertEqual(ibis_file.file_name, 'bushold.ibs')
self.assertEqual(ibis_file.file_revision, '0.2')
self.assertEqual(ibis_file.date, 'June 24, 1998')
self.assertEqual(ibis_file.source, 'Artifical Data')
self.assertEqual(ibis_file.notes, 'This data is a sample, only.')
self.assertEqual(
ibis_file.disclaimer,
'This information is for modeling purposes and is not')
self.assertEqual(ibis_file.copyright, 'None - public sample')
# Components.
self.assertEqual(len(ibis_file.components), 1)
# First (and only) component.
component = ibis_file.components[0]
self.assertEqual(component.name, 'BUS-HOLD-SAMPLE')
self.assertEqual(component.si_location, None)
self.assertEqual(component.timing_location, None)
self.assertEqual(component.manufacturer, 'None')
self.assertEqual(component.package.r_pkg.typical, '200m')
self.assertEqual(component.package.r_pkg.minimum, '100m')
self.assertEqual(component.package.r_pkg.maximum, '300m')
self.assertEqual(component.package.l_pkg.typical, '4.32nH')
self.assertEqual(component.package.l_pkg.minimum, '3.34nH')
self.assertEqual(component.package.l_pkg.maximum, '5.30nH')
self.assertEqual(component.package.c_pkg.typical, '0.38pF')
self.assertEqual(component.package.c_pkg.minimum, '0.33pF')
self.assertEqual(component.package.c_pkg.maximum, '0.43pF')
self.assertEqual(len(component.pins), 3)
self.assertEqual(component.pins[0].name, '1')
self.assertEqual(component.pins[0].signal_name, 'Sample1')
self.assertEqual(component.pins[0].model_name, 'TOP_MODEL_BUS_HOLD')
self.assertEqual(component.pins[0].r_pin, None)
self.assertEqual(component.pins[0].l_pin, None)
self.assertEqual(component.pins[0].c_pin, None)
self.assertEqual(component.pins[1].name, '12')
self.assertEqual(component.pins[1].signal_name, 'GND')
self.assertEqual(component.pins[1].model_name, 'GND')
self.assertEqual(component.pins[1].r_pin, None)
self.assertEqual(component.pins[1].l_pin, None)
self.assertEqual(component.pins[1].c_pin, None)
self.assertEqual(component.pins[2].name, '24')
self.assertEqual(component.pins[2].signal_name, 'VCC')
self.assertEqual(component.pins[2].model_name, 'POWER')
self.assertEqual(component.pins[2].r_pin, None)
self.assertEqual(component.pins[2].l_pin, None)
self.assertEqual(component.pins[2].c_pin, None)
# Models.
self.assertEqual(len(ibis_file.models), 1)
# First (and only) model.
self.assertEqual(ibis_file.models[0].name, 'TOP_MODEL_BUS_HOLD')
self.assertEqual(ibis_file.models[0].model_type, 'Input')
self.assertEqual(ibis_file.models[0].polarity, None)
self.assertEqual(ibis_file.models[0].enable, None)
self.assertEqual(ibis_file.models[0].vinl, '0.8')
self.assertEqual(ibis_file.models[0].vinh, '2.0')
self.assertEqual(ibis_file.models[0].c_comp.typical, '4pF')
self.assertEqual(ibis_file.models[0].c_comp.minimum, '3pF')
self.assertEqual(ibis_file.models[0].c_comp.maximum, '5pF')
self.assertEqual(ibis_file.models[0].vmeas, None)
self.assertEqual(ibis_file.models[0].cref, None)
self.assertEqual(ibis_file.models[0].vref, None)
self.assertEqual(ibis_file.models[0].rref, None)
self.assertEqual(ibis_file.models[0].temperature_range.typical, None)
self.assertEqual(ibis_file.models[0].temperature_range.minimum, None)
self.assertEqual(ibis_file.models[0].temperature_range.maximum, None)
self.assertEqual(ibis_file.models[0].voltage_range.typical, '5.0')
self.assertEqual(ibis_file.models[0].voltage_range.minimum, '4.5')
self.assertEqual(ibis_file.models[0].voltage_range.maximum, '5.5')
self.assertEqual(
ibis_file.models[0].gnd_clamp,
[
('-2.0000e+00', '-6.158e+17', 'NA', 'NA'),
('-1.9000e+00', '-1.697e+16', 'NA', 'NA'),
('-1.8000e+00', '-4.679e+14', 'NA', 'NA'),
('-1.7000e+00', '-1.290e+13', 'NA', 'NA'),
('-1.6000e+00', '-3.556e+11', 'NA', 'NA'),
('-1.5000e+00', '-9.802e+09', 'NA', 'NA'),
('-1.4000e+00', '-2.702e+08', 'NA', 'NA'),
('-1.3000e+00', '-7.449e+06', 'NA', 'NA'),
('-1.2000e+00', '-2.053e+05', 'NA', 'NA'),
('-1.1000e+00', '-5.660e+03', 'NA', 'NA'),
('-1.0000e+00', '-1.560e+02', 'NA', 'NA'),
('-9.0000e-01', '-4.308e+00', 'NA', 'NA'),
('-8.0000e-01', '-1.221e-01', 'NA', 'NA'),
('-7.0000e-01', '-4.315e-03', 'NA', 'NA'),
('-6.0000e-01', '-1.715e-04', 'NA', 'NA'),
('-5.0000e-01', '-4.959e-06', 'NA', 'NA'),
('-4.0000e-01', '-1.373e-07', 'NA', 'NA'),
('-3.0000e-01', '-4.075e-09', 'NA', 'NA'),
('-2.0000e-01', '-3.044e-10', 'NA', 'NA'),
('-1.0000e-01', '-1.030e-10', 'NA', 'NA'),
('0.', '0', 'NA', 'NA'),
('5', '0', 'NA', 'NA')
])
self.assertEqual(
ibis_file.models[0].power_clamp,
[
('-2.0000e+00', '6.158e+17', 'NA', 'NA'),
('-1.9000e+00', '1.697e+16', 'NA', 'NA'),
('-1.8000e+00', '4.679e+14', 'NA', 'NA'),
('-1.7000e+00', '1.290e+13', 'NA', 'NA'),
('-1.6000e+00', '3.556e+11', 'NA', 'NA'),
('-1.5000e+00', '9.802e+09', 'NA', 'NA'),
('-1.4000e+00', '2.702e+08', 'NA', 'NA'),
('-1.3000e+00', '7.449e+06', 'NA', 'NA'),
('-1.2000e+00', '2.053e+05', 'NA', 'NA'),
('-1.1000e+00', '5.660e+03', 'NA', 'NA'),
('-1.0000e+00', '1.560e+02', 'NA', 'NA'),
('-9.0000e-01', '4.308e+00', 'NA', 'NA'),
('-8.0000e-01', '1.221e-01', 'NA', 'NA'),
('-7.0000e-01', '4.315e-03', 'NA', 'NA'),
('-6.0000e-01', '1.715e-04', 'NA', 'NA'),
('-5.0000e-01', '4.959e-06', 'NA', 'NA'),
('-4.0000e-01', '1.373e-07', 'NA', 'NA'),
('-3.0000e-01', '4.075e-09', 'NA', 'NA'),
('-2.0000e-01', '3.044e-10', 'NA', 'NA'),
('-1.0000e-01', '1.030e-10', 'NA', 'NA'),
('0.', '0', 'NA', 'NA'),
('5', '0', 'NA', 'NA')
])
self.assertEqual(
ibis_file.models[0].pullup,
[
('-5V', '100uA', '80uA', '120uA'),
('-1V', '30uA', '25uA', '40uA'),
('0V', '0', '0', '0'),
('1V', '-30uA', '-25uA', '-40uA'),
('3V', '-50uA', '-45uA', '-50uA'),
('5V', '-100uA', '-80uA', '-120uA'),
('10v', '-120uA', '-90uA', '-150uA')
])
self.assertEqual(
ibis_file.models[0].pulldown,
[
('-5V', '-100uA', '-80uA', '-120uA'),
('-1V', '-30uA', '-25uA', '-40uA'),
('0V', '0', '0', '0'),
('1V', '30uA', '25uA', '40uA'),
('3V', '50uA', '45uA', '50uA'),
('5V', '100uA', '80uA', '120uA'),
('10v', '120uA', '90uA', '150uA')
])
self.assertEqual(ibis_file.models[0].ramp.dv_dt_r,
(('2.0', '0.50n'),
('2.0', '0.75n'),
('2.0', '0.35n')))
self.assertEqual(ibis_file.models[0].ramp.dv_dt_f,
(('2.0', '0.50n'),
('2.0', '0.75n'),
('2.0', '0.35n')))
self.assertEqual(ibis_file.models[0].ramp.r_load,
'500')
self.assertEqual(ibis_file.models[0].falling_waveforms, [])
self.assertEqual(ibis_file.models[0].rising_waveforms, [])
self.assertEqual(ibis_file.models[0].add_submodel[0].submodel,
'BUS_HOLD')
self.assertEqual(ibis_file.models[0].add_submodel[0].submodel_mode,
'All')
# Submodels.
self.assertEqual(len(ibis_file.submodels), 1)
# First (and only) submodel.
self.assertEqual(ibis_file.submodels[0].name, 'BUS_HOLD')
self.assertEqual(ibis_file.submodels[0].submodel_type, 'Bus_hold')
submodel_spec = ibis_file.submodels[0].submodel_spec
self.assertEqual(submodel_spec.v_trigger_r.typical, '3.1')
self.assertEqual(submodel_spec.v_trigger_r.minimum, '2.6')
self.assertEqual(submodel_spec.v_trigger_r.maximum, '4.6')
self.assertEqual(submodel_spec.v_trigger_f.typical, '1.3')
self.assertEqual(submodel_spec.v_trigger_f.minimum, '1.2')
self.assertEqual(submodel_spec.v_trigger_f.maximum, '1.4')
self.assertEqual(submodel_spec.off_delay.typical, None)
self.assertEqual(submodel_spec.off_delay.minimum, None)
self.assertEqual(submodel_spec.off_delay.maximum, None)
def test_load_bushold_transform(self):
ibis_file = ibis.load_file('tests/files/ibis/pybis/bushold.ibs',
transform=True)
# General information.
self.assertEqual(ibis_file.ibis_version, '3.2')
self.assertEqual(ibis_file.file_name, 'bushold.ibs')
self.assertEqual(ibis_file.file_revision, '0.2')
self.assertEqual(ibis_file.date, 'June 24, 1998')
self.assertEqual(ibis_file.source, 'Artifical Data')
self.assertEqual(ibis_file.notes, 'This data is a sample, only.')
self.assertEqual(
ibis_file.disclaimer,
'This information is for modeling purposes and is not')
self.assertEqual(ibis_file.copyright, 'None - public sample')
# Components.
self.assertEqual(len(ibis_file.components), 1)
# First (and only) component.
component = ibis_file.components[0]
self.assertEqual(component.name, 'BUS-HOLD-SAMPLE')
self.assertEqual(component.si_location, None)
self.assertEqual(component.timing_location, None)
self.assertEqual(component.manufacturer, 'None')
self.assertEqual(component.package.r_pkg.typical, Decimal('200e-3'))
self.assertEqual(component.package.r_pkg.minimum, Decimal('100e-3'))
self.assertEqual(component.package.r_pkg.maximum, Decimal('300e-3'))
self.assertEqual(component.package.l_pkg.typical, Decimal('4.32e-9'))
self.assertEqual(component.package.l_pkg.minimum, Decimal('3.34e-9'))
self.assertEqual(component.package.l_pkg.maximum, Decimal('5.30e-9'))
self.assertEqual(component.package.c_pkg.typical, Decimal('0.38e-12'))
self.assertEqual(component.package.c_pkg.minimum, Decimal('0.33e-12'))
self.assertEqual(component.package.c_pkg.maximum, Decimal('0.43e-12'))
self.assertEqual(len(component.pins), 3)
self.assertEqual(component.pins[0].name, '1')
self.assertEqual(component.pins[0].signal_name, 'Sample1')
self.assertEqual(component.pins[0].model_name, 'TOP_MODEL_BUS_HOLD')
self.assertEqual(component.pins[0].r_pin, None)
self.assertEqual(component.pins[0].l_pin, None)
self.assertEqual(component.pins[0].c_pin, None)
self.assertEqual(component.pins[1].name, '12')
self.assertEqual(component.pins[1].signal_name, 'GND')
self.assertEqual(component.pins[1].model_name, 'GND')
self.assertEqual(component.pins[1].r_pin, None)
self.assertEqual(component.pins[1].l_pin, None)
self.assertEqual(component.pins[1].c_pin, None)
self.assertEqual(component.pins[2].name, '24')
self.assertEqual(component.pins[2].signal_name, 'VCC')
self.assertEqual(component.pins[2].model_name, 'POWER')
self.assertEqual(component.pins[2].r_pin, None)
self.assertEqual(component.pins[2].l_pin, None)
self.assertEqual(component.pins[2].c_pin, None)
# Models.
self.assertEqual(len(ibis_file.models), 1)
# First (and only) model.
self.assertEqual(ibis_file.models[0].name, 'TOP_MODEL_BUS_HOLD')
self.assertEqual(ibis_file.models[0].model_type, 'Input')
self.assertEqual(ibis_file.models[0].polarity, None)
self.assertEqual(ibis_file.models[0].enable, None)
self.assertEqual(ibis_file.models[0].vinl, Decimal('0.8'))
self.assertEqual(ibis_file.models[0].vinh, Decimal('2.0'))
self.assertEqual(ibis_file.models[0].c_comp.typical, Decimal('4e-12'))
self.assertEqual(ibis_file.models[0].c_comp.minimum, Decimal('3e-12'))
self.assertEqual(ibis_file.models[0].c_comp.maximum, Decimal('5e-12'))
self.assertEqual(ibis_file.models[0].vmeas, None)
self.assertEqual(ibis_file.models[0].cref, None)
self.assertEqual(ibis_file.models[0].vref, None)
self.assertEqual(ibis_file.models[0].rref, None)
self.assertEqual(ibis_file.models[0].temperature_range.typical, None)
self.assertEqual(ibis_file.models[0].temperature_range.minimum, None)
self.assertEqual(ibis_file.models[0].temperature_range.maximum, None)
self.assertEqual(ibis_file.models[0].voltage_range.typical, Decimal('5.0'))
self.assertEqual(ibis_file.models[0].voltage_range.minimum, Decimal('4.5'))
self.assertEqual(ibis_file.models[0].voltage_range.maximum, Decimal('5.5'))
self.assertEqual(
ibis_file.models[0].gnd_clamp,
[
(Decimal('-2.0000e+00'), Decimal('-6.158e+17'), None, None),
(Decimal('-1.9000e+00'), Decimal('-1.697e+16'), None, None),
(Decimal('-1.8000e+00'), Decimal('-4.679e+14'), None, None),
(Decimal('-1.7000e+00'), Decimal('-1.290e+13'), None, None),
(Decimal('-1.6000e+00'), Decimal('-3.556e+11'), None, None),
(Decimal('-1.5000e+00'), Decimal('-9.802e+09'), None, None),
(Decimal('-1.4000e+00'), Decimal('-2.702e+08'), None, None),
(Decimal('-1.3000e+00'), Decimal('-7.449e+06'), None, None),
(Decimal('-1.2000e+00'), Decimal('-2.053e+05'), None, None),
(Decimal('-1.1000e+00'), Decimal('-5.660e+03'), None, None),
(Decimal('-1.0000e+00'), Decimal('-1.560e+02'), None, None),
(Decimal('-9.0000e-01'), Decimal('-4.308e+00'), None, None),
(Decimal('-8.0000e-01'), Decimal('-1.221e-01'), None, None),
(Decimal('-7.0000e-01'), Decimal('-4.315e-03'), None, None),
(Decimal('-6.0000e-01'), Decimal('-1.715e-04'), None, None),
(Decimal('-5.0000e-01'), Decimal('-4.959e-06'), None, None),
(Decimal('-4.0000e-01'), Decimal('-1.373e-07'), None, None),
(Decimal('-3.0000e-01'), Decimal('-4.075e-09'), None, None),
(Decimal('-2.0000e-01'), Decimal('-3.044e-10'), None, None),
(Decimal('-1.0000e-01'), Decimal('-1.030e-10'), None, None),
(Decimal('0.'), Decimal('0'), None, None),
(Decimal('5'), Decimal('0'), None, None)
])
self.assertEqual(
ibis_file.models[0].power_clamp,
[
(Decimal('-2.0000e+00'), Decimal('6.158e+17'), None, None),
(Decimal('-1.9000e+00'), Decimal('1.697e+16'), None, None),
(Decimal('-1.8000e+00'), Decimal('4.679e+14'), None, None),
(Decimal('-1.7000e+00'), Decimal('1.290e+13'), None, None),
(Decimal('-1.6000e+00'), Decimal('3.556e+11'), None, None),
(Decimal('-1.5000e+00'), Decimal('9.802e+09'), None, None),
(Decimal('-1.4000e+00'), Decimal('2.702e+08'), None, None),
(Decimal('-1.3000e+00'), Decimal('7.449e+06'), None, None),
(Decimal('-1.2000e+00'), Decimal('2.053e+05'), None, None),
(Decimal('-1.1000e+00'), Decimal('5.660e+03'), None, None),
(Decimal('-1.0000e+00'), Decimal('1.560e+02'), None, None),
(Decimal('-9.0000e-01'), Decimal('4.308e+00'), None, None),
(Decimal('-8.0000e-01'), Decimal('1.221e-01'), None, None),
(Decimal('-7.0000e-01'), Decimal('4.315e-03'), None, None),
(Decimal('-6.0000e-01'), Decimal('1.715e-04'), None, None),
(Decimal('-5.0000e-01'), Decimal('4.959e-06'), None, None),
(Decimal('-4.0000e-01'), Decimal('1.373e-07'), None, None),
(Decimal('-3.0000e-01'), Decimal('4.075e-09'), None, None),
(Decimal('-2.0000e-01'), Decimal('3.044e-10'), None, None),
(Decimal('-1.0000e-01'), Decimal('1.030e-10'), None, None),
(Decimal('0.'), Decimal('0'), None, None),
(Decimal('5'), Decimal('0'), None, None)
])
self.assertEqual(
ibis_file.models[0].pullup,
[
(Decimal('-5'), Decimal('100e-6'), Decimal('80e-6'), Decimal('120e-6')),
(Decimal('-1'), Decimal('30e-6'), Decimal('25e-6'), Decimal('40e-6')),
(Decimal('0'), Decimal('0'), Decimal('0'), Decimal('0')),
(Decimal('1'), Decimal('-30e-6'), Decimal('-25e-6'), Decimal('-40e-6')),
(Decimal('3'), Decimal('-50e-6'), Decimal('-45e-6'), Decimal('-50e-6')),
(Decimal('5'), Decimal('-100e-6'), Decimal('-80e-6'), Decimal('-120e-6')),
(Decimal('10'), Decimal('-120e-6'), Decimal('-90e-6'), Decimal('-150e-6'))
])
self.assertEqual(
ibis_file.models[0].pulldown,
[
(Decimal('-5'), Decimal('-100e-6'), Decimal('-80e-6'), Decimal('-120e-6')),
(Decimal('-1'), Decimal('-30e-6'), Decimal('-25e-6'), Decimal('-40e-6')),
(Decimal('0'), Decimal('0'), Decimal('0'), Decimal('0')),
(Decimal('1'), Decimal('30e-6'), Decimal('25e-6'), Decimal('40e-6')),
(Decimal('3'), Decimal('50e-6'), Decimal('45e-6'), Decimal('50e-6')),
(Decimal('5'), Decimal('100e-6'), Decimal('80e-6'), Decimal('120e-6')),
(Decimal('10'), Decimal('120e-6'), Decimal('90e-6'), Decimal('150e-6'))
])
self.assertEqual(ibis_file.models[0].ramp.dv_dt_r,
((Decimal('2.0'), Decimal('0.50e-9')),
(Decimal('2.0'), Decimal('0.75e-9')),
(Decimal('2.0'), Decimal('0.35e-9'))))
self.assertEqual(ibis_file.models[0].ramp.dv_dt_f,
((Decimal('2.0'), Decimal('0.50e-9')),
(Decimal('2.0'), Decimal('0.75e-9')),
(Decimal('2.0'), Decimal('0.35e-9'))))
self.assertEqual(ibis_file.models[0].ramp.r_load,
Decimal('500'))
self.assertEqual(ibis_file.models[0].falling_waveforms, [])
self.assertEqual(ibis_file.models[0].rising_waveforms, [])
def test_load_sample1(self):
ibis_file = ibis.load_file('tests/files/ibis/pybis/sample1.ibs')
# General information.
self.assertEqual(ibis_file.ibis_version, '3.2')
self.assertEqual(ibis_file.file_name, 'sample1.ibs')
self.assertEqual(ibis_file.file_revision, '@(#)$Revision: 0.1')
self.assertEqual(ibis_file.date, 'September 11, 2015')
self.assertEqual(ibis_file.source, 'Company_ABC, Adapted From Real IBIS Model')
self.assertEqual(ibis_file.notes, None)
self.assertEqual(ibis_file.disclaimer, None)
self.assertEqual(ibis_file.copyright, 'Public Sample')
# Components.
self.assertEqual(len(ibis_file.components), 1)
# First (and only) component.
component = ibis_file.components[0]
self.assertEqual(component.name, 'WXY123')
self.assertEqual(component.si_location, None)
self.assertEqual(component.timing_location, None)
self.assertEqual(component.manufacturer, 'Company_ABC')
self.assertEqual(component.package.r_pkg.typical, '0.0m')
self.assertEqual(component.package.r_pkg.minimum, '0.0m')
self.assertEqual(component.package.r_pkg.maximum, '0.0m')
self.assertEqual(component.package.l_pkg.typical, '3.0nH')
self.assertEqual(component.package.l_pkg.minimum, '2.0nH')
self.assertEqual(component.package.l_pkg.maximum, '4.0nH')
self.assertEqual(component.package.c_pkg.typical, '0.5pF')
self.assertEqual(component.package.c_pkg.minimum, '0.3pF')
self.assertEqual(component.package.c_pkg.maximum, '0.8pf')
self.assertEqual(len(component.pins), 231)
self.assertEqual(component.pins[0].name, 'A10')
self.assertEqual(component.pins[0].signal_name, 'cs1')
self.assertEqual(component.pins[0].model_name, 'BT2Z50CX')
self.assertEqual(component.pins[0].r_pin, '32m')
self.assertEqual(component.pins[0].l_pin, '3.44nH')
self.assertEqual(component.pins[0].c_pin, '0.46pF')
self.assertEqual(component.pins[230].name, 'Y9')
self.assertEqual(component.pins[230].signal_name, 'sc_moden')
self.assertEqual(component.pins[230].model_name, 'BPS2P4F_PU50K')
self.assertEqual(component.pins[230].r_pin, '32m')
self.assertEqual(component.pins[230].l_pin, '3.45nH')
self.assertEqual(component.pins[230].c_pin, '0.46pF')
self.assertEqual(len(component.diff_pins), 1)
self.assertEqual(component.diff_pins[0].name, 'E17')
self.assertEqual(component.diff_pins[0].inv_pin, 'D18')
self.assertEqual(component.diff_pins[0].vdiff, '2.0')
self.assertEqual(component.diff_pins[0].tdelay_typ, 'NA')
self.assertEqual(component.diff_pins[0].tdelay_min, 'NA')
self.assertEqual(component.diff_pins[0].tdelay_max, 'NA')
# Model selector.
self.assertEqual(len(ibis_file.model_selectors), 1)
model_selector = ibis_file.model_selectors[0]
self.assertEqual(model_selector.name, 'BUSB6AU')
self.assertEqual(len(model_selector.models), 2)
self.assertEqual(model_selector.models[0].name, 'BUSB6AU_HIGH_SPEED')
self.assertEqual(model_selector.models[0].description, 'USB_HIGH_SPEED foo bar')
self.assertEqual(model_selector.models[1].name, 'BUSB6AU_LOW_SPEED')
self.assertEqual(model_selector.models[1].description, 'USB_LOW_SPEED')
# Models.
self.assertEqual(len(ibis_file.models), 14)
# First model.
self.assertEqual(ibis_file.models[0].name, 'BIP00F')
self.assertEqual(ibis_file.models[0].model_type, 'Input')
self.assertEqual(ibis_file.models[0].polarity, 'Non-Inverting')
self.assertEqual(ibis_file.models[0].enable, None)
self.assertEqual(ibis_file.models[0].vinl, '0.8V')
self.assertEqual(ibis_file.models[0].vinh, '2.0V')
self.assertEqual(ibis_file.models[0].c_comp.typical, '0.737pF')
self.assertEqual(ibis_file.models[0].c_comp.minimum, 'NA')
self.assertEqual(ibis_file.models[0].c_comp.maximum, 'NA')
self.assertEqual(ibis_file.models[0].vmeas, None)
self.assertEqual(ibis_file.models[0].cref, None)
self.assertEqual(ibis_file.models[0].vref, None)
self.assertEqual(ibis_file.models[0].rref, None)
self.assertEqual(ibis_file.models[0].temperature_range.typical, '25')
self.assertEqual(ibis_file.models[0].temperature_range.minimum, '0')
self.assertEqual(ibis_file.models[0].temperature_range.maximum, '125')
self.assertEqual(ibis_file.models[0].voltage_range.typical, '3.3V')
self.assertEqual(ibis_file.models[0].voltage_range.minimum, '3.0V')
self.assertEqual(ibis_file.models[0].voltage_range.maximum, '3.6V')
self.assertEqual(len(ibis_file.models[0].gnd_clamp), 67)
self.assertEqual(
ibis_file.models[0].gnd_clamp[0],
('-3.30000', '-11.46380A', '-11.71150A', '-11.40800A'))
self.assertEqual(
ibis_file.models[0].gnd_clamp[66],
('3.30000', '6.60800pA', '26.57000nA', '6.32380pA'))
self.assertEqual(len(ibis_file.models[0].power_clamp), 34)
self.assertEqual(
ibis_file.models[0].power_clamp[0],
('-3.30000', '10.22730A', '10.48520A', '10.16810A'))
self.assertEqual(
ibis_file.models[0].power_clamp[33],
('0.00000', '6.59380pA', '10.90680pA', '7.23330pA'))
self.assertEqual(ibis_file.models[0].pullup, None)
self.assertEqual(ibis_file.models[0].pulldown, None)
self.assertEqual(ibis_file.models[0].ramp, None)
self.assertEqual(ibis_file.models[0].falling_waveforms, [])
self.assertEqual(ibis_file.models[0].rising_waveforms, [])
# Last model.
self.assertEqual(ibis_file.models[11].name, 'BT2Z50CX_PU50K')
self.assertEqual(ibis_file.models[11].model_type, 'I/O')
self.assertEqual(ibis_file.models[11].polarity, 'Non-Inverting')
self.assertEqual(ibis_file.models[11].enable, 'Active-High')
self.assertEqual(ibis_file.models[11].vinl, '0.8V')
self.assertEqual(ibis_file.models[11].vinh, '2.0V')
self.assertEqual(ibis_file.models[11].c_comp.typical, '1.26pF')
self.assertEqual(ibis_file.models[11].c_comp.minimum, 'NA')
self.assertEqual(ibis_file.models[11].c_comp.maximum, 'NA')
self.assertEqual(ibis_file.models[11].vmeas, '1.65V')
self.assertEqual(ibis_file.models[11].cref, '1.0pF')
self.assertEqual(ibis_file.models[11].vref, '0V')
self.assertEqual(ibis_file.models[11].rref, '1Mohms')
self.assertEqual(ibis_file.models[11].temperature_range.typical, '25')
self.assertEqual(ibis_file.models[11].temperature_range.minimum, '0')
self.assertEqual(ibis_file.models[11].temperature_range.maximum, '125')
self.assertEqual(ibis_file.models[11].voltage_range.typical, '3.3V')
self.assertEqual(ibis_file.models[11].voltage_range.minimum, '3.0V')
self.assertEqual(ibis_file.models[11].voltage_range.maximum, '3.6V')
self.assertEqual(len(ibis_file.models[11].gnd_clamp), 35)
self.assertEqual(len(ibis_file.models[11].power_clamp), 58)
self.assertEqual(len(ibis_file.models[11].pullup), 100)
self.assertEqual(len(ibis_file.models[11].pulldown), 100)
self.assertEqual(ibis_file.models[11].ramp.dv_dt_r,
(('0.950146V', '0.229176ns'),
('0.891184V', '0.260292ns'),
('1.0311V', '0.221583ns')))
self.assertEqual(ibis_file.models[11].ramp.dv_dt_f,
(('0.94062V', '0.319408ns'),
('0.89298V', '0.416034ns'),
('1.00716V', '0.284201ns')))
self.assertEqual(ibis_file.models[11].ramp.r_load, '50')
# Falling waveforms.
self.assertEqual(len(ibis_file.models[11].falling_waveforms), 2)
waveform = ibis_file.models[11].falling_waveforms[0]
self.assertEqual(waveform.r_fixture, '50')
self.assertEqual(waveform.v_fixture.typical, '3.3')
self.assertEqual(waveform.v_fixture.minimum, '3.0')
self.assertEqual(waveform.v_fixture.maximum, '3.6')
self.assertEqual(len(waveform.table.samples), 100)
self.assertEqual(
waveform.table.samples[0],
('0.00000S', '3.30000V', '3.00000V', '3.60000V'))
self.assertEqual(
waveform.table.samples[99],
('1.50000nS', '1.73230V', '1.51170V', '1.92140V'))
waveform = ibis_file.models[11].falling_waveforms[1]
self.assertEqual(len(waveform.table.samples), 100)
self.assertEqual(
waveform.table.samples[0],
('0.00000S', '1.59770V', '1.50670V', '1.73430V'))
self.assertEqual(
waveform.table.samples[99],
('1.35000nS', '14.88300mV', '24.09000mV', '19.09910mV'))
# Rising waveforms.
self.assertEqual(len(ibis_file.models[11].rising_waveforms), 2)
waveform = ibis_file.models[11].rising_waveforms[0]
self.assertEqual(waveform.r_fixture, '50')
self.assertEqual(waveform.v_fixture.typical, '0.000')
self.assertEqual(waveform.v_fixture.minimum, '0.000')
self.assertEqual(waveform.v_fixture.maximum, '0.000')
self.assertEqual(len(waveform.table.samples), 100)
self.assertEqual(
waveform.table.samples[0],
('0.00000S', '923.25950uV', '493.94600uV', '1.40550mV'))
self.assertEqual(
waveform.table.samples[99],
('1.50000nS', '1.58450V', '1.48580V', '1.71990V'))
def test_load_sample1_transform(self):
ibis_file = ibis.load_file('tests/files/ibis/pybis/sample1.ibs',
transform=True)
# General information.
self.assertEqual(ibis_file.ibis_version, '3.2')
self.assertEqual(ibis_file.file_name, 'sample1.ibs')
self.assertEqual(ibis_file.file_revision, '@(#)$Revision: 0.1')
self.assertEqual(ibis_file.date, 'September 11, 2015')
self.assertEqual(ibis_file.source, 'Company_ABC, Adapted From Real IBIS Model')
self.assertEqual(ibis_file.notes, None)
self.assertEqual(ibis_file.disclaimer, None)
self.assertEqual(ibis_file.copyright, 'Public Sample')
# Components.
self.assertEqual(len(ibis_file.components), 1)
# First (and only) component.
component = ibis_file.components[0]
self.assertEqual(component.name, 'WXY123')
self.assertEqual(component.si_location, None)
self.assertEqual(component.timing_location, None)
self.assertEqual(component.manufacturer, 'Company_ABC')
self.assertEqual(component.package.r_pkg.typical, Decimal('0.0e-3'))
self.assertEqual(component.package.r_pkg.minimum, Decimal('0.0e-3'))
self.assertEqual(component.package.r_pkg.maximum, Decimal('0.0e-3'))
self.assertEqual(component.package.l_pkg.typical, Decimal('3.0e-9'))
self.assertEqual(component.package.l_pkg.minimum, Decimal('2.0e-9'))
self.assertEqual(component.package.l_pkg.maximum, Decimal('4.0e-9'))
self.assertEqual(component.package.c_pkg.typical, Decimal('0.5e-12'))
self.assertEqual(component.package.c_pkg.minimum, Decimal('0.3e-12'))
self.assertEqual(component.package.c_pkg.maximum, Decimal('0.8e-12'))
self.assertEqual(len(component.pins), 231)
self.assertEqual(component.pins[0].name, 'A10')
self.assertEqual(component.pins[0].signal_name, 'cs1')
self.assertEqual(component.pins[0].model_name, 'BT2Z50CX')
self.assertEqual(component.pins[0].r_pin, Decimal('32e-3'))
self.assertEqual(component.pins[0].l_pin, Decimal('3.44e-9'))
self.assertEqual(component.pins[0].c_pin, Decimal('0.46e-12'))
self.assertEqual(component.pins[230].name, 'Y9')
self.assertEqual(component.pins[230].signal_name, 'sc_moden')
self.assertEqual(component.pins[230].model_name, 'BPS2P4F_PU50K')
self.assertEqual(component.pins[230].r_pin, Decimal('32e-3'))
self.assertEqual(component.pins[230].l_pin, Decimal('3.45e-9'))
self.assertEqual(component.pins[230].c_pin, Decimal('0.46e-12'))
# Models.
self.assertEqual(len(ibis_file.models), 14)
# First model.
self.assertEqual(ibis_file.models[0].name, 'BIP00F')
self.assertEqual(ibis_file.models[0].model_type, 'Input')
self.assertEqual(ibis_file.models[0].polarity, 'Non-Inverting')
self.assertEqual(ibis_file.models[0].enable, None)
self.assertEqual(ibis_file.models[0].vinl, Decimal('0.8'))
self.assertEqual(ibis_file.models[0].vinh, Decimal('2.0'))
self.assertEqual(ibis_file.models[0].c_comp.typical, Decimal('0.737e-12'))
self.assertEqual(ibis_file.models[0].c_comp.minimum, None)
self.assertEqual(ibis_file.models[0].c_comp.maximum, None)
self.assertEqual(ibis_file.models[0].vmeas, None)
self.assertEqual(ibis_file.models[0].cref, None)
self.assertEqual(ibis_file.models[0].vref, None)
self.assertEqual(ibis_file.models[0].rref, None)
self.assertEqual(ibis_file.models[0].temperature_range.typical, Decimal('25'))
self.assertEqual(ibis_file.models[0].temperature_range.minimum, Decimal('0'))
self.assertEqual(ibis_file.models[0].temperature_range.maximum, Decimal('125'))
self.assertEqual(ibis_file.models[0].voltage_range.typical, Decimal('3.3'))
self.assertEqual(ibis_file.models[0].voltage_range.minimum, Decimal('3.0'))
self.assertEqual(ibis_file.models[0].voltage_range.maximum, Decimal('3.6'))
self.assertEqual(len(ibis_file.models[0].gnd_clamp), 67)
self.assertEqual(
ibis_file.models[0].gnd_clamp[0],
(Decimal('-3.30000'),
Decimal('-11.46380'),
Decimal('-11.71150'),
Decimal('-11.40800')))
self.assertEqual(
ibis_file.models[0].gnd_clamp[66],
(Decimal('3.30000'),
Decimal('6.60800e-12'),
Decimal('26.57000e-9'),
Decimal('6.32380e-12')))
self.assertEqual(len(ibis_file.models[0].power_clamp), 34)
self.assertEqual(
ibis_file.models[0].power_clamp[0],
(Decimal('-3.30000'),
Decimal('10.22730'),
Decimal('10.48520'),
Decimal('10.16810')))
self.assertEqual(
ibis_file.models[0].power_clamp[33],
(Decimal('0.00000'),
Decimal('6.59380e-12'),
Decimal('10.90680e-12'),
Decimal('7.23330e-12')))
self.assertEqual(ibis_file.models[0].pullup, None)
self.assertEqual(ibis_file.models[0].pulldown, None)
self.assertEqual(ibis_file.models[0].ramp, None)
self.assertEqual(ibis_file.models[0].falling_waveforms, [])
self.assertEqual(ibis_file.models[0].rising_waveforms, [])
        # Model at index 11.
self.assertEqual(ibis_file.models[11].name, 'BT2Z50CX_PU50K')
self.assertEqual(ibis_file.models[11].model_type, 'I/O')
self.assertEqual(ibis_file.models[11].polarity, 'Non-Inverting')
self.assertEqual(ibis_file.models[11].enable, 'Active-High')
self.assertEqual(ibis_file.models[11].vinl, Decimal('0.8'))
self.assertEqual(ibis_file.models[11].vinh, Decimal('2.0'))
self.assertEqual(ibis_file.models[11].c_comp.typical, Decimal('1.26e-12'))
self.assertEqual(ibis_file.models[11].c_comp.minimum, None)
self.assertEqual(ibis_file.models[11].c_comp.maximum, None)
self.assertEqual(ibis_file.models[11].vmeas, Decimal('1.65'))
self.assertEqual(ibis_file.models[11].cref, Decimal('1.0e-12'))
self.assertEqual(ibis_file.models[11].vref, Decimal('0'))
self.assertEqual(ibis_file.models[11].rref, Decimal('1e6'))
self.assertEqual(ibis_file.models[11].temperature_range.typical, Decimal('25'))
self.assertEqual(ibis_file.models[11].temperature_range.minimum, Decimal('0'))
self.assertEqual(ibis_file.models[11].temperature_range.maximum, Decimal('125'))
self.assertEqual(ibis_file.models[11].voltage_range.typical, Decimal('3.3'))
self.assertEqual(ibis_file.models[11].voltage_range.minimum, Decimal('3.0'))
self.assertEqual(ibis_file.models[11].voltage_range.maximum, Decimal('3.6'))
self.assertEqual(len(ibis_file.models[11].gnd_clamp), 35)
self.assertEqual(len(ibis_file.models[11].power_clamp), 58)
self.assertEqual(len(ibis_file.models[11].pullup), 100)
self.assertEqual(len(ibis_file.models[11].pulldown), 100)
self.assertEqual(ibis_file.models[11].ramp.dv_dt_r,
((Decimal('0.950146'), Decimal('0.229176e-9')),
(Decimal('0.891184'), Decimal('0.260292e-9')),
(Decimal('1.0311'), Decimal('0.221583e-9'))))
self.assertEqual(ibis_file.models[11].ramp.dv_dt_f,
((Decimal('0.94062'), Decimal('0.319408e-9')),
(Decimal('0.89298'), Decimal('0.416034e-9')),
(Decimal('1.00716'), Decimal('0.284201e-9'))))
self.assertEqual(ibis_file.models[11].ramp.r_load, Decimal('50'))
# Falling waveforms.
self.assertEqual(len(ibis_file.models[11].falling_waveforms), 2)
waveform = ibis_file.models[11].falling_waveforms[0]
self.assertEqual(waveform.r_fixture, Decimal('50'))
self.assertEqual(waveform.v_fixture.typical, Decimal('3.3'))
self.assertEqual(waveform.v_fixture.minimum, Decimal('3.0'))
self.assertEqual(waveform.v_fixture.maximum, Decimal('3.6'))
self.assertEqual(len(waveform.table.samples), 100)
self.assertEqual(
waveform.table.samples[0],
(Decimal('0.00000'),
Decimal('3.30000'),
Decimal('3.00000'),
Decimal('3.60000')))
self.assertEqual(
waveform.table.samples[99],
(Decimal('1.50000e-9'),
Decimal('1.73230'),
Decimal('1.51170'),
Decimal('1.92140')))
waveform = ibis_file.models[11].falling_waveforms[1]
self.assertEqual(len(waveform.table.samples), 100)
self.assertEqual(
waveform.table.samples[0],
(Decimal('0.00000'),
Decimal('1.59770'),
Decimal('1.50670'),
Decimal('1.73430')))
self.assertEqual(
waveform.table.samples[99],
(Decimal('1.35000e-9'),
Decimal('14.88300e-3'),
Decimal('24.09000e-3'),
Decimal('19.09910e-3')))
# Rising waveforms.
self.assertEqual(len(ibis_file.models[11].rising_waveforms), 2)
waveform = ibis_file.models[11].rising_waveforms[0]
self.assertEqual(waveform.r_fixture, Decimal('50'))
self.assertEqual(waveform.v_fixture.typical, Decimal('0.000'))
self.assertEqual(waveform.v_fixture.minimum, Decimal('0.000'))
self.assertEqual(waveform.v_fixture.maximum, Decimal('0.000'))
self.assertEqual(len(waveform.table.samples), 100)
self.assertEqual(
waveform.table.samples[0],
(Decimal('0.00000'),
Decimal('923.25950e-6'),
Decimal('493.94600e-6'),
Decimal('1.40550e-3')))
self.assertEqual(
waveform.table.samples[99],
(Decimal('1.50000e-9'),
Decimal('1.58450'),
Decimal('1.48580'),
Decimal('1.71990')))
def test_convert_numerical(self):
self.assertEqual(ibis.convert_numerical('1.1T'), Decimal('1.1e12'))
self.assertEqual(ibis.convert_numerical('1.1G'), Decimal('1.1e9'))
self.assertEqual(ibis.convert_numerical('1.1M'), Decimal('1.1e6'))
self.assertEqual(ibis.convert_numerical('1.1k'), Decimal('1.1e3'))
self.assertEqual(ibis.convert_numerical('1.1'), Decimal('1.1'))
self.assertEqual(ibis.convert_numerical('1.1m'), Decimal('1.1e-3'))
self.assertEqual(ibis.convert_numerical('1.1u'), Decimal('1.1e-6'))
self.assertEqual(ibis.convert_numerical('1.1n'), Decimal('1.1e-9'))
self.assertEqual(ibis.convert_numerical('1.1p'), Decimal('1.1e-12'))
self.assertEqual(ibis.convert_numerical('1.1f'), Decimal('1.1e-15'))
self.assertEqual(ibis.convert_numerical('2.123E-5'), Decimal('0.00002123'))
self.assertEqual(ibis.convert_numerical('2.123e5'), Decimal('2.123e5'))
self.assertEqual(ibis.convert_numerical('2.123E+5'), Decimal('2.123e5'))
self.assertEqual(ibis.convert_numerical('1.1Ohm'), Decimal('1.1'))
self.assertEqual(ibis.convert_numerical('1.1kOhm'), Decimal('1.1e3'))
self.assertEqual(ibis.convert_numerical('5.1e-3V'), Decimal('0.0051'))
def test_split_numerical(self):
self.assertEqual(ibis.split_numerical('1.1T'), ('1.1', 'T', ''))
self.assertEqual(ibis.split_numerical('1.1G'), ('1.1', 'G', ''))
self.assertEqual(ibis.split_numerical('1.1M'), ('1.1', 'M', ''))
self.assertEqual(ibis.split_numerical('1.1k'), ('1.1', 'k', ''))
self.assertEqual(ibis.split_numerical('1.1'), ('1.1', '', ''))
self.assertEqual(ibis.split_numerical('1.1m'), ('1.1', 'm', ''))
self.assertEqual(ibis.split_numerical('1.1u'), ('1.1', 'u', ''))
self.assertEqual(ibis.split_numerical('1.1n'), ('1.1', 'n', ''))
self.assertEqual(ibis.split_numerical('1.1p'), ('1.1', 'p', ''))
self.assertEqual(ibis.split_numerical('1.1f'), ('1.1', 'f', ''))
self.assertEqual(ibis.split_numerical('2.123E-5'), ('2.123E-5', '', ''))
self.assertEqual(ibis.split_numerical('2.123e5'), ('2.123e5', '', ''))
self.assertEqual(ibis.split_numerical('2.123E+5'), ('2.123E+5', '', ''))
self.assertEqual(ibis.split_numerical('1.1Ohm'), ('1.1', '', 'Ohm'))
self.assertEqual(ibis.split_numerical('1.1kOhm'), ('1.1', 'k', 'Ohm'))
self.assertEqual(ibis.split_numerical('5.1e-3V'), ('5.1e-3', '', 'V'))
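    # The two helpers exercised above follow an SI-suffix convention: the
    # trailing letter selects a power of ten (T=e12, G=e9, M=e6, k=e3, m=e-3,
    # u=e-6, n=e-9, p=e-12, f=e-15) and any remaining letters are treated as a
    # unit. A regex such as
    # r'^([-+]?\d*\.?\d+(?:[eE][-+]?\d+)?)([TGMkmunpf]?)([A-Za-z]*)$'
    # would reproduce the split_numerical() results asserted here; this is an
    # illustrative sketch, not necessarily ecdtools' actual implementation.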
def test_get_model_selector_by_name(self):
ibis_file = ibis.load_file('tests/files/ibis/pybis/sample1.ibs')
model_selector = ibis_file.get_model_selector_by_name('BUSB6AU')
self.assertEqual(model_selector.name, 'BUSB6AU')
with self.assertRaises(ecdtools.ibis.Error) as cm:
ibis_file.get_model_selector_by_name('Missing')
self.assertEqual(
str(cm.exception),
'Expected model selector name BUSB6AU, but got Missing.')
def test_get_model_by_name(self):
ibis_file = ibis.load_file('tests/files/ibis/pybis/bushold.ibs')
model = ibis_file.get_model_by_name('TOP_MODEL_BUS_HOLD')
self.assertEqual(model.name, 'TOP_MODEL_BUS_HOLD')
with self.assertRaises(ecdtools.ibis.Error) as cm:
ibis_file.get_model_by_name('Missing')
self.assertEqual(
str(cm.exception),
'Expected model name TOP_MODEL_BUS_HOLD, but got Missing.')
def test_get_component_by_name(self):
ibis_file = ibis.load_file('tests/files/ibis/pybis/bushold.ibs')
component = ibis_file.get_component_by_name('BUS-HOLD-SAMPLE')
self.assertEqual(component.name, 'BUS-HOLD-SAMPLE')
with self.assertRaises(ecdtools.ibis.Error) as cm:
ibis_file.get_component_by_name('Missing')
self.assertEqual(
str(cm.exception),
'Expected component name BUS-HOLD-SAMPLE, but got Missing.')
def test_load_pybis_files(self):
filenames = [
'sterm.ibs',
'sample2.ibs',
'sample1(original).ibs',
'sample1.ibs',
'ideal_driver.ibs',
'diff_pecl_term.ibs',
'dclamptr.ibs',
'dclampst.ibs',
'cbt.ibs',
'bushold.ibs',
'bird57ex.ibs',
'bugs/bug74.ibs',
'bugs/bug81.ibs',
'bugs/bug82.ibs',
'bugs/bug86.ibs',
'bugs/bug87.ibs'
]
for filename in filenames:
ibis.load_file('tests/files/ibis/pybis/' + filename)
def test_no_r_l_c_pin_columns(self):
ibis_file = ibis.load_file('tests/files/ibis/pybis/no_r_l_c_pin_columns.ibs')
# Components.
self.assertEqual(len(ibis_file.components), 1)
# First (and only) component.
component = ibis_file.components[0]
self.assertEqual(len(component.pins), 3)
self.assertEqual(component.pins[0].name, '1')
self.assertEqual(component.pins[0].signal_name, 'Sample1')
self.assertEqual(component.pins[0].model_name, 'TOP_MODEL_TERM')
self.assertEqual(component.pins[0].r_pin, None)
self.assertEqual(component.pins[0].l_pin, None)
self.assertEqual(component.pins[0].c_pin, None)
def test_device_clamp_ref(self):
ibis_file = ibis.load_file('tests/files/ibis/device_clamp_ref.ibs', transform=True)
# Check the IN model
model = ibis_file.get_model_by_name('IN')
self.assertEqual(model.pullup_reference.minimum, Decimal('4.5'))
self.assertEqual(model.pullup_reference.typical, Decimal('5'))
self.assertEqual(model.pullup_reference.maximum, Decimal('5.5'))
self.assertEqual(model.pulldown_reference.minimum, Decimal('-0.5'))
self.assertEqual(model.pulldown_reference.typical, Decimal('0'))
self.assertEqual(model.pulldown_reference.maximum, Decimal('0.5'))
self.assertEqual(model.power_clamp_reference.minimum, Decimal('4.5'))
self.assertEqual(model.power_clamp_reference.typical, Decimal('5'))
self.assertEqual(model.power_clamp_reference.maximum, Decimal('5.5'))
self.assertEqual(model.gnd_clamp_reference.minimum, Decimal('-0.5'))
self.assertEqual(model.gnd_clamp_reference.typical, Decimal('0'))
self.assertEqual(model.gnd_clamp_reference.maximum, Decimal('0.5'))
# Check the OUT model
model = ibis_file.get_model_by_name('OUT')
self.assertEqual(model.pullup_reference.minimum, Decimal('2.7'))
self.assertEqual(model.pullup_reference.typical, Decimal('3'))
self.assertEqual(model.pullup_reference.maximum, Decimal('3.3'))
self.assertEqual(model.pulldown_reference.minimum, Decimal('-0.5'))
self.assertEqual(model.pulldown_reference.typical, Decimal('0'))
self.assertEqual(model.pulldown_reference.maximum, Decimal('0.5'))
self.assertEqual(model.power_clamp_reference.minimum, Decimal('2.7'))
self.assertEqual(model.power_clamp_reference.typical, Decimal('3'))
self.assertEqual(model.power_clamp_reference.maximum, Decimal('3.3'))
self.assertEqual(model.gnd_clamp_reference.minimum, Decimal('-0.5'))
self.assertEqual(model.gnd_clamp_reference.typical, Decimal('0'))
self.assertEqual(model.gnd_clamp_reference.maximum, Decimal('0.5'))
logging.basicConfig(level=logging.DEBUG)
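# The assertions above pin down the public surface of the (assumed)
# ecdtools.ibis loader: load_file() with an optional transform flag, the
# get_*_by_name() accessors, and tuple-valued clamp/waveform tables. A
# comment-only usage sketch built from calls the tests themselves exercise
# (paths and model names come from the sample data; nothing else is implied
# about the API):
#
#   ibis_file = ibis.load_file('tests/files/ibis/pybis/sample1.ibs',
#                              transform=True)
#   model = ibis_file.get_model_by_name('BT2Z50CX_PU50K')
#   assert model.voltage_range.typical == Decimal('3.3')
#   for time, typical, minimum, maximum in model.rising_waveforms[0].table.samples:
#       pass  # each sample is a (time, typical, minimum, maximum) tuple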
| 52.172031
| 91
| 0.603378
| 5,946
| 47,007
| 4.64514
| 0.079381
| 0.249819
| 0.154091
| 0.159884
| 0.913903
| 0.905033
| 0.859631
| 0.803259
| 0.748371
| 0.676249
| 0
| 0.088417
| 0.221648
| 47,007
| 900
| 92
| 52.23
| 0.666475
| 0.012339
| 0
| 0.455939
| 0
| 0
| 0.133016
| 0.007889
| 0
| 0
| 0
| 0
| 0.591315
| 1
| 0.015326
| false
| 0
| 0.006386
| 0
| 0.024266
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6dc2e815b5f17edded2801aa00856d24612570cd
| 46
|
py
|
Python
|
cdp/__init__.py
|
ciholas/cdp-py
|
e0b67b5e606449ee65adfb949dbedbd71bef0823
|
[
"CC-BY-4.0"
] | 1
|
2021-10-18T13:40:17.000Z
|
2021-10-18T13:40:17.000Z
|
cdp/__init__.py
|
ciholas/cdp-py
|
e0b67b5e606449ee65adfb949dbedbd71bef0823
|
[
"CC-BY-4.0"
] | 1
|
2019-09-17T17:20:37.000Z
|
2019-09-20T15:17:23.000Z
|
cdp/__init__.py
|
ciholas/cdp-py
|
e0b67b5e606449ee65adfb949dbedbd71bef0823
|
[
"CC-BY-4.0"
] | null | null | null |
from cdp import *
from cdp.data_items import *
| 23
| 28
| 0.782609
| 8
| 46
| 4.375
| 0.625
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.152174
| 46
| 2
| 28
| 23
| 0.897436
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0979542620e95f96b90d2d68edc51a72503e08db
| 270
|
py
|
Python
|
python/src/graph/minimum_spanning_tree.py
|
Kinnoo/cs404.1-1
|
0fd2e2fbf02953eb1b2192945ab4107034399a68
|
[
"MIT"
] | null | null | null |
python/src/graph/minimum_spanning_tree.py
|
Kinnoo/cs404.1-1
|
0fd2e2fbf02953eb1b2192945ab4107034399a68
|
[
"MIT"
] | null | null | null |
python/src/graph/minimum_spanning_tree.py
|
Kinnoo/cs404.1-1
|
0fd2e2fbf02953eb1b2192945ab4107034399a68
|
[
"MIT"
] | null | null | null |
class MinimumSpanningTree:
def __init__(self, graph):
# TODO: complete this function
pass
def edges(self):
# TODO: complete this function
return None
def weight(self):
# TODO: complete this function
return 0
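# The stub above leaves every method as a TODO. One common way to complete it
# is Kruskal's algorithm over a union-find structure. The sketch below is
# illustrative only and assumes `graph` is an iterable of (weight, u, v) edge
# triples; the course skeleton does not say how the graph is actually stored.
class MinimumSpanningTreeKruskalSketch:
    def __init__(self, graph):
        # Greedily add the cheapest edge that connects two different
        # components, tracked with a union-find (path-halving) structure.
        self._edges = []
        self._weight = 0
        parent = {}

        def find(node):
            parent.setdefault(node, node)
            while parent[node] != node:
                parent[node] = parent[parent[node]]
                node = parent[node]
            return node

        for weight, u, v in sorted(graph):
            root_u, root_v = find(u), find(v)
            if root_u != root_v:
                parent[root_u] = root_v
                self._edges.append((u, v, weight))
                self._weight += weight

    def edges(self):
        # Edges chosen for the spanning tree, as (u, v, weight) tuples.
        return self._edges

    def weight(self):
        # Sum of the weights of the chosen edges.
        return self._weight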
| 20.769231
| 38
| 0.6
| 29
| 270
| 5.448276
| 0.551724
| 0.227848
| 0.303797
| 0.455696
| 0.43038
| 0.43038
| 0
| 0
| 0
| 0
| 0
| 0.005587
| 0.337037
| 270
| 12
| 39
| 22.5
| 0.877095
| 0.318519
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0
| 1
| 0.428571
| false
| 0.142857
| 0
| 0.285714
| 0.857143
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 9
|
09dfb7e286e05d22021f2c60a60208d952ed7f2e
| 13,787
|
py
|
Python
|
mysql_helper.py
|
myjxm/mmsegmentation
|
0672643d85b193451e1a980b54bfc9633a94b2f6
|
[
"Apache-2.0"
] | null | null | null |
mysql_helper.py
|
myjxm/mmsegmentation
|
0672643d85b193451e1a980b54bfc9633a94b2f6
|
[
"Apache-2.0"
] | null | null | null |
mysql_helper.py
|
myjxm/mmsegmentation
|
0672643d85b193451e1a980b54bfc9633a94b2f6
|
[
"Apache-2.0"
] | null | null | null |
#coding=utf-8
from flask import Flask,jsonify,request
import pymysql
import datetime
import logging
import json
app = Flask(__name__)
MYSQL_CONN_CONF = {'user':'root','password':'123456','host':'localhost','database':'paper_exp'}
def make_response(code, message, data=[]):
return {"code": code, "message": message, "data": data}
def mysql_query(sql_cmd,conn_conf=MYSQL_CONN_CONF):
result=();
try:
conn = pymysql.connect(**conn_conf )
except pymysql.Error as e:
logging.error("connect fails!:{}".format(e))
return make_response(-1, str(e))
#logging.info("<!--:PRINT{O_RTNCOD:-1,O_RTNMSG:'" + 'connect fails!{}'.format(e) + "'}-->")
cursor = conn.cursor(cursor=pymysql.cursors.DictCursor)
try:
cursor.execute(sql_cmd)
result = cursor.fetchall()
except pymysql.Error as e:
logging.error("connect fails!:{}".format(e))
#logging.info("<!--:PRINT{O_RTNCOD:-1,O_RTNMSG:'" + 'connect fails!{}'.format(e) + "'}-->")
return make_response(-1, str(e))
finally:
cursor.close()
conn.commit()
conn.close()
message = "execute '" + sql_cmd + "' successfully"
return make_response(0, message, result)
def mysql_query_new(sql_cmd,conn_conf=MYSQL_CONN_CONF):
result=();
try:
conn = pymysql.connect(**conn_conf )
except pymysql.Error as e:
logging.error("connect fails!:{}".format(e))
        return make_response(-1, str(e))
#logging.info("<!--:PRINT{O_RTNCOD:-1,O_RTNMSG:'" + 'connect fails!{}'.format(e) + "'}-->")
cursor = conn.cursor(cursor=pymysql.cursors.DictCursor)
try:
cursor.execute(sql_cmd)
result = cursor.fetchall()
except pymysql.Error as e:
logging.error("connect fails!:{}".format(e))
#logging.info("<!--:PRINT{O_RTNCOD:-1,O_RTNMSG:'" + 'connect fails!{}'.format(e) + "'}-->")
        return make_response(-1, str(e))
finally:
cursor.close()
conn.commit()
conn.close()
message = "execute '" + sql_cmd + "' successfully"
return make_response(0, message, result)
def mysql_update(sql_cmd,conn_conf=MYSQL_CONN_CONF):
try:
conn = pymysql.connect(**conn_conf )
except pymysql.Error as e:
logging.error("connect fails!:{}".format(e))
        return make_response(-1, str(e))
#logging.info("<!--:PRINT{O_RTNCOD:-1,O_RTNMSG:'" + 'connect fails!{}'.format(e) + "'}-->")
cursor = conn.cursor(cursor=pymysql.cursors.DictCursor)
try:
cursor.execute(sql_cmd)
except pymysql.Error as e:
logging.error("connect fails!:{}".format(e))
        return make_response(-1, str(e))
#logging.info("<!--:PRINT{O_RTNCOD:-1,O_RTNMSG:'" + 'connect fails!{}'.format(e) + "'}-->")
finally:
cursor.close()
conn.commit()
conn.close()
message = "execute '" + sql_cmd + "' successfully"
return make_response(0, message)
@app.route('/init_insert/performance/<modelname>', methods=['post','GET'])
def init_insert_performance(modelname):
sql_cmd="insert into performance(modelname,create_date,test_batch_status) values('" + modelname + "','" + datetime.datetime.now().strftime("%Y-%m-%d") + "','I');"
return jsonify(mysql_query(sql_cmd))
@app.route('/init_insert/statistic_class/<modelname>/<roc>/<dataset>', methods=['post','GET'])
def init_insert_statistic_class(modelname,roc,dataset):
sql_cmd="insert into statistic_class(model_name,roc_threshold,create_date,metric_status,dataset) values('" + modelname + "','" + str(roc) + "','" +datetime.datetime.now().strftime("%Y-%m-%d") + "','I','" + dataset +"');"
res = mysql_query(sql_cmd)
return jsonify(res)
@app.route('/query/test_batch_status/<modelname>', methods=['post','GET'])
def query_test_batch_status(modelname):
sql_cmd = "select test_batch_status from performance where modelname = '" + modelname + "';"
return jsonify(mysql_query(sql_cmd))
@app.route('/query/statistic_status/<modelname>/<roc>/<dataset>', methods=['post','GET'])
def query_statistic_status(modelname,roc,dataset):
sql_cmd = "select metric_status from statistic_class where model_name = '" + modelname + "' and roc_threshold = '" + str(roc) + "' and dataset = '" + dataset + "';"
return jsonify(mysql_query(sql_cmd))
@app.route('/update/test_batch_status/<modelname>', methods=['post','GET'])
def update_test_batch_status(modelname):
sql_cmd = "update performance set test_batch_status='Y' where modelname = '" + modelname + "';"
return jsonify(mysql_query(sql_cmd))
@app.route('/update/statistic_status/<modelname>/<roc>/<dataset>', methods=['post','GET'])
def update_statistic_status(modelname,roc,dataset):
sql_cmd = "update statistic_class set metric_status='Y' where model_name = '" + modelname + "' and roc_threshold = '" + str(roc) + "' and dataset = '" + dataset + "';"
return jsonify(mysql_query(sql_cmd))
@app.route('/update/performance/gflops/<modelname>/<gflops>/<params>', methods=['post','GET'])
def update_gflops(modelname,gflops,params):
sql_cmd = "update performance set gflops='" + str(gflops) + "',params_M='" + str(params) + "' where modelname = '" + modelname + "';"
return jsonify(mysql_query(sql_cmd))
@app.route('/update/performance/fps/<modelname>/<fps>', methods=['post','GET'])
def update_fps(modelname,fps):
sql_cmd = "update performance set fps='" + str(fps) + "' where modelname = '" + modelname + "';"
return jsonify(mysql_query(sql_cmd))
@app.route('/update/statistic_two_class/', methods=['post','GET'])
def update_two_statistic():
data = request.stream.read()
datadict = json.loads(data)
dataset = datadict['dataset']
model_name = datadict['modelname']
roc_threshold = datadict['roc']
aAcc = datadict['aAcc']
mioU = datadict['mIoU']
macc = datadict['mAcc']
mFscore = datadict['mFscore']
mPrecision = datadict['mPrecision']
mRecall = datadict['mRecall']
mfpr = datadict['mfpr']
mfnr = datadict['mfnr']
mkappa = datadict['mkappa']
mmcc = datadict['mmcc']
mhloss = datadict['mhloss']
grmse = datadict.get('Grmse','err')
gmax = datadict.get('Gmax','err')
other_iou = datadict['IoU.other']
water_iou = datadict['IoU.water']
other_acc = datadict['Acc.other']
water_acc = datadict['Acc.water']
other_Fscore = datadict['Fscore.other']
water_Fscore = datadict['Fscore.water']
other_Precision = datadict['Precision.other']
water_Precision = datadict['Precision.water']
other_Recall = datadict['Recall.other']
water_Recall = datadict['Recall.water']
other_fpr = datadict['fpr.other']
water_fpr = datadict['fpr.water']
other_fnr = datadict['fnr.other']
water_fnr = datadict['fnr.water']
other_kappa = datadict['kappa.other']
water_kappa = datadict['kappa.water']
other_mcc = datadict['mcc.other']
water_mcc = datadict['mcc.water']
other_hloss = datadict['hloss.other']
water_hloss = datadict['hloss.water']
sql_cmd = "update statistic_class set " \
"aAcc='" + str(aAcc) + "', " \
"mioU='" + str(mioU) + "', " \
"macc='" + str(macc) + "', " \
"mFscore='" + str(mFscore) + "', " \
"mPrecision='" + str(mPrecision) + "', " \
"mRecall='" + str(mRecall) + "', " \
"mfpr='" + str(mfpr) + "', " \
"mfnr='" + str(mfnr) + "', " \
"mkappa='" + str(mkappa) + "', " \
"mmcc='" + str(mmcc) + "', " \
"mhloss='" + str(mhloss) + "', " \
"grmse='" + str(grmse) + "', " \
"gmax='" + str(gmax) + "', " \
"other_iou='" + str(other_iou) + "', " \
"water_iou='" + str(water_iou) + "', " \
"other_acc='" + str(other_acc) + "', " \
"water_acc='" + str(water_acc) + "', " \
"other_Fscore='" + str(other_Fscore) + "', " \
"water_Fscore='" + str(water_Fscore) + "', " \
"other_Precision='" + str(other_Precision) + "', " \
"water_Precision='" + str(water_Precision) + "', " \
"other_Recall='" + str(other_Recall) + "', " \
"water_Recall='" + str(water_Recall) + "', " \
"other_fpr='" + str(other_fpr) + "', " \
"water_fpr='" + str(water_fpr) + "', " \
"other_fnr='" + str(other_fnr) + "', " \
"water_fnr='" + str(water_fnr) + "', " \
"other_kappa='" + str(other_kappa) + "', " \
"water_kappa='" + str(water_kappa) + "', " \
"other_mcc='" + str(other_mcc) + "', " \
"water_mcc='" + str(water_mcc) + "', " \
"other_hloss='" + str(other_hloss) + "', " \
"water_hloss='" + str(water_hloss) + "' " \
" where model_name = '" + model_name + "' and roc_threshold = '" + str(roc_threshold) + "' and dataset = '" + dataset + "';"
return jsonify(mysql_query(sql_cmd))
@app.route('/update/statistic_three_class/', methods=['post','GET'])
def update_three_statistic():
data = request.stream.read()
datadict = json.loads(data)
dataset = datadict['dataset']
model_name = datadict['modelname']
roc_threshold = datadict['roc']
aAcc = datadict['aAcc']
mioU = datadict['mIoU']
macc = datadict['mAcc']
mFscore = datadict['mFscore']
mPrecision = datadict['mPrecision']
mRecall = datadict['mRecall']
mfpr = datadict['mfpr']
mfnr = datadict['mfnr']
mkappa = datadict['mkappa']
mmcc = datadict['mmcc']
mhloss = datadict['mhloss']
grmse = datadict.get('Grmse','err')
gmax = datadict.get('Gmax','err')
other_iou = datadict['IoU.other']
water_iou = datadict['IoU.water']
sky_iou = datadict['IoU.sky']
other_acc = datadict['Acc.other']
water_acc = datadict['Acc.water']
sky_acc = datadict['Acc.sky']
other_Fscore = datadict['Fscore.other']
water_Fscore = datadict['Fscore.water']
sky_Fscore = datadict['Fscore.sky']
other_Precision = datadict['Precision.other']
water_Precision = datadict['Precision.water']
sky_Precision = datadict['Precision.sky']
other_Recall = datadict['Recall.other']
water_Recall = datadict['Recall.water']
sky_Recall = datadict['Recall.sky']
other_fpr = datadict['fpr.other']
water_fpr = datadict['fpr.water']
sky_fpr = datadict['fpr.sky']
other_fnr = datadict['fnr.other']
water_fnr = datadict['fnr.water']
sky_fnr = datadict['fnr.sky']
other_kappa = datadict['kappa.other']
water_kappa = datadict['kappa.water']
sky_kappa = datadict['kappa.sky']
other_mcc = datadict['mcc.other']
water_mcc = datadict['mcc.water']
sky_mcc = datadict['mcc.sky']
other_hloss = datadict['hloss.other']
water_hloss = datadict['hloss.water']
sky_hloss = datadict['hloss.sky']
sql_cmd = "update statistic_class set " \
"aAcc='" + str(aAcc) + "', " \
"mioU='" + str(mioU) + "', " \
"macc='" + str(macc) + "', " \
"mFscore='" + str(mFscore) + "', " \
"mPrecision='" + str(mPrecision) + "', " \
"mRecall='" + str(mRecall) + "', " \
"mfpr='" + str(mfpr) + "', " \
"mfnr='" + str(mfnr) + "', " \
"mkappa='" + str(mkappa) + "', " \
"mmcc='" + str(mmcc) + "', " \
"mhloss='" + str(mhloss) + "', " \
"grmse='" + str(grmse) + "', " \
"gmax='" + str(gmax) + "', " \
"other_iou='" + str(other_iou) + "', " \
"water_iou='" + str(water_iou) + "', " \
"sky_iou='" + str(sky_iou) + "', " \
"other_acc='" + str(other_acc) + "', " \
"water_acc='" + str(water_acc) + "', " \
"sky_acc='" + str(sky_acc) + "', " \
"other_Fscore='" + str(other_Fscore) + "', " \
"water_Fscore='" + str(water_Fscore) + "', " \
"sky_Fscore='" + str(sky_Fscore) + "', " \
"other_Precision='" + str(other_Precision) + "', " \
"water_Precision='" + str(water_Precision) + "', " \
"sky_Precision='" + str(sky_Precision) + "', " \
"other_Recall='" + str(other_Recall) + "', " \
"water_Recall='" + str(water_Recall) + "', " \
"sky_Recall='" + str(sky_Recall) + "', " \
"other_fpr='" + str(other_fpr) + "', " \
"water_fpr='" + str(water_fpr) + "', " \
"sky_fpr='" + str(sky_fpr) + "', " \
"other_fnr='" + str(other_fnr) + "', " \
"water_fnr='" + str(water_fnr) + "', " \
"sky_fnr='" + str(sky_fnr) + "', " \
"other_kappa='" + str(other_kappa) + "', " \
"water_kappa='" + str(water_kappa) + "', " \
"sky_kappa='" + str(sky_kappa) + "', " \
"other_mcc='" + str(other_mcc) + "', " \
"water_mcc='" + str(water_mcc) + "', " \
"sky_mcc='" + str(sky_mcc) + "', " \
"other_hloss='" + str(other_hloss) + "', " \
"water_hloss='" + str(water_hloss) + "', " \
"sky_hloss='" + str(sky_hloss) + "' " \
" where model_name = '" + model_name + "' and roc_threshold = '" + str(roc_threshold) + "' and dataset = '" + dataset + "';"
return jsonify(mysql_query(sql_cmd))
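# Note: the route handlers above interpolate request values directly into the
# SQL text, which is open to SQL injection. A safer pattern is to let the
# driver bind parameters. The helper below is an illustrative sketch (not part
# of the original module) showing the same query flow with %s placeholders.
def mysql_query_params(sql_cmd, params=None, conn_conf=MYSQL_CONN_CONF):
    try:
        conn = pymysql.connect(**conn_conf)
    except pymysql.Error as e:
        logging.error("connect fails!:{}".format(e))
        return make_response(-1, str(e))
    cursor = conn.cursor(cursor=pymysql.cursors.DictCursor)
    try:
        # Example:
        #   mysql_query_params(
        #       "select test_batch_status from performance where modelname = %s",
        #       (modelname,))
        cursor.execute(sql_cmd, params)
        result = cursor.fetchall()
    except pymysql.Error as e:
        logging.error("query fails!:{}".format(e))
        return make_response(-1, str(e))
    finally:
        cursor.close()
        conn.commit()
        conn.close()
    return make_response(0, "execute '" + sql_cmd + "' successfully", result)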
if __name__ == '__main__':
    logging.info("______________Start rest_server____________")
    logging.info("Start")
    app.run(host='0.0.0.0', port=8080)
| 42.552469
| 224
| 0.560456
| 1,484
| 13,787
| 4.979111
| 0.095687
| 0.023549
| 0.029233
| 0.030857
| 0.835837
| 0.815672
| 0.791041
| 0.76262
| 0.738124
| 0.725538
| 0
| 0.002898
| 0.249148
| 13,787
| 323
| 225
| 42.684211
| 0.710877
| 0.040038
| 0
| 0.75
| 0
| 0
| 0.254423
| 0.044912
| 0
| 0
| 0
| 0
| 0
| 1
| 0.050725
| false
| 0.003623
| 0.018116
| 0.003623
| 0.141304
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
61f9cbd18aca271b8ec72c93042b47bdeebee5e2
| 183
|
py
|
Python
|
graph4nlp/pytorch/modules/prediction/classification/__init__.py
|
cminusQAQ/graph4nlp
|
d980e897131f1b9d3766750c06316d94749904fa
|
[
"Apache-2.0"
] | 1,269
|
2021-06-06T03:27:41.000Z
|
2022-03-30T06:33:53.000Z
|
graph4nlp/pytorch/modules/prediction/classification/__init__.py
|
cminusQAQ/graph4nlp
|
d980e897131f1b9d3766750c06316d94749904fa
|
[
"Apache-2.0"
] | 106
|
2021-06-07T05:24:01.000Z
|
2022-03-31T19:18:48.000Z
|
graph4nlp/pytorch/modules/prediction/classification/__init__.py
|
cminusQAQ/graph4nlp
|
d980e897131f1b9d3766750c06316d94749904fa
|
[
"Apache-2.0"
] | 160
|
2021-06-06T15:09:17.000Z
|
2022-03-23T02:06:33.000Z
|
from . import graph_classification, kg_completion, link_prediction, node_classification
__all__ = ["graph_classification", "kg_completion", "link_prediction", "node_classification"]
| 45.75
| 93
| 0.825137
| 19
| 183
| 7.315789
| 0.526316
| 0.273381
| 0.302158
| 0.446043
| 0.906475
| 0.906475
| 0.906475
| 0.906475
| 0
| 0
| 0
| 0
| 0.076503
| 183
| 3
| 94
| 61
| 0.822485
| 0
| 0
| 0
| 0
| 0
| 0.36612
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
61fac7007159c36770555d776caf56822ffba990
| 327
|
py
|
Python
|
mhkit/dolfyn/io/__init__.py
|
akeeste/MHKiT-Python
|
db25af603547b764b2f85e957a5b0621cea5fd10
|
[
"BSD-3-Clause"
] | 3
|
2020-01-15T16:21:26.000Z
|
2020-01-28T17:10:13.000Z
|
mhkit/dolfyn/io/__init__.py
|
akeeste/MHKiT-Python
|
db25af603547b764b2f85e957a5b0621cea5fd10
|
[
"BSD-3-Clause"
] | null | null | null |
mhkit/dolfyn/io/__init__.py
|
akeeste/MHKiT-Python
|
db25af603547b764b2f85e957a5b0621cea5fd10
|
[
"BSD-3-Clause"
] | 4
|
2020-01-15T16:24:04.000Z
|
2020-01-15T20:45:22.000Z
|
from mhkit.dolfyn.io import _read_bin
from mhkit.dolfyn.io import api
from mhkit.dolfyn.io import base
from mhkit.dolfyn.io import nortek_defs
from mhkit.dolfyn.io import nortek
from mhkit.dolfyn.io import nortek2_defs
from mhkit.dolfyn.io import nortek2_lib
from mhkit.dolfyn.io import nortek2
from mhkit.dolfyn.io import rdi
| 32.7
| 40
| 0.834862
| 58
| 327
| 4.62069
| 0.241379
| 0.302239
| 0.503731
| 0.570896
| 0.925373
| 0.58209
| 0
| 0
| 0
| 0
| 0
| 0.010309
| 0.110092
| 327
| 9
| 41
| 36.333333
| 0.910653
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
11132d9442b93769d575a4cb162c6bfc301fec8d
| 36,968
|
py
|
Python
|
lib/risksense_api/__subject/__workflows/__workflows.py
|
YugeshJ/risksense_tools
|
975d9d2e723791aa414ecb69e6f66f4cb20cb3f1
|
[
"Apache-2.0"
] | null | null | null |
lib/risksense_api/__subject/__workflows/__workflows.py
|
YugeshJ/risksense_tools
|
975d9d2e723791aa414ecb69e6f66f4cb20cb3f1
|
[
"Apache-2.0"
] | null | null | null |
lib/risksense_api/__subject/__workflows/__workflows.py
|
YugeshJ/risksense_tools
|
975d9d2e723791aa414ecb69e6f66f4cb20cb3f1
|
[
"Apache-2.0"
] | 1
|
2022-03-31T06:24:35.000Z
|
2022-03-31T06:24:35.000Z
|
""" *******************************************************************************************************************
|
| Name : __workflows.py
| Description : Create functions for various utilities of the workflow endpoints
| Project : risksense_api
| Copyright : 2022 RiskSense, Inc.
| License : Apache-2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt)
|
******************************************************************************************************************* """
import json
from ...__subject import Subject
from ..._params import *
from ..._api_request_handler import *
import datetime
class Workflows(Subject):
""" Workflows class """
def __init__(self, profile):
"""
Initialization of Workflows object.
:param profile: Profile Object
:type profile: _profile
"""
self.subject_name = "workflowBatch"
Subject.__init__(self, profile, self.subject_name)
def search(self, search_filters, projection=Projection.BASIC, page_size=150,
sort_field=SortField.ID, sort_dir=SortDirection.ASC, client_id=None):
"""
        Searches for and returns workflow batches based on the provided filter(s) and other parameters. Rather
than returning paginated results, this function cycles through all pages of results and returns
them all in a single list.
:param search_filters: A list of dictionaries containing filter parameters.
:type search_filters: list
:param projection: Projection to be used in API request. Projection.BASIC or Projection.DETAIL
:type projection: Projection attribute
:param page_size: The number of results per page to be returned.
:type page_size: int
:param sort_field: The field to be used for sorting results returned.
:type sort_field: SortField attribute
:param sort_dir: The direction of sorting to be used. SortDirection.ASC or SortDirection.DESC
:type sort_dir: SortDirection attribute
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
        :return: A list containing all workflow batches returned by the search using the filter provided.
:rtype: list
:raises RequestFailed:
:raises Exception:
"""
func_args = locals()
func_args.pop('self')
if client_id is None:
client_id, func_args['client_id'] = self._use_default_client_id()
try:
page_info = self._get_page_info(self.subject_name, search_filters, page_size=page_size, client_id=client_id)
num_pages = page_info[1]
except RequestFailed:
raise
page_range = range(0, num_pages)
try:
all_results = self._search(self.subject_name, self.get_single_search_page, page_range, **func_args)
except (RequestFailed, Exception):
raise
return all_results
def get_single_search_page(self, search_filters, projection=Projection.BASIC, page_num=0, page_size=150,
sort_field=SortField.ID, sort_dir=SortDirection.ASC, client_id=None):
"""
        Searches for and returns workflow batches based on the provided filter(s) and other parameters.
:param search_filters: A list of dictionaries containing filter parameters.
:type search_filters: list
:param projection: Projection to be used in API request. Projection.BASIC or Projection.DETAIL
:type projection: Projection attribute
:param page_num: The page number of results to be returned.
:type page_num: int
:param page_size: The number of results per page to be returned.
:type page_size: int
:param sort_field: The field to be used for sorting results returned.
:type sort_field: SortField attribute
:param sort_dir: The direction of sorting to be used. SortDirection.ASC or SortDirection.DESC
:type sort_dir: SortDirection attribute
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The JSON response from the platform is returned.
:rtype: dict
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
url = self.api_base_url.format(str(client_id)) + "/search"
body = {
"filters": search_filters,
"projection": projection,
"sort": [
{
"field": sort_field,
"direction": sort_dir
}
],
"page": page_num,
"size": page_size
}
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, body=body)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
return jsonified_response
def get_model(self, client_id=None):
"""
        Get available projections and models for Workflows.
:param client_id: Client ID
:type client_id: int
        :return: Workflows projections and models are returned.
:rtype: dict
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
try:
response = self._model(self.subject_name, client_id)
except RequestFailed:
raise
return response
def suggest(self, search_filter_1, search_filter_2, client_id=None):
"""
Suggest values for filter fields.
:param search_filter_1: Search Filter 1
:type search_filter_1: list
:param search_filter_2: Search Filter 2
:type search_filter_2: list
:param client_id: Client ID
:type client_id: int
:return: Value suggestions
:rtype: list
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
try:
response = self._suggest(self.subject_name, search_filter_1, search_filter_2, client_id)
except RequestFailed:
raise
return response
def request_acceptance(self, finding_type, search_filter, workflow_name, description, reason, override_control, compensating_controls="NONE", expiration_date=None, attachment=None, client_id=None):
"""
        Request acceptance for applicationFindings / hostFindings as defined in the filter_request parameter.
:param finding_type: Finding type. Possible options : ("hostFinding" or "applicationFinding")
:type finding_type: str
:param search_filter: A list of dictionaries containing filter parameters.
:type search_filter: list
:param workflow_name: Workflow Name
:type workflow_name: str
:param description: A description of the request.
:type description: str
:param reason: A reason for the request.
:type reason: str
:param override_control: A description of override controls applied to this finding. Option available : ('NONE', 'AUTHORIZED')
:type override_control: str
:param compensating_controls: A description of compensating controls applied to this finding. Option available : ("DLP", "Deemed not exploitable", "Endpoint Security", "IDS/IPS", "MFA Enforced", "Multiple: See Description", "Network Firewall", "Network Segmentation", "Other: See Description", "Web Application Firewall" or "NONE")
:type compensating_controls: str
:param expiration_date: An expiration date. Should be in "YYYY-MM-DD" format.
:type expiration_date: str
:param attachment: A path to a file to be uploaded and attached to the request.
:type attachment: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID within the platform is returned.
:rtype: int
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
url = self.api_base_url.format(str(client_id)) + "/acceptance/request"
body = {"subject": finding_type,
"filterRequest": {
"filters": search_filter
}
}
multiform_data = {
"name": (None,workflow_name),
"subjectFilterRequest": (None,json.dumps(body)),
"description": (None,description),
"reason": (None,reason),
"overrideControl": (None,override_control),
"compensatingControls": (None,compensating_controls),
"files": attachment,
"expirationDate": (None,expiration_date),
"isEmptyWorkflow":(None,"false")
}
body = self._strip_nones_from_dict(body)
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, files=multiform_data)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
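    # Illustrative call sequence for this class (comment-only sketch; the
    # filter dictionary keys shown are assumptions about the platform's filter
    # schema, not something this module defines):
    #
    #   workflows = Workflows(profile)
    #   job_id = workflows.request_acceptance(
    #       finding_type="hostFinding",
    #       search_filter=[{"field": "severity", "exclusive": False,
    #                       "operator": "EXACT", "value": "9"}],
    #       workflow_name="Accept lab criticals",
    #       description="Lab assets only",
    #       reason="Risk accepted by owner",
    #       override_control="NONE",
    #       compensating_controls="Network Segmentation",
    #       expiration_date="2022-12-31")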
def request_false_positive(self, finding_type, search_filter, workflow_name, description, reason, override_control, expiration_date=None, attachment=None, client_id=None):
"""
Request false positive for applicationFindings / hostFindings as defined in the filter_request parameter.
:param finding_type: Finding type. Possible options : ("hostFinding" or "applicationFinding")
:type finding_type: str
:param search_filter: A list of dictionaries containing filter parameters.
:type search_filter: list
:param workflow_name: Workflow Name
:type workflow_name: str
:param description: A description of the request.
:type description: str
:param reason: A reason for the request.
:type reason: str
:param override_control: A description of override controls applied to this finding. Option available : ('NONE', 'AUTHORIZED')
:type override_control: str
:param expiration_date: An expiration date. Should be in "YYYY-MM-DD" format.
:type expiration_date: str
:param attachment: A path to a file to be uploaded and attached to the request.
:type attachment: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID within the platform is returned.
:rtype: int
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
url = self.api_base_url.format(str(client_id)) + "/falsePositive/request"
body = {"subject": finding_type,
"filterRequest": {
"filters": search_filter
}
}
multiform_data = {
"name": (None,workflow_name),
"subjectFilterRequest": (None,json.dumps(body)),
"description": (None,description),
"reason": (None,reason),
"overrideControl": (None,override_control),
"files": attachment,
"expirationDate": (None,expiration_date),
"isEmptyWorkflow":(None,"false")
}
body = self._strip_nones_from_dict(body)
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, files=multiform_data)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
def request_remediation(self, finding_type, search_filter, workflow_name, description, reason, override_control, expiration_date=None, attachment=None, client_id=None):
"""
Request remediation for applicationFindings / hostFindings as defined in the filter_request parameter.
:param finding_type: Finding type. Possible options : ("hostFinding" or "applicationFinding")
:type finding_type: str
:param search_filter: A list of dictionaries containing filter parameters.
:type search_filter: list
:param workflow_name: Workflow Name
:type workflow_name: str
:param description: A description of the request.
:type description: str
:param reason: A reason for the request.
:type reason: str
:param override_control: A description of override controls applied to this finding. Option available : ('NONE', 'AUTHORIZED')
:type override_control: str
:param expiration_date: An expiration date. Should be in "YYYY-MM-DD" format.
:type expiration_date: str
:param attachment: A path to a file to be uploaded and attached to the request.
:type attachment: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID within the platform is returned.
:rtype: int
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
url = self.api_base_url.format(str(client_id)) + "/remediation/request"
body = {"subject": finding_type,
"filterRequest": {
"filters": search_filter
}
}
multiform_data = {
"name": (None,workflow_name),
"subjectFilterRequest": (None,json.dumps(body)),
"description": (None,description),
"reason": (None,reason),
"overrideControl": (None,override_control),
"files": attachment,
"expirationDate": (None,expiration_date),
"isEmptyWorkflow":(None,"false")
}
body = self._strip_nones_from_dict(body)
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, files=multiform_data)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
def request_severity_change(self, finding_type, search_filter, workflow_name, description, reason, override_control, severity_change, expiration_date=None, attachment=None, client_id=None):
"""
Request severity change for applicationFindings / hostFindings as defined in the filter_request parameter.
:param finding_type: Finding type. Possible options : ("hostFinding" or "applicationFinding")
:type finding_type: str
:param search_filter: A list of dictionaries containing filter parameters.
:type search_filter: list
:param workflow_name: Workflow Name
:type workflow_name: str
:param description: A description of the request.
:type description: str
:param reason: A reason for the request.
:type reason: str
:param override_control: A description of override controls applied to this finding. Option available : ('NONE', 'AUTHORIZED')
:type override_control: str
        :param severity_change: Severity change for this finding. Option available : ("1" to "10")
        :type severity_change: str
:param expiration_date: An expiration date. Should be in "YYYY-MM-DD" format.
:type expiration_date: str
:param attachment: A path to a file to be uploaded and attached to the request.
:type attachment: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID within the platform is returned.
:rtype: int
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
url = self.api_base_url.format(str(client_id)) + "/severityChange/request"
body = {"subject": finding_type,
"filterRequest": {
"filters": search_filter
}
}
multiform_data = {
"name": (None,workflow_name),
"subjectFilterRequest": (None,json.dumps(body)),
"description": (None,description),
"reason": (None,reason),
"overrideControl": (None,override_control),
"severity": (None,severity_change),
"files": attachment,
"expirationDate": (None,expiration_date),
"isEmptyWorkflow":(None,"false")
}
body = self._strip_nones_from_dict(body)
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, files=multiform_data)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
def reject_acceptance(self, filter_request, description, client_id=None):
"""
Reject an acceptance request.
:param filter_request: A list of dictionaries containing filter parameters.
:type filter_request: list
:param description: A description of the rejection.
:type description: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID from the platform is returned.
:rtype: int
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
search_response = self.get_single_search_page(filter_request)
uuid = search_response['_embedded']['workflowBatches'][0]['uuid']
url = self.api_base_url.format(str(client_id)) + "/acceptance/reject"
body = {
"workflowBatchUuid":uuid,
"description":description
}
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, body=body)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
def reject_false_positive(self, filter_request, description, client_id=None):
"""
Reject a false positive request.
:param filter_request: A list of dictionaries containing filter parameters.
:type filter_request: list
:param description: A description of the rejection.
:type description: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID from the platform is returned.
:rtype: int
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
search_response = self.get_single_search_page(filter_request)
uuid = search_response['_embedded']['workflowBatches'][0]['uuid']
url = self.api_base_url.format(str(client_id)) + "/falsePositive/reject"
body = {
"workflowBatchUuid":uuid,
"description":description
}
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, body=body)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
def reject_remediation(self, filter_request, description, client_id=None):
"""
Reject a remediation request.
:param filter_request: A list of dictionaries containing filter parameters.
:type filter_request: list
:param description: A description of the rejection.
:type description: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID from the platform is returned.
:rtype: int
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
search_response = self.get_single_search_page(filter_request)
uuid = search_response['_embedded']['workflowBatches'][0]['uuid']
url = self.api_base_url.format(str(client_id)) + "/remediation/reject"
body = {
"workflowBatchUuid":uuid,
"description":description
}
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, body=body)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
def reject_severity_change(self, filter_request, description, client_id=None):
"""
Reject a severity change request.
        :param filter_request: A list of dictionaries containing filter parameters.
:type filter_request: list
:param description: A description of the rejection.
:type description: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID from the platform is returned.
:rtype: int
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
search_response = self.get_single_search_page(filter_request)
uuid = search_response['_embedded']['workflowBatches'][0]['uuid']
url = self.api_base_url.format(str(client_id)) + "/severityChange/reject"
body = {
"workflowBatchUuid":uuid,
"description":description
}
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, body=body)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
def rework_acceptance(self, filter_request, description, client_id=None):
"""
Request a rework of an acceptance.
:param filter_request: A list of dictionaries containing filter parameters.
:type filter_request: list
:param description: A description of the rework.
:type description: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID from the platform is returned.
:rtype: int
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
search_response = self.get_single_search_page(filter_request)
uuid = search_response['_embedded']['workflowBatches'][0]['uuid']
url = self.api_base_url.format(str(client_id)) + "/acceptance/rework"
body = {
"workflowBatchUuid":uuid,
"description":description
}
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, body=body)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
def rework_false_positive(self, filter_request, description, client_id=None):
"""
Request a rework of a false positive.
:param filter_request: A list of dictionaries containing filter parameters.
:type filter_request: list
:param description: A description of the rework.
:type description: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID from the platform is returned.
:rtype: int
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
search_response = self.get_single_search_page(filter_request)
uuid = search_response['_embedded']['workflowBatches'][0]['uuid']
url = self.api_base_url.format(str(client_id)) + "/falsePositive/rework"
body = {
"workflowBatchUuid":uuid,
"description":description
}
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, body=body)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
def rework_remediation(self, filter_request, description, client_id=None):
"""
Request a rework of a remediation.
:param filter_request: A list of dictionaries containing filter parameters.
:type filter_request: list
:param description: A description of the rework.
:type description: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID from the platform is returned.
:rtype: int
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
search_response = self.get_single_search_page(filter_request)
uuid = search_response['_embedded']['workflowBatches'][0]['uuid']
url = self.api_base_url.format(str(client_id)) + "/remediation/rework"
body = {
"workflowBatchUuid":uuid,
"description":description
}
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, body=body)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
def rework_severity_change(self, filter_request, description, client_id=None):
"""
Request a rework of a severity change.
:param filter_request: A list of dictionaries containing filter parameters.
:type filter_request: list
:param description: A description of the rework.
:type description: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID from the platform is returned.
:rtype: int
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
search_response = self.get_single_search_page(filter_request)
uuid = search_response['_embedded']['workflowBatches'][0]['uuid']
url = self.api_base_url.format(str(client_id)) + "/severityChange/rework"
body = {
"workflowBatchUuid":uuid,
"description":description
}
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, body=body)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
def approve_acceptance(self, filter_request, override_exp_date=False,
expiration_date=(datetime.date.today() + datetime.timedelta(days=14)), client_id=None):
"""
Approve an acceptance request.
:param filter_request: A list of dictionaries containing filter parameters.
:type filter_request: list
:param override_exp_date: True/False indicating whether or not an expiration date should be overridden.
:type override_exp_date: bool
:param expiration_date: An expiration date for the approval. Should be in "YYYY-MM-DD" format.
:type expiration_date: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID from the platform is returned.
:rtype: int
:raises RequestFailed:
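:Example: An illustrative call; ``wf`` and the filter below are assumed names
for this example only, not values defined in this module.
>>> filters = [{"field": "group_ids", "exclusive": False,
...             "operator": "IN", "value": "1234"}]
>>> job_id = wf.approve_acceptance(filters,
...                                override_exp_date=True,
...                                expiration_date="2022-12-31")  # doctest: +SKIP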
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
search_response = self.get_single_search_page(filter_request)
uuid = search_response['_embedded']['workflowBatches'][0]['uuid']
url = self.api_base_url.format(str(client_id)) + "/acceptance/approve"
body = {
"workflowBatchUuid": uuid,
"expirationDate": str(expiration_date),
"overrideExpirationDate": override_exp_date
}
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, body=body)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
def approve_false_positive(self, filter_request, override_exp_date=False,
expiration_date=(datetime.date.today() + datetime.timedelta(days=14)), client_id=None):
"""
Approve a false positive change request.
:param filter_request: A list of dictionaries containing filter parameters.
:type filter_request: list
:param override_exp_date: True/False indicating whether or not an expiration date should be overridden.
:type override_exp_date: bool
:param expiration_date: An expiration date for the approval. Should be in "YYYY-MM-DD" format.
:type expiration_date: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID from the platform is returned.
:rtype: int
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
search_response = self.get_single_search_page(filter_request)
uuid = search_response['_embedded']['workflowBatches'][0]['uuid']
url = self.api_base_url.format(str(client_id)) + "/falsePositive/approve"
body = {
"workflowBatchUuid": uuid,
"expirationDate": str(expiration_date),
"overrideExpirationDate": override_exp_date
}
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, body=body)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
def approve_remediation(self, filter_request, override_exp_date=False,
expiration_date=(datetime.date.today() + datetime.timedelta(days=14)), client_id=None):
"""
Approve a remediation request.
:param filter_request: A list of dictionaries containing filter parameters.
:type filter_request: list
:param override_exp_date: True/False indicating whether or not an expiration date should be overridden.
:type override_exp_date: bool
:param expiration_date: An expiration date for the approval. Should be in "YYYY-MM-DD" format.
:type expiration_date: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID from the platform is returned.
:rtype: int
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
search_response = self.get_single_search_page(filter_request)
uuid = search_response['_embedded']['workflowBatches'][0]['uuid']
url = self.api_base_url.format(str(client_id)) + "/remediation/approve"
body = {
"workflowBatchUuid": uuid,
"expirationDate": str(expiration_date),
"overrideExpirationDate": override_exp_date
}
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, body=body)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
def approve_severity_change(self, filter_request, override_exp_date=False,
expiration_date=(datetime.date.today() + datetime.timedelta(days=14)), client_id=None):
"""
Approve a severity change request.
:param filter_request: A list of dictionaries containing filter parameters.
:type filter_request: list
:param override_exp_date: True/False indicating whether or not an expiration date should be overridden.
:type override_exp_date: bool
:param expiration_date: An expiration date for the approval. Should be in "YYYY-MM-DD" format.
:type expiration_date: str
:param client_id: Client ID. If an ID isn't passed, will use the profile's default Client ID.
:type client_id: int
:return: The job ID from the platform is returned.
:rtype: int
:raises RequestFailed:
"""
if client_id is None:
client_id = self._use_default_client_id()[0]
search_response = self.get_single_search_page(filter_request)
uuid = search_response['_embedded']['workflowBatches'][0]['uuid']
url = self.api_base_url.format(str(client_id)) + "/severityChange/approve"
body = {
"workflowBatchUuid": uuid,
"expirationDate": str(expiration_date),
"overrideExpirationDate": override_exp_date
}
try:
raw_response = self.request_handler.make_request(ApiRequestHandler.POST, url, body=body)
except RequestFailed:
raise
jsonified_response = json.loads(raw_response.text)
job_id = jsonified_response['id']
return job_id
"""
Copyright 2022 RiskSense, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at:
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
hexsha: 111ea89045cd4e24fcc858398030b1e704a47773 | size: 103,303 | ext: py | lang: Python
path: pykoop/lmi_regressors.py | repo: decarsg/pykoop | head: 6a8b7c83bdc7de3419e2fac48c1035fa06966e24 | licenses: ["MIT"]
stars: 9 (2021-10-18T21:49:46.000Z to 2022-01-31T16:06:01.000Z) | issues: 6 (2021-10-19T18:02:08.000Z to 2022-03-31T18:45:37.000Z) | forks: 1 (2022-03-08T14:59:33.000Z to 2022-03-08T14:59:33.000Z)
"""Collection of experimental LMI-based Koopman regressors from [lmikoop]_.
Warning
-------
Importing this module has side effects! When imported, the module creates a
temporary directory with the prefix ``pykoop_``, which is used to memoize long
computations that may be repeated frequently. It also catches ``SIGINT`` so
that long regressions can be stopped politely.
"""
import logging
import signal
import tempfile
from typing import Any, Dict, List, Optional, Tuple, Union
import joblib
import numpy as np
import picos
import scipy.signal
import sklearn.base
import sklearn.utils.metaestimators
from scipy import linalg
from . import koopman_pipeline, regressors, tsvd
# Create logger
log = logging.getLogger(__name__)
# Create temporary cache directory for memoized computations
_cachedir = tempfile.TemporaryDirectory(prefix='pykoop_')
log.info(f'Temporary directory created at `{_cachedir.name}`')
memory = joblib.Memory(_cachedir.name, verbose=0)
# Create signal handler to politely stop computations
polite_stop = False
def _sigint_handler(sig, frame):
"""Signal handler for ^C."""
global polite_stop
if not polite_stop:
print('Stop requested. Regression will stop safely at next iteration. '
'Press ^C again to force quit.')
polite_stop = True
else:
print('Force quitting now.')
exit()
signal.signal(signal.SIGINT, _sigint_handler)
class LmiRegressor(koopman_pipeline.KoopmanRegressor):
"""Base class for LMI regressors.
For derivations of LMIs, see [lmikoop]_.
This base class is mostly used to share common ``scikit-learn`` parameters.
Attributes
----------
n_features_in_ : int
Number of features input, including episode feature.
n_states_in_ : int
Number of states input.
n_inputs_in_ : int
Number of inputs input.
episode_feature_ : bool
Indicates if episode feature was present during :func:`fit`.
coef_ : np.ndarray
Fit coefficient matrix.
"""
# Default solver parameters
_default_solver_params: Dict[str, Any] = {
'primals': None,
'duals': None,
'dualize': True,
'abs_bnb_opt_tol': None,
'abs_dual_fsb_tol': None,
'abs_ipm_opt_tol': None,
'abs_prim_fsb_tol': None,
'integrality_tol': None,
'markowitz_tol': None,
'rel_bnb_opt_tol': None,
'rel_dual_fsb_tol': None,
'rel_ipm_opt_tol': None,
'rel_prim_fsb_tol': None,
}
# Override since PICOS only works with ``float64``.
_check_X_y_params: Dict[str, Any] = {
'multi_output': True,
'y_numeric': True,
'dtype': 'float64',
}
def _more_tags(self):
reason = ('Hard to guarantee exact idempotence when calling external '
'solver.')
return {
'multioutput': True,
'multioutput_only': True,
'_xfail_checks': {
'check_fit_idempotent': reason,
}
}
class LmiEdmd(LmiRegressor):
"""LMI-based EDMD with regularization.
Supports Tikhonov regularization, optionally mixed with matrix two-norm
regularization or nuclear norm regularization.
Attributes
----------
alpha_tikhonov_ : float
Tikhonov regularization coefficient used.
alpha_other_ : float
Matrix two norm or nuclear norm regularization coefficient used.
tsvd_ : pykoop.Tsvd
Fit truncated SVD object.
solver_params_ : Dict[str, Any]
Solver parameters used (defaults merged with constructor input).
n_features_in_ : int
Number of features input, including episode feature.
n_states_in_ : int
Number of states input.
n_inputs_in_ : int
Number of inputs input.
episode_feature_ : bool
Indicates if episode feature was present during :func:`fit`.
coef_ : np.ndarray
Fit coefficient matrix.
Examples
--------
LMI EDMD without regularization
>>> kp = pykoop.KoopmanPipeline(regressor=pykoop.lmi_regressors.LmiEdmd())
>>> kp.fit(X_msd, n_inputs=1, episode_feature=True)
KoopmanPipeline(regressor=LmiEdmd())
LMI EDMD with Tikhonov regularization
>>> kp = pykoop.KoopmanPipeline(
... regressor=pykoop.lmi_regressors.LmiEdmd(
... alpha=1,
... reg_method='tikhonov',
... )
... )
>>> kp.fit(X_msd, n_inputs=1, episode_feature=True)
KoopmanPipeline(regressor=LmiEdmd(alpha=1))
LMI EDMD with matrix two-norm regularization
>>> kp = pykoop.KoopmanPipeline(
... regressor=pykoop.lmi_regressors.LmiEdmd(
... alpha=1,
... reg_method='twonorm',
... )
... )
>>> kp.fit(X_msd, n_inputs=1, episode_feature=True)
KoopmanPipeline(regressor=LmiEdmd(alpha=1, reg_method='twonorm'))
LMI EDMD with mixed Tikhonov and squared-nuclear-norm regularization
>>> kp = pykoop.KoopmanPipeline(
... regressor=pykoop.lmi_regressors.LmiEdmd(
... alpha=1,
... ratio=0.5,
... reg_method='nuclear',
... square_norm=True,
... )
... )
>>> kp.fit(X_msd, n_inputs=1, episode_feature=True)
KoopmanPipeline(regressor=LmiEdmd(alpha=1, ratio=0.5, reg_method='nuclear',
square_norm=True))
"""
def __init__(self,
alpha: float = 0,
ratio: float = 1,
reg_method: str = 'tikhonov',
inv_method: str = 'svd',
tsvd: tsvd.Tsvd = None,
square_norm: bool = False,
picos_eps: float = 0,
solver_params: Dict[str, Any] = None) -> None:
"""Instantiate :class:`LmiEdmd`.
To disable regularization, use ``alpha=0`` paired with
``reg_method='tikhonov'``.
Parameters
----------
alpha : float
Regularization coefficient. Can only be zero if
``reg_method='tikhonov'``.
ratio : float
Ratio of matrix two-norm or nuclear norm to use in mixed
regularization. If ``ratio=1``, no Tikhonov regularization is
used. Cannot be zero. Ignored if ``reg_method='tikhonov'``.
reg_method : str
Regularization method to use. Possible values are
- ``'tikhonov'`` -- pure Tikhonov regularization (``ratio``
is ignored),
- ``'twonorm'`` -- matrix two-norm regularization mixed with
Tikhonov regularization, or
- ``'nuclear'`` -- nuclear norm regularization mixed with Tikhonov
regularization.
inv_method : str
Method to handle or avoid inversion of the ``H`` matrix when
forming the LMI problem. Possible values are
- ``'inv'`` -- invert ``H`` directly,
- ``'pinv'`` -- apply the Moore-Penrose pseudoinverse to ``H``,
- ``'eig'`` -- split ``H`` using an eigendecomposition,
- ``'ldl'`` -- split ``H`` using an LDL decomposition,
- ``'chol'`` -- split ``H`` using a Cholesky decomposition,
- ``'sqrt'`` -- split ``H`` using :func:`scipy.linalg.sqrtm()`, or
- ``'svd'`` -- split ``H`` using a singular value decomposition.
tsvd : pykoop.Tsvd
Singular value truncation method if ``inv_method='svd'``. If
``None``, economy SVD is used.
square_norm : bool
Square norm in matrix two-norm or nuclear norm regularizer.
Enabling may increase computation time. Frobenius norm used in
Tikhonov regularizer is always squared.
picos_eps : float
Tolerance used for strict LMIs. If nonzero, should be larger than
solver tolerance.
solver_params : Dict[str, Any]
Parameters passed to PICOS :func:`picos.Problem.solve()`. By
default, allows chosen solver to select its own tolerances.
"""
self.alpha = alpha
self.ratio = ratio
self.reg_method = reg_method
self.inv_method = inv_method
self.tsvd = tsvd
self.square_norm = square_norm
self.picos_eps = picos_eps
self.solver_params = solver_params
def _fit_regressor(self, X_unshifted: np.ndarray,
X_shifted: np.ndarray) -> np.ndarray:
# Set solver parameters
self.solver_params_ = self._default_solver_params.copy()
if self.solver_params is not None:
self.solver_params_.update(self.solver_params)
# Compute regularization coefficients
if self.reg_method == 'tikhonov':
self.alpha_tikhonov_ = self.alpha
self.alpha_other_ = 0.0
else:
self.alpha_tikhonov_ = self.alpha * (1.0 - self.ratio)
self.alpha_other_ = self.alpha * self.ratio
# Clone TSVD
self.tsvd_ = (sklearn.base.clone(self.tsvd)
if self.tsvd is not None else tsvd.Tsvd())
# Form optimization problem. Regularization coefficients must be scaled
# because of how G and H are defined.
q = X_unshifted.shape[0]
problem = self._create_base_problem(X_unshifted, X_shifted,
self.alpha_tikhonov_ / q,
self.inv_method, self.tsvd_,
self.picos_eps)
if self.reg_method == 'twonorm':
problem = _add_twonorm(problem, problem.variables['U'],
self.alpha_other_ / q, self.square_norm,
self.picos_eps)
elif self.reg_method == 'nuclear':
problem = _add_nuclear(problem, problem.variables['U'],
self.alpha_other_ / q, self.square_norm,
self.picos_eps)
# Solve optimization problem
problem.solve(**self.solver_params_)
# Save solution status
self.solution_status_ = problem.last_solution.claimedStatus
# Extract solution from ``Problem`` object
coef = self._extract_solution(problem)
return coef
def _validate_parameters(self) -> None:
# Check regularization methods
valid_reg_methods = ['tikhonov', 'twonorm', 'nuclear']
if self.reg_method not in valid_reg_methods:
raise ValueError('`reg_method` must be one of '
f'{valid_reg_methods}.')
# Check ratio
if (self.ratio <= 0) or (self.ratio > 1):
raise ValueError('`ratio` must be positive and at most one.')
@staticmethod
def _create_base_problem(
X_unshifted: np.ndarray,
X_shifted: np.ndarray,
alpha_tikhonov: float,
inv_method: str,
tsvd: tsvd.Tsvd,
picos_eps: float,
) -> picos.Problem:
"""Create optimization problem."""
# Validate ``alpha``
if alpha_tikhonov < 0:
raise ValueError('Parameter `alpha` must be positive or zero.')
# Validate ``inv_method``
valid_inv_methods = [
'inv', 'pinv', 'eig', 'ldl', 'chol', 'sqrt', 'svd'
]
if inv_method not in valid_inv_methods:
raise ValueError('`inv_method` must be one of '
f'{valid_inv_methods}.')
# Validate ``picos_eps``
if picos_eps < 0:
raise ValueError('Parameter `picos_eps` must be positive or zero.')
# Compute ``G`` and ``H``. ``alpha_tikhonov`` must already be scaled
# by ``q`` if applicable.
c, G, H, _ = _calc_c_G_H(X_unshifted, X_shifted, alpha_tikhonov)
# Optimization problem
problem = picos.Problem()
# Constants
G_T = picos.Constant('G^T', G.T)
# Variables
U = picos.RealVariable('U', (G.shape[0], H.shape[0]))
Z = picos.SymmetricVariable('Z', (G.shape[0], G.shape[0]))
# Constraints
problem.add_constraint(Z >> picos_eps)
# Choose method to handle inverse of H
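# Each branch below encodes the quadratic bound Z >> U H U^T as an LMI via
# a Schur complement: with H factored as H = F F^T, the block matrix
# [[Z, U F], [F^T U^T, I]] >> 0 is equivalent to Z - U F F^T U^T >> 0.
# The 'inv' and 'pinv' branches use the unfactored form
# [[Z, U], [U^T, H^-1]] >> 0 instead.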
if inv_method == 'inv':
H_inv = picos.Constant('H^-1', _calc_Hinv(H))
problem.add_constraint(picos.block([
[Z, U],
[U.T, H_inv],
]) >> picos_eps)
elif inv_method == 'pinv':
H_inv = picos.Constant('H^+', _calc_Hpinv(H))
problem.add_constraint(picos.block([
[Z, U],
[U.T, H_inv],
]) >> picos_eps)
elif inv_method == 'eig':
VsqrtLmb = picos.Constant('(V Lambda^(1/2))', _calc_VsqrtLmb(H))
problem.add_constraint(
picos.block([
[Z, U * VsqrtLmb],
[VsqrtLmb.T * U.T, 'I'],
]) >> picos_eps)
elif inv_method == 'ldl':
LsqrtD = picos.Constant('(L D^(1/2))', _calc_LsqrtD(H))
problem.add_constraint(
picos.block([
[Z, U * LsqrtD],
[LsqrtD.T * U.T, 'I'],
]) >> picos_eps)
elif inv_method == 'chol':
L = picos.Constant('L', _calc_L(H))
problem.add_constraint(
picos.block([
[Z, U * L],
[L.T * U.T, 'I'],
]) >> picos_eps)
elif inv_method == 'sqrt':
sqrtH = picos.Constant('sqrt(H)', _calc_sqrtH(H))
problem.add_constraint(
picos.block([
[Z, U * sqrtH],
[sqrtH.T * U.T, 'I'],
]) >> picos_eps)
elif inv_method == 'svd':
QSig = picos.Constant(
'Q Sigma', _calc_QSig(X_unshifted, alpha_tikhonov, tsvd))
problem.add_constraint(
picos.block([
[Z, U * QSig],
[QSig.T * U.T, 'I'],
]) >> picos_eps)
else:
# Should never, ever get here.
assert False
# Set objective
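# Minimizing c - 2 tr(U G^T) + tr(Z), with tr(Z) >= tr(U H U^T) implied by
# the constraints above, minimizes the regularized least-squares cost
# c - 2 tr(U G^T) + tr(U H U^T) over U.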
obj = c - 2 * picos.trace(U * G_T) + picos.trace(Z)
problem.set_objective('min', obj)
return problem
@staticmethod
def _extract_solution(problem: picos.Problem) -> np.ndarray:
"""Extract solution from an optimization problem."""
return np.array(problem.get_valued_variable('U'), ndmin=2).T
class LmiDmdc(LmiRegressor):
"""LMI-based DMDc with regularization.
Supports Tikhonov regularization, optionally mixed with matrix two-norm
regularization or nuclear norm regularization.
Attributes
----------
alpha_tikhonov_ : float
Tikhonov regularization coefficient used.
alpha_other_ : float
Matrix two norm or nuclear norm regularization coefficient used.
tsvd_unshifted_ : pykoop.Tsvd
Fit truncated SVD object for unshifted data matrix.
tsvd_shifted_ : pykoop.Tsvd
Fit truncated SVD object for shifted data matrix.
U_hat_ : np.ndarray
Reduced Koopman matrix for debugging.
solver_params_ : Dict[str, Any]
Solver parameters used (defaults merged with constructor input).
n_features_in_ : int
Number of features input, including episode feature.
n_states_in_ : int
Number of states input.
n_inputs_in_ : int
Number of inputs input.
episode_feature_ : bool
Indicates if episode feature was present during :func:`fit`.
coef_ : np.ndarray
Fit coefficient matrix.
Examples
--------
LMI DMDc without regularization
>>> kp = pykoop.KoopmanPipeline(regressor=pykoop.lmi_regressors.LmiDmdc())
>>> kp.fit(X_msd, n_inputs=1, episode_feature=True)
KoopmanPipeline(regressor=LmiDmdc())
LMI DMDc with Tikhonov regularization
>>> kp = pykoop.KoopmanPipeline(
... regressor=pykoop.lmi_regressors.LmiDmdc(
... alpha=1,
... reg_method='tikhonov',
... )
... )
>>> kp.fit(X_msd, n_inputs=1, episode_feature=True)
KoopmanPipeline(regressor=LmiDmdc(alpha=1))
LMI DMDc with matrix two-norm regularization
>>> kp = pykoop.KoopmanPipeline(
... regressor=pykoop.lmi_regressors.LmiDmdc(
... alpha=1,
... reg_method='twonorm',
... )
... )
>>> kp.fit(X_msd, n_inputs=1, episode_feature=True)
KoopmanPipeline(regressor=LmiDmdc(alpha=1, reg_method='twonorm'))
LMI DMDc with nuclear norm regularization and SVD truncation
>>> kp = pykoop.KoopmanPipeline(
... regressor=pykoop.lmi_regressors.LmiDmdc(
... alpha=1,
... reg_method='nuclear',
... tsvd_unshifted=pykoop.Tsvd('known_noise', 0.1),
... tsvd_shifted=pykoop.Tsvd('known_noise', 0.1),
... )
... )
>>> kp.fit(X_msd, n_inputs=1, episode_feature=True)
KoopmanPipeline(regressor=LmiDmdc(alpha=1, reg_method='nuclear',
tsvd_shifted=Tsvd(truncation='known_noise', truncation_param=0.1),
tsvd_unshifted=Tsvd(truncation='known_noise', truncation_param=0.1)))
"""
def __init__(self,
alpha: float = 0,
ratio: float = 1,
tsvd_unshifted: tsvd.Tsvd = None,
tsvd_shifted: tsvd.Tsvd = None,
reg_method: str = 'tikhonov',
square_norm: bool = False,
picos_eps: float = 0,
solver_params: Dict[str, Any] = None) -> None:
"""Instantiate :class:`LmiDmdc`.
Parameters
----------
alpha : float
Regularization coefficient. Can only be zero if
``reg_method='tikhonov'``.
ratio : float
Ratio of matrix two-norm or nuclear norm to use in mixed
regularization. If ``ratio=1``, no Tikhonov regularization is
used. Cannot be zero. Ignored if ``reg_method='tikhonov'``.
tsvd_unshifted : pykoop.Tsvd
Singular value truncation method used to change basis of unshifted
data matrix. If ``None``, economy SVD is used.
tsvd_shifted : pykoop.Tsvd
Singular value truncation method used to change basis of shifted
data matrix. If ``None``, economy SVD is used.
reg_method : str
Regularization method to use. Possible values are
- ``'tikhonov'`` -- pure Tikhonov regularization (``ratio``
is ignored),
- ``'twonorm'`` -- matrix two-norm regularization mixed with
Tikhonov regularization, or
- ``'nuclear'`` -- nuclear norm regularization mixed with Tikhonov
regularization.
square_norm : bool
Square norm in matrix two-norm or nuclear norm regularizer.
Enabling may increase computation time. Frobenius norm used in
Tikhonov regularizer is always squared.
picos_eps : float
Tolerance used for strict LMIs. If nonzero, should be larger than
solver tolerance.
solver_params : Dict[str, Any]
Parameters passed to PICOS :func:`picos.Problem.solve()`. By
default, allows chosen solver to select its own tolerances.
"""
self.alpha = alpha
self.ratio = ratio
self.tsvd_unshifted = tsvd_unshifted
self.tsvd_shifted = tsvd_shifted
self.reg_method = reg_method
self.square_norm = square_norm
self.picos_eps = picos_eps
self.solver_params = solver_params
def _fit_regressor(self, X_unshifted: np.ndarray,
X_shifted: np.ndarray) -> np.ndarray:
# Set solver parameters
self.solver_params_ = self._default_solver_params.copy()
if self.solver_params is not None:
self.solver_params_.update(self.solver_params)
# Compute regularization coefficients
if self.reg_method == 'tikhonov':
self.alpha_tikhonov_ = self.alpha
self.alpha_other_ = 0.0
else:
self.alpha_tikhonov_ = self.alpha * (1.0 - self.ratio)
self.alpha_other_ = self.alpha * self.ratio
# Get needed sizes
q, p = X_unshifted.shape
p_theta = X_shifted.shape[1]
# Clone TSVDs
self.tsvd_unshifted_ = (sklearn.base.clone(self.tsvd_unshifted)
if self.tsvd_unshifted is not None else
tsvd.Tsvd())
self.tsvd_shifted_ = (sklearn.base.clone(self.tsvd_shifted) if
self.tsvd_shifted is not None else tsvd.Tsvd())
# Compute SVDs
self.tsvd_unshifted_.fit(X_unshifted.T)
Q_tld = self.tsvd_unshifted_.left_singular_vectors_
sig_tld = self.tsvd_unshifted_.singular_values_
Z_tld = self.tsvd_unshifted_.right_singular_vectors_
self.tsvd_shifted_.fit(X_shifted.T)
Q_hat = self.tsvd_shifted_.left_singular_vectors_
sig_hat = self.tsvd_shifted_.singular_values_
Z_hat = self.tsvd_shifted_.right_singular_vectors_
# Form optimization problem
problem = self._create_base_problem(Q_tld, sig_tld, Z_tld, Q_hat,
sig_hat, Z_hat,
self.alpha_tikhonov_ / q,
self.picos_eps)
if self.reg_method == 'twonorm':
problem = _add_twonorm(problem, problem.variables['U_hat'],
self.alpha_other_ / q, self.square_norm,
self.picos_eps)
elif self.reg_method == 'nuclear':
problem = _add_nuclear(problem, problem.variables['U_hat'],
self.alpha_other_ / q, self.square_norm,
self.picos_eps)
# Solve optimization problem
problem.solve(**self.solver_params_)
# Save solution status
self.solution_status_ = problem.last_solution.claimedStatus
# Extract solution from ``Problem`` object
U_hat = self._extract_solution(problem).T
# Save SVDs and reduced U for debugging
self.U_hat_ = U_hat
# Reconstruct Koopman operator
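# Lift the reduced operator back to the full feature space: the state block
# is mapped through Q_hat on both sides, while the input block (the last
# p_upsilon columns) is left in its original coordinates.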
p_upsilon = p - p_theta
U = Q_hat @ U_hat @ linalg.block_diag(Q_hat, np.eye(p_upsilon)).T
coef = U.T
return coef
def _validate_parameters(self) -> None:
# Check regularization methods
valid_reg_methods = ['tikhonov', 'twonorm', 'nuclear']
if self.reg_method not in valid_reg_methods:
raise ValueError('`reg_method` must be one of '
f'{valid_reg_methods}.')
# Check ratio
if (self.ratio <= 0) or (self.ratio > 1):
raise ValueError('`ratio` must be positive and at most one.')
@staticmethod
def _create_base_problem(
Q_tld: np.ndarray,
sig_tld: np.ndarray,
Z_tld: np.ndarray,
Q_hat: np.ndarray,
sig_hat: np.ndarray,
Z_hat: np.ndarray,
alpha_tikhonov: float,
picos_eps: float,
) -> picos.Problem:
"""Create optimization problem."""
# Validate ``alpha``
if alpha_tikhonov < 0:
raise ValueError('Parameter `alpha` must be positive or zero.')
# Validate ``picos_eps``
if picos_eps < 0:
raise ValueError('Parameter `picos_eps` must be positive or zero.')
# Compute needed sizes
q = Z_hat.shape[0]
p, r_tld = Q_tld.shape
p_theta, r_hat = Q_hat.shape
# Compute Q_bar
p_upsilon = p - p_theta
Q_bar = linalg.block_diag(Q_hat, np.eye(p_upsilon)).T @ Q_tld
# Create optimization problem
problem = picos.Problem()
# Constants.
# Sigmas are scaled by ``1/sqrt(q)`` to scale cost function, like EDMD.
Sigma_hat_sq = picos.Constant('Sigma_hat^2', np.diag(sig_hat**2 / q))
# Add regularizer to ``Sigma_tld``
Sigma_hat = np.diag(sig_hat / np.sqrt(q))
Sigma_tld = np.diag(np.sqrt(sig_tld**2 / q + alpha_tikhonov))
big_constant = picos.Constant(
'Q_bar Sigma_tld Z_tld.T Z_hat Sigma_Hat',
Q_bar @ Sigma_tld @ Z_tld.T @ Z_hat @ Sigma_hat,
)
Q_bar_Sigma_tld = picos.Constant(
'Q_bar Sigma_tld',
Q_bar @ Sigma_tld,
)
m1 = picos.Constant('-1', -1 * np.eye(r_tld))
# Variables
U_hat = picos.RealVariable('U_hat', (r_hat, r_hat + p - p_theta))
W_hat = picos.SymmetricVariable('W_hat', (r_hat, r_hat))
# Constraints
problem.add_constraint(W_hat >> picos_eps)
problem.add_constraint(
picos.block([
[
-W_hat + Sigma_hat_sq - U_hat * big_constant
- big_constant.T * U_hat.T, U_hat * Q_bar_Sigma_tld
],
[Q_bar_Sigma_tld.T * U_hat.T, m1],
]) << picos_eps)
problem.set_objective('min', picos.trace(W_hat))
return problem
@staticmethod
def _extract_solution(problem: picos.Problem) -> np.ndarray:
"""Extract solution from an optimization problem."""
return np.array(problem.get_valued_variable('U_hat'), ndmin=2).T
class LmiEdmdSpectralRadiusConstr(LmiRegressor):
"""LMI-based EDMD with spectral radius constraint.
Optionally supports Tikhonov regularization.
Attributes
----------
tsvd_ : pykoop.Tsvd
Fit truncated SVD object.
P_ : np.ndarray
``P`` matrix for debugging.
objective_log_ : List[float]
Objective function history.
stop_reason_ : str
Reason iteration stopped.
n_iter_ : int
Number of iterations.
solver_params_ : Dict[str, Any]
Solver parameters used (defaults merged with constructor input).
n_features_in_ : int
Number of features input, including episode feature.
n_states_in_ : int
Number of states input.
n_inputs_in_ : int
Number of inputs input.
episode_feature_ : bool
Indicates if episode feature was present during :func:`fit`.
coef_ : np.ndarray
Fit coefficient matrix.
Examples
--------
Apply EDMD spectral radius constraint to mass-spring-damper data
>>> kp = pykoop.KoopmanPipeline(
... regressor=pykoop.lmi_regressors.LmiEdmdSpectralRadiusConstr(
... spectral_radius=0.9,
... )
... )
>>> kp.fit(X_msd, n_inputs=1, episode_feature=True)
KoopmanPipeline(regressor=LmiEdmdSpectralRadiusConstr(spectral_radius=0.9))
"""
def __init__(self,
spectral_radius: float = 1.0,
max_iter: int = 100,
iter_atol: float = 1e-6,
iter_rtol: float = 0,
alpha: float = 0,
inv_method: str = 'svd',
tsvd: tsvd.Tsvd = None,
picos_eps: float = 0,
solver_params: Dict[str, Any] = None) -> None:
"""Instantiate :class:`LmiEdmdSpectralRadiusConstr`.
To disable regularization, use ``alpha=0``.
Parameters
----------
spectral_radius : float
Maximum spectral radius.
max_iter : int
Maximum number of solver iterations.
iter_atol : float
Absolute tolerance for change in objective function value.
iter_rtol : float
Relative tolerance for change in objective function value.
alpha : float
Tikhonov regularization coefficient.
inv_method : str
Method to handle or avoid inversion of the ``H`` matrix when
forming the LMI problem. Possible values are
- ``'inv'`` -- invert ``H`` directly,
- ``'pinv'`` -- apply the Moore-Penrose pseudoinverse to ``H``,
- ``'eig'`` -- split ``H`` using an eigendecomposition,
- ``'ldl'`` -- split ``H`` using an LDL decomposition,
- ``'chol'`` -- split ``H`` using a Cholesky decomposition,
- ``'sqrt'`` -- split ``H`` using :func:`scipy.linalg.sqrtm()`, or
- ``'svd'`` -- split ``H`` using a singular value decomposition.
tsvd : pykoop.Tsvd
Singular value truncation method if ``inv_method='svd'``. If
``None``, economy SVD is used.
picos_eps : float
Tolerance used for strict LMIs. If nonzero, should be larger than
solver tolerance.
solver_params : Dict[str, Any]
Parameters passed to PICOS :func:`picos.Problem.solve()`. By
default, allows chosen solver to select its own tolerances.
"""
self.spectral_radius = spectral_radius
self.max_iter = max_iter
self.iter_atol = iter_atol
self.iter_rtol = iter_rtol
self.alpha = alpha
self.inv_method = inv_method
self.tsvd = tsvd
self.picos_eps = picos_eps
self.solver_params = solver_params
def _fit_regressor(self, X_unshifted: np.ndarray,
X_shifted: np.ndarray) -> np.ndarray:
# Set solver parameters
self.solver_params_ = self._default_solver_params.copy()
if self.solver_params is not None:
self.solver_params_.update(self.solver_params)
# Clone TSVD
self.tsvd_ = (sklearn.base.clone(self.tsvd)
if self.tsvd is not None else tsvd.Tsvd())
# Get needed sizes
p = X_unshifted.shape[1]
p_theta = X_shifted.shape[1]
# Make initial guesses and iterate
P = np.eye(p_theta)
# Set scope of other variables
U = np.zeros((p_theta, p))
self.objective_log_ = []
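# Alternate between two convex subproblems: Problem A holds P fixed and
# solves for U subject to the spectral-radius LMI, while Problem B holds U
# fixed and searches for a new P certifying the constraint. Iterate until
# the objective stops changing (within iter_atol/iter_rtol) or max_iter is
# reached.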
for k in range(self.max_iter):
# Formulate Problem A
problem_a = self._create_problem_a(X_unshifted, X_shifted, P)
# Solve Problem A
if polite_stop:
self.stop_reason_ = 'User requested stop.'
log.warn(self.stop_reason_)
break
log.info(f'Solving problem A{k}')
problem_a.solve(**self.solver_params_)
solution_status_a = problem_a.last_solution.claimedStatus
if solution_status_a != 'optimal':
self.stop_reason_ = (
'Unable to solve `problem_a`. Used last valid `U`. '
f'Solution status: `{solution_status_a}`.')
log.warn(self.stop_reason_)
break
U = np.array(problem_a.get_valued_variable('U'), ndmin=2)
# Check stopping condition
self.objective_log_.append(problem_a.value)
if len(self.objective_log_) > 1:
curr_obj = self.objective_log_[-1]
prev_obj = self.objective_log_[-2]
diff_obj = prev_obj - curr_obj
log.info(f'Objective: {curr_obj}; Change: {diff_obj}.')
if np.allclose(curr_obj,
prev_obj,
atol=self.iter_atol,
rtol=self.iter_rtol):
self.stop_reason_ = f'Reached tolerance {diff_obj}'
break
# Formulate Problem B
problem_b = self._create_problem_b(U)
# Solve Problem B
if polite_stop:
self.stop_reason_ = 'User requested stop.'
log.warn(self.stop_reason_)
break
log.info(f'Solving problem B{k}')
problem_b.solve(**self.solver_params_)
solution_status_b = problem_b.last_solution.claimedStatus
if solution_status_b != 'optimal':
self.stop_reason_ = (
'Unable to solve `problem_b`. Used last valid `U`. '
f'Solution status: `{solution_status_b}`.')
log.warn(self.stop_reason_)
break
P = np.array(problem_b.get_valued_variable('P'), ndmin=2)
else:
self.stop_reason_ = f'Reached maximum iterations {self.max_iter}'
log.warn(self.stop_reason_)
self.n_iter_ = k + 1
coef = U.T
# Only useful for debugging
self.P_ = P
return coef
def _validate_parameters(self) -> None:
# Check spectral radius
if self.spectral_radius <= 0:
raise ValueError('`spectral_radius` must be positive.')
if self.max_iter <= 0:
raise ValueError('`max_iter` must be positive.')
if self.iter_atol < 0:
raise ValueError('`iter_atol` must be positive or zero.')
if self.iter_rtol < 0:
raise ValueError('`iter_rtol` must be positive or zero.')
def _create_problem_a(self, X_unshifted: np.ndarray, X_shifted: np.ndarray,
P: np.ndarray) -> picos.Problem:
"""Create first problem in iteration scheme."""
q = X_unshifted.shape[0]
problem_a = LmiEdmd._create_base_problem(X_unshifted, X_shifted,
self.alpha / q,
self.inv_method, self.tsvd_,
self.picos_eps)
# Extract information from problem
U = problem_a.variables['U']
# Get needed sizes
p_theta = U.shape[0]
# Add new constraints
rho_bar = picos.Constant('rho_bar', self.spectral_radius)
P = picos.Constant('P', P)
problem_a.add_constraint(
picos.block([
# Use ``(P + P.T) / 2`` so PICOS understands it's symmetric.
[rho_bar * (P + P.T) / 2, U[:, :p_theta].T * P],
[P.T * U[:, :p_theta], rho_bar * (P + P.T) / 2],
]) >> self.picos_eps)
return problem_a
def _create_problem_b(self, U: np.ndarray) -> picos.Problem:
"""Create second problem in iteration scheme."""
# Create optimization problem
problem_b = picos.Problem()
# Get needed sizes
p_theta = U.shape[0]
# Create constants
rho_bar = picos.Constant('rho_bar', self.spectral_radius)
U = picos.Constant('U', U)
# Create variables
P = picos.SymmetricVariable('P', p_theta)
# Add constraints
problem_b.add_constraint(P >> self.picos_eps)
problem_b.add_constraint(
picos.block([
[rho_bar * P, U[:, :p_theta].T * P],
[P.T * U[:, :p_theta], rho_bar * P],
]) >> self.picos_eps)
# Set objective
problem_b.set_objective('find')
return problem_b
class LmiDmdcSpectralRadiusConstr(LmiRegressor):
"""LMI-based Dmdc with spectral radius constraint.
Optionally supports Tikhonov regularization.
Attributes
----------
tsvd_unshifted_ : pykoop.Tsvd
Fit truncated SVD object for unshifted data matrix.
tsvd_shifted_ : pykoop.Tsvd
Fit truncated SVD object for shifted data matrix.
U_hat_ : np.ndarray
Reduced Koopman matrix for debugging.
P_ : np.ndarray
``P`` matrix for debugging.
objective_log_ : List[float]
Objective function history.
stop_reason_ : str
Reason iteration stopped.
n_iter_ : int
Number of iterations.
solver_params_ : Dict[str, Any]
Solver parameters used (defaults merged with constructor input).
n_features_in_ : int
Number of features input, including episode feature.
n_states_in_ : int
Number of states input.
n_inputs_in_ : int
Number of inputs input.
episode_feature_ : bool
Indicates if episode feature was present during :func:`fit`.
coef_ : np.ndarray
Fit coefficient matrix.
Examples
--------
Apply DMDc spectral radius constraint to mass-spring-damper data
>>> kp = pykoop.KoopmanPipeline(
... regressor=pykoop.lmi_regressors.LmiDmdcSpectralRadiusConstr(
... spectral_radius=0.9,
... tsvd_unshifted=pykoop.Tsvd('cutoff', 1e-6),
... tsvd_shifted=pykoop.Tsvd('cutoff', 1e-6),
... )
... )
>>> kp.fit(X_msd, n_inputs=1, episode_feature=True)
KoopmanPipeline(regressor=LmiDmdcSpectralRadiusConstr(spectral_radius=0.9,
tsvd_shifted=Tsvd(truncation='cutoff', truncation_param=1e-06),
tsvd_unshifted=Tsvd(truncation='cutoff', truncation_param=1e-06)))
"""
def __init__(self,
spectral_radius: float = 1.0,
max_iter: int = 100,
iter_atol: float = 1e-6,
iter_rtol: float = 0,
alpha: float = 0,
tsvd_unshifted: tsvd.Tsvd = None,
tsvd_shifted: tsvd.Tsvd = None,
picos_eps: float = 0,
solver_params: Dict[str, Any] = None) -> None:
"""Instantiate :class:`LmiDmdcSpectralRadiusConstr`.
To disable regularization, use ``alpha=0``.
Parameters
----------
spectral_radius : float
Maximum spectral radius.
max_iter : int
Maximum number of solver iterations.
iter_atol : float
Absolute tolerance for change in objective function value.
iter_rtol : float
Relative tolerance for change in objective function value.
alpha : float
Tikhonov regularization coefficient.
tsvd_unshifted : pykoop.Tsvd
Singular value truncation method used to change basis of unshifted
data matrix. If ``None``, economy SVD is used.
tsvd_shifted : pykoop.Tsvd
Singular value truncation method used to change basis of shifted
data matrix. If ``None``, economy SVD is used.
picos_eps : float
Tolerance used for strict LMIs. If nonzero, should be larger than
solver tolerance.
solver_params : Dict[str, Any]
Parameters passed to PICOS :func:`picos.Problem.solve()`. By
default, allows chosen solver to select its own tolerances.
"""
self.spectral_radius = spectral_radius
self.max_iter = max_iter
self.iter_atol = iter_atol
self.iter_rtol = iter_rtol
self.alpha = alpha
self.tsvd_unshifted = tsvd_unshifted
self.tsvd_shifted = tsvd_shifted
self.picos_eps = picos_eps
self.solver_params = solver_params
def _fit_regressor(self, X_unshifted: np.ndarray,
X_shifted: np.ndarray) -> np.ndarray:
# Set solver parameters
self.solver_params_ = self._default_solver_params.copy()
if self.solver_params is not None:
self.solver_params_.update(self.solver_params)
# Get needed sizes
p = X_unshifted.shape[1]
p_theta = X_shifted.shape[1]
# Clone TSVDs
self.tsvd_unshifted_ = (sklearn.base.clone(self.tsvd_unshifted)
if self.tsvd_unshifted is not None else
tsvd.Tsvd())
self.tsvd_shifted_ = (sklearn.base.clone(self.tsvd_shifted) if
self.tsvd_shifted is not None else tsvd.Tsvd())
# Compute SVDs
self.tsvd_unshifted_.fit(X_unshifted.T)
Q_tld = self.tsvd_unshifted_.left_singular_vectors_
sig_tld = self.tsvd_unshifted_.singular_values_
Z_tld = self.tsvd_unshifted_.right_singular_vectors_
self.tsvd_shifted_.fit(X_shifted.T)
Q_hat = self.tsvd_shifted_.left_singular_vectors_
sig_hat = self.tsvd_shifted_.singular_values_
Z_hat = self.tsvd_shifted_.right_singular_vectors_
# Get truncation values
r_tld = Q_tld.shape[1]
r_hat = Q_hat.shape[1]
# Make initial guesses and iterate
P = np.eye(r_hat)
# Set scope of other variables
U_hat = np.zeros((r_hat, r_hat + p - p_theta))
self.objective_log_ = []
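# Same alternating scheme as LmiEdmdSpectralRadiusConstr, but carried out
# on the reduced matrix U_hat in the truncated SVD bases.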
for k in range(self.max_iter):
# Formulate Problem A
problem_a = self._create_problem_a(Q_tld, sig_tld, Z_tld, Q_hat,
sig_hat, Z_hat, P)
# Solve Problem A
if polite_stop:
self.stop_reason_ = 'User requested stop.'
log.warn(self.stop_reason_)
break
log.info(f'Solving problem A{k}')
problem_a.solve(**self.solver_params_)
solution_status_a = problem_a.last_solution.claimedStatus
if solution_status_a != 'optimal':
self.stop_reason_ = (
'Unable to solve `problem_a`. Used last valid `U_hat`. '
f'Solution status: `{solution_status_a}`.')
log.warn(self.stop_reason_)
break
U_hat = np.array(problem_a.get_valued_variable('U_hat'), ndmin=2)
# Check stopping condition
self.objective_log_.append(problem_a.value)
if len(self.objective_log_) > 1:
curr_obj = self.objective_log_[-1]
prev_obj = self.objective_log_[-2]
diff_obj = prev_obj - curr_obj
log.info(f'Objective: {curr_obj}; Change: {diff_obj}.')
if np.allclose(curr_obj,
prev_obj,
atol=self.iter_atol,
rtol=self.iter_rtol):
self.stop_reason_ = f'Reached tolerance {diff_obj}'
break
# Formulate Problem B
problem_b = self._create_problem_b(U_hat)
# Solve Problem B
if polite_stop:
self.stop_reason_ = 'User requested stop.'
log.warn(self.stop_reason_)
break
log.info(f'Solving problem B{k}')
problem_b.solve(**self.solver_params_)
solution_status_b = problem_b.last_solution.claimedStatus
if solution_status_b != 'optimal':
self.stop_reason_ = (
'Unable to solve `problem_b`. Used last valid `U_hat`. '
f'Solution status: `{solution_status_b}`.')
log.warn(self.stop_reason_)
break
P = np.array(problem_b.get_valued_variable('P'), ndmin=2)
else:
self.stop_reason_ = f'Reached maximum iterations {self.max_iter}'
log.warn(self.stop_reason_)
self.n_iter_ = k + 1
p_upsilon = p - p_theta
U = Q_hat @ U_hat @ linalg.block_diag(Q_hat, np.eye(p_upsilon)).T
coef = U.T
# Only useful for debugging
self.U_hat_ = U_hat
self.P_ = P
return coef
def _validate_parameters(self) -> None:
# Check spectral radius
if self.spectral_radius <= 0:
raise ValueError('`spectral_radius` must be positive.')
if self.max_iter <= 0:
raise ValueError('`max_iter` must be positive.')
if self.iter_atol < 0:
raise ValueError('`iter_atol` must be positive or zero.')
if self.iter_rtol < 0:
raise ValueError('`iter_rtol` must be positive or zero.')
def _create_problem_a(
self,
Q_tld: np.ndarray,
sig_tld: np.ndarray,
Z_tld: np.ndarray,
Q_hat: np.ndarray,
sig_hat: np.ndarray,
Z_hat: np.ndarray,
P: np.ndarray,
) -> picos.Problem:
"""Create first problem in iteration scheme."""
q = Z_hat.shape[0]
problem_a = LmiDmdc._create_base_problem(Q_tld, sig_tld, Z_tld, Q_hat,
sig_hat, Z_hat,
self.alpha / q,
self.picos_eps)
# Extract information from problem
U_hat = problem_a.variables['U_hat']
# Get needed sizes
p_theta = U_hat.shape[0]
# Add new constraints
rho_bar = picos.Constant('rho_bar', self.spectral_radius)
P = picos.Constant('P', P)
problem_a.add_constraint(
picos.block([
# Use ``(P + P.T) / 2`` so PICOS understands it's symmetric.
[rho_bar * (P + P.T) / 2, U_hat[:, :p_theta].T * P],
[P.T * U_hat[:, :p_theta], rho_bar * (P + P.T) / 2],
]) >> self.picos_eps)
return problem_a
def _create_problem_b(self, U_hat: np.ndarray) -> picos.Problem:
"""Create second problem in iteration scheme."""
# Create optimization problem
problem_b = picos.Problem()
# Get needed sizes
p_theta = U_hat.shape[0]
# Create constants
rho_bar = picos.Constant('rho_bar', self.spectral_radius)
U = picos.Constant('U', U_hat)
# Create variables
P = picos.SymmetricVariable('P', p_theta)
# Add constraints
problem_b.add_constraint(P >> self.picos_eps)
problem_b.add_constraint(
picos.block([
[rho_bar * P, U_hat[:, :p_theta].T * P],
[P.T * U_hat[:, :p_theta], rho_bar * P],
]) >> self.picos_eps)
# Set objective
problem_b.set_objective('find')
return problem_b
class LmiEdmdHinfReg(LmiRegressor):
"""LMI-based EDMD with H-infinity norm regularization.
Optionally supports additional Tikhonov regularization.
Attributes
----------
tsvd_ : pykoop.Tsvd
Fit truncated SVD object.
P_ : np.ndarray
``P`` matrix for debugging.
gamma_ : np.ndarray
H-infinity norm for debugging.
objective_log_ : List[float]
Objective function history.
stop_reason_ : str
Reason iteration stopped.
n_iter_ : int
Number of iterations.
solver_params_ : Dict[str, Any]
Solver parameters used (defaults merged with constructor input).
n_features_in_ : int
Number of features input, including episode feature.
n_states_in_ : int
Number of states input.
n_inputs_in_ : int
Number of inputs input.
episode_feature_ : bool
Indicates if episode feature was present during :func:`fit`.
coef_ : np.ndarray
Fit coefficient matrix.
Examples
--------
Apply EDMD with H-infinity regularization to mass-spring-damper data
>>> kp = pykoop.KoopmanPipeline(
... regressor=pykoop.lmi_regressors.LmiEdmdHinfReg(
... alpha=1e-3,
... )
... )
>>> kp.fit(X_msd, n_inputs=1, episode_feature=True)
KoopmanPipeline(regressor=LmiEdmdHinfReg(alpha=0.001))
Apply EDMD with weighted H-infinity regularization to mass-spring-damper
data
>>> from scipy import signal
>>> ss_ct = signal.ZerosPolesGain([0], [-4], [1]).to_ss()
>>> ss_dt = ss_ct.to_discrete(dt=0.1, method='bilinear')
>>> kp = pykoop.KoopmanPipeline(
... regressor=pykoop.lmi_regressors.LmiEdmdHinfReg(
... alpha=1e-3,
... weight=('pre', ss_dt.A, ss_dt.B, ss_dt.C, ss_dt.D),
... )
... )
>>> kp.fit(X_msd, n_inputs=1, episode_feature=True)
KoopmanPipeline(regressor=LmiEdmdHinfReg(alpha=0.001,
weight=('pre', array([[...]]), array([[...]]), array([[...]]),
array([[...]]))))
"""
def __init__(
self,
alpha: float = 1,
ratio: float = 1,
weight: Tuple[str, np.ndarray, np.ndarray, np.ndarray,
np.ndarray] = None,
max_iter: int = 100,
iter_atol: float = 1e-6,
iter_rtol: float = 0,
inv_method: str = 'svd',
tsvd: tsvd.Tsvd = None,
square_norm: bool = False,
picos_eps: float = 0,
solver_params: Dict[str, Any] = None,
) -> None:
"""Instantiate :class:`LmiEdmdHinfReg`.
Supports cascading the plant with an LTI weighting function.
Parameters
----------
alpha : float
Regularization coefficient. Cannot be zero.
ratio : float
Ratio of H-infinity norm to use in mixed regularization. If
``ratio=1``, no Tikhonov regularization is used. Cannot be zero.
weight : Tuple[str, np.ndarray, np.ndarray, np.ndarray, np.ndarray]
Tuple containing weight type (``'pre'`` or ``'post'``), and the
weight state space matrices (``A``, ``B``, ``C``, and ``D``). If
``None``, no weighting is used.
max_iter : int
Maximum number of solver iterations.
iter_atol : float
Absolute tolerance for change in objective function value.
iter_rtol : float
Relative tolerance for change in objective function value.
inv_method : str
Method to handle or avoid inversion of the ``H`` matrix when
forming the LMI problem. Possible values are
- ``'inv'`` -- invert ``H`` directly,
- ``'pinv'`` -- apply the Moore-Penrose pseudoinverse to ``H``,
- ``'eig'`` -- split ``H`` using an eigendecomposition,
- ``'ldl'`` -- split ``H`` using an LDL decomposition,
- ``'chol'`` -- split ``H`` using a Cholesky decomposition,
- ``'sqrt'`` -- split ``H`` using :func:`scipy.linalg.sqrtm()`, or
- ``'svd'`` -- split ``H`` using a singular value decomposition.
tsvd : pykoop.Tsvd
Singular value truncation method if ``inv_method='svd'``. If
``None``, economy SVD is used.
square_norm : bool
Square the H-infinity norm in the regularizer. Enabling may increase
computation time. The Frobenius norm used in the Tikhonov regularizer is
always squared.
picos_eps : float
Tolerance used for strict LMIs. If nonzero, should be larger than
solver tolerance.
solver_params : Dict[str, Any]
Parameters passed to PICOS :func:`picos.Problem.solve()`. By
default, allows chosen solver to select its own tolerances.
"""
self.alpha = alpha
self.ratio = ratio
self.weight = weight
self.max_iter = max_iter
self.iter_atol = iter_atol
self.iter_rtol = iter_rtol
self.inv_method = inv_method
self.tsvd = tsvd
self.square_norm = square_norm
self.picos_eps = picos_eps
self.solver_params = solver_params
def _fit_regressor(self, X_unshifted: np.ndarray,
X_shifted: np.ndarray) -> np.ndarray:
# Set solver parameters
self.solver_params_ = self._default_solver_params.copy()
if self.solver_params is not None:
self.solver_params_.update(self.solver_params)
# Clone TSVD
self.tsvd_ = (sklearn.base.clone(self.tsvd)
if self.tsvd is not None else tsvd.Tsvd())
# Set regularization coefficients
self.alpha_tikhonov_ = self.alpha * (1 - self.ratio)
self.alpha_other_ = self.alpha * self.ratio
# Get needed sizes
p = X_unshifted.shape[1]
p_theta = X_shifted.shape[1]
# Check that at least one input is present
if p_theta == p:
# If you remove the ``{p} feature(s)`` part of this message,
# the scikit-learn estimator_checks will fail!
raise ValueError('`LmiEdmdHinfReg()` requires an input to '
'function. `X` and `y` must therefore have '
'different numbers of features. `X` and `y` both '
f'have {p} feature(s).')
# Set up weights
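# Size the Lyapunov-like variable P for the weighted realization: when an
# LTI weight is cascaded with the plant, one copy of the weight's state is
# stacked per weighted channel, so P must cover the Koopman states plus the
# weight states.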
if self.weight is None:
P = np.eye(p_theta)
elif self.weight[0] == 'pre':
n_u = p - p_theta
P = np.eye(p_theta + n_u * self.weight[1].shape[0])
elif self.weight[0] == 'post':
n_x = p_theta
P = np.eye(p_theta + n_x * self.weight[1].shape[0])
else:
# Already checked. Should never get here.
assert False
# Solve optimization problem iteratively
U = np.zeros((p_theta, p))
gamma = np.zeros((1, ))
self.objective_log_ = []
for k in range(self.max_iter):
# Formulate Problem A
problem_a = self._create_problem_a(X_unshifted, X_shifted, P)
# Solve Problem A
if polite_stop:
self.stop_reason_ = 'User requested stop.'
log.warn(self.stop_reason_)
break
log.info(f'Solving problem A{k}')
problem_a.solve(**self.solver_params_)
solution_status_a = problem_a.last_solution.claimedStatus
if solution_status_a != 'optimal':
self.stop_reason_ = (
'Unable to solve `problem_a`. Used last valid `U`. '
f'Solution status: `{solution_status_a}`.')
log.warn(self.stop_reason_)
break
U = np.array(problem_a.get_valued_variable('U'), ndmin=2)
gamma = np.array(problem_a.get_valued_variable('gamma'))
self.objective_log_.append(problem_a.value)
if len(self.objective_log_) > 1:
curr_obj = self.objective_log_[-1]
prev_obj = self.objective_log_[-2]
diff_obj = prev_obj - curr_obj
log.info(f'Objective: {curr_obj}; Change: {diff_obj}.')
if np.allclose(curr_obj,
prev_obj,
atol=self.iter_atol,
rtol=self.iter_rtol):
self.stop_reason_ = f'Reached tolerance {diff_obj}'
break
# Formulate Problem B
problem_b = self._create_problem_b(U, gamma)
# Solve Problem B
if polite_stop:
self.stop_reason_ = 'User requested stop.'
log.warn(self.stop_reason_)
break
log.info(f'Solving problem B{k}')
problem_b.solve(**self.solver_params_)
solution_status_b = problem_b.last_solution.claimedStatus
if solution_status_b != 'optimal':
self.stop_reason_ = (
'Unable to solve `problem_b`. Used last valid `U`. '
f'Solution status: `{solution_status_b}`.')
log.warn(self.stop_reason_)
break
P = np.array(problem_b.get_valued_variable('P'), ndmin=2)
else:
self.stop_reason_ = f'Reached maximum iterations {self.max_iter}'
log.warn(self.stop_reason_)
self.n_iter_ = k + 1
coef = U.T
# Only useful for debugging
self.P_ = P
self.gamma_ = gamma
return coef
def _validate_parameters(self) -> None:
if (self.ratio <= 0) or (self.ratio > 1):
raise ValueError('`ratio` must be positive and at most one.')
valid_weight_types = ['pre', 'post']
if self.weight is not None:
if self.weight[0] not in valid_weight_types:
raise ValueError('First element of the `weight` must be one '
f'of {valid_weight_types}.')
if self.max_iter <= 0:
raise ValueError('`max_iter` must be positive.')
if self.iter_atol < 0:
raise ValueError('`iter_atol` must be positive or zero.')
if self.iter_rtol < 0:
raise ValueError('`iter_rtol` must be positive or zero.')
def _create_problem_a(self, X_unshifted: np.ndarray, X_shifted: np.ndarray,
P: np.ndarray) -> picos.Problem:
"""Create first problem in iteration scheme."""
q = X_unshifted.shape[0]
problem_a = LmiEdmd._create_base_problem(X_unshifted, X_shifted,
self.alpha_tikhonov_ / q,
self.inv_method, self.tsvd_,
self.picos_eps)
# Extract information from problem
U = problem_a.variables['U']
direction = problem_a.objective.direction
objective = problem_a.objective.function
# Get needed sizes
p_theta = U.shape[0]
# Add new constraint
P = picos.Constant('P', P)
gamma = picos.RealVariable('gamma', 1)
# Get weighted state space matrices
A, B, C, D = _create_ss(U, self.weight)
gamma_33 = picos.diag(gamma, D.shape[1])
gamma_44 = picos.diag(gamma, D.shape[0])
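# Bounded-real-lemma-style LMI: with the fixed P, feasibility of this block
# constraint certifies that the H-infinity norm of the weighted realization
# (A, B, C, D) is at most gamma, which is then penalized in the objective
# below.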
problem_a.add_constraint(
picos.block([
[P, A * P, B, 0],
[P.T * A.T, P, 0, P * C.T],
[B.T, 0, gamma_33, D.T],
[0, C * P.T, D, gamma_44],
]) >> self.picos_eps) # yapf: disable
# Add term to cost function
if self.alpha_other_ <= 0:
raise ValueError('`alpha_other_` must be positive.')
alpha_scaled = picos.Constant('alpha_scaled_inf',
self.alpha_other_ / q)
if self.square_norm:
objective += alpha_scaled * gamma**2
else:
objective += alpha_scaled * gamma
problem_a.set_objective(direction, objective)
return problem_a
def _create_problem_b(self, U: np.ndarray,
gamma: np.ndarray) -> picos.Problem:
"""Create second problem in iteration scheme."""
# Create optimization problem
problem_b = picos.Problem()
# Get needed sizes
p_theta = U.shape[0]
# Create constants
U = picos.Constant('U', U)
gamma = picos.Constant('gamma', gamma)
# Get weighted state space matrices
A, B, C, D = _create_ss(U, self.weight)
# Create variables
P = picos.SymmetricVariable('P', A.shape[0])
# Add constraints
problem_b.add_constraint(P >> self.picos_eps)
gamma_33 = picos.diag(gamma, D.shape[1])
gamma_44 = picos.diag(gamma, D.shape[0])
problem_b.add_constraint(
picos.block([
[P, A * P, B, 0],
[P.T * A.T, P, 0, P * C.T],
[B.T, 0, gamma_33, D.T],
[0, C * P.T, D, gamma_44],
]) >> self.picos_eps) # yapf: disable
# Set objective
problem_b.set_objective('find')
return problem_b
class LmiDmdcHinfReg(LmiRegressor):
"""LMI-based DMDc with H-infinity norm regularization.
Optionally supports additional Tikhonov regularization.
Attributes
----------
tsvd_unshifted_ : pykoop.Tsvd
Fit truncated SVD object for unshifted data matrix.
tsvd_shifted_ : pykoop.Tsvd
Fit truncated SVD object for shifted data matrix.
U_hat_ : np.ndarray
Reduced Koopman matrix for debugging.
P_ : np.ndarray
``P`` matrix for debugging.
gamma_ : np.ndarray
H-infinity norm for debugging.
objective_log_ : List[float]
Objective function history.
stop_reason_ : str
Reason iteration stopped.
n_iter_ : int
Number of iterations.
solver_params_ : Dict[str, Any]
Solver parameters used (defaults merged with constructor input).
n_features_in_ : int
Number of features input, including episode feature.
n_states_in_ : int
Number of states input.
n_inputs_in_ : int
Number of inputs input.
episode_feature_ : bool
Indicates if episode feature was present during :func:`fit`.
coef_ : np.ndarray
Fit coefficient matrix.
Examples
--------
Apply DMDc with H-infinity regularization to mass-spring-damper data
>>> kp = pykoop.KoopmanPipeline(
... regressor=pykoop.lmi_regressors.LmiDmdcHinfReg(
... alpha=1e-3,
... )
... )
>>> kp.fit(X_msd, n_inputs=1, episode_feature=True)
KoopmanPipeline(regressor=LmiDmdcHinfReg(alpha=0.001))
Apply reduced-order DMDc with weighted H-infinity regularization to
mass-spring-damper data
>>> from scipy import signal
>>> ss_ct = signal.ZerosPolesGain([0], [-4], [1]).to_ss()
>>> ss_dt = ss_ct.to_discrete(dt=0.1, method='bilinear')
>>> kp = pykoop.KoopmanPipeline(
... regressor=pykoop.lmi_regressors.LmiDmdcHinfReg(
... alpha=1e-3,
... weight=('pre', ss_dt.A, ss_dt.B, ss_dt.C, ss_dt.D),
... tsvd_unshifted=pykoop.Tsvd('cutoff', 1e-3),
... tsvd_shifted=pykoop.Tsvd('cutoff', 1e-3),
... )
... )
>>> kp.fit(X_msd, n_inputs=1, episode_feature=True)
KoopmanPipeline(regressor=LmiDmdcHinfReg(alpha=0.001,
tsvd_shifted=Tsvd(truncation='cutoff', truncation_param=0.001),
tsvd_unshifted=Tsvd(truncation='cutoff', truncation_param=0.001),
weight=('pre', array([[...]]), array([[...]]), array([[...]]),
array([[...]]))))
"""
def __init__(
self,
alpha: float = 1,
ratio: float = 1,
weight: Tuple[str, np.ndarray, np.ndarray, np.ndarray,
np.ndarray] = None,
max_iter: int = 100,
iter_atol: float = 1e-6,
iter_rtol: float = 0,
tsvd_unshifted: tsvd.Tsvd = None,
tsvd_shifted: tsvd.Tsvd = None,
square_norm: bool = False,
picos_eps: float = 0,
solver_params: Dict[str, Any] = None,
) -> None:
"""Instantiate :class:`LmiDmdcHinfReg`.
Supports cascading the plant with an LTI weighting function.
Parameters
----------
alpha : float
Regularization coefficient. Cannot be zero.
ratio : float
Ratio of H-infinity norm to use in mixed regularization. If
``ratio=1``, no Tikhonov regularization is used. Cannot be zero.
weight : Tuple[str, np.ndarray, np.ndarray, np.ndarray, np.ndarray]
Tuple containing weight type (``'pre'`` or ``'post'``), and the
weight state space matrices (``A``, ``B``, ``C``, and ``D``). If
``None``, no weighting is used.
max_iter : int
Maximum number of solver iterations.
iter_atol : float
Absolute tolerance for change in objective function value.
iter_rtol : float
Relative tolerance for change in objective function value.
tsvd_unshifted : pykoop.Tsvd
Singular value truncation method used to change basis of unshifted
data matrix. If ``None``, economy SVD is used.
tsvd_shifted : pykoop.Tsvd
Singular value truncation method used to change basis of shifted
data matrix. If ``None``, economy SVD is used.
square_norm : bool
Square the H-infinity norm in the regularizer. Enabling may increase
computation time. The Frobenius norm used in the Tikhonov regularizer is
always squared.
picos_eps : float
Tolerance used for strict LMIs. If nonzero, should be larger than
solver tolerance.
solver_params : Dict[str, Any]
Parameters passed to PICOS :func:`picos.Problem.solve()`. By
default, allows chosen solver to select its own tolerances.
"""
self.alpha = alpha
self.ratio = ratio
self.weight = weight
self.max_iter = max_iter
self.iter_atol = iter_atol
self.iter_rtol = iter_rtol
self.tsvd_unshifted = tsvd_unshifted
self.tsvd_shifted = tsvd_shifted
self.square_norm = square_norm
self.picos_eps = picos_eps
self.solver_params = solver_params
def _fit_regressor(self, X_unshifted: np.ndarray,
X_shifted: np.ndarray) -> np.ndarray:
# Set solver parameters
self.solver_params_ = self._default_solver_params.copy()
if self.solver_params is not None:
self.solver_params_.update(self.solver_params)
# Clone TSVDs
self.tsvd_unshifted_ = (sklearn.base.clone(self.tsvd_unshifted)
if self.tsvd_unshifted is not None else
tsvd.Tsvd())
self.tsvd_shifted_ = (sklearn.base.clone(self.tsvd_shifted) if
self.tsvd_shifted is not None else tsvd.Tsvd())
# Set regularization coefficients
self.alpha_tikhonov_ = self.alpha * (1 - self.ratio)
self.alpha_other_ = self.alpha * self.ratio
# Get needed sizes
p = X_unshifted.shape[1]
p_theta = X_shifted.shape[1]
# Check that at least one input is present
if p_theta == p:
# If you remove the ``{p} features(s)`` part of this message,
# the scikit-learn estimator_checks will fail!
raise ValueError('`LmiDmdcHinfReg()` requires an input to '
'function. `X` and `y` must therefore have '
'different numbers of features. `X` and `y` both '
f'have {p} feature(s).')
# Compute SVDs
self.tsvd_unshifted_.fit(X_unshifted.T)
Q_tld = self.tsvd_unshifted_.left_singular_vectors_
sig_tld = self.tsvd_unshifted_.singular_values_
Z_tld = self.tsvd_unshifted_.right_singular_vectors_
self.tsvd_shifted_.fit(X_shifted.T)
Q_hat = self.tsvd_shifted_.left_singular_vectors_
sig_hat = self.tsvd_shifted_.singular_values_
Z_hat = self.tsvd_shifted_.right_singular_vectors_
r_tld = Q_tld.shape[1]
r_hat = Q_hat.shape[1]
# Set up weights
if self.weight is None:
P = np.eye(r_hat)
elif self.weight[0] == 'pre':
p_upsilon = p - p_theta
P = np.eye(r_hat + p_upsilon * self.weight[1].shape[0])
elif self.weight[0] == 'post':
P = np.eye(r_hat + p_theta * self.weight[1].shape[0])
else:
# Already checked. Should never get here.
assert False
# Solve optimization problem iteratively
U_hat = np.zeros((r_hat, r_hat + p - p_theta))
gamma = np.zeros((1, ))
self.objective_log_ = []
for k in range(self.max_iter):
# Formulate Problem A
problem_a = self._create_problem_a(Q_tld, sig_tld, Z_tld, Q_hat,
sig_hat, Z_hat, P)
# Solve Problem A
if polite_stop:
self.stop_reason_ = 'User requested stop.'
log.warn(self.stop_reason_)
break
log.info(f'Solving problem A{k}')
problem_a.solve(**self.solver_params_)
solution_status_a = problem_a.last_solution.claimedStatus
if solution_status_a != 'optimal':
self.stop_reason_ = (
'Unable to solve `problem_a`. Used last valid `U_hat`. '
f'Solution status: `{solution_status_a}`.')
log.warn(self.stop_reason_)
break
U_hat = np.array(problem_a.get_valued_variable('U_hat'), ndmin=2)
gamma = np.array(problem_a.get_valued_variable('gamma'))
self.objective_log_.append(problem_a.value)
if len(self.objective_log_) > 1:
curr_obj = self.objective_log_[-1]
prev_obj = self.objective_log_[-2]
diff_obj = prev_obj - curr_obj
log.info(f'Objective: {curr_obj}; Change: {diff_obj}.')
if np.allclose(curr_obj,
prev_obj,
atol=self.iter_atol,
rtol=self.iter_rtol):
self.stop_reason_ = f'Reached tolerance {diff_obj}'
break
# Formulate Problem B
problem_b = self._create_problem_b(U_hat, gamma)
# Solve Problem B
if polite_stop:
self.stop_reason_ = 'User requested stop.'
log.warn(self.stop_reason_)
break
log.info(f'Solving problem B{k}')
problem_b.solve(**self.solver_params_)
solution_status_b = problem_b.last_solution.claimedStatus
if solution_status_b != 'optimal':
self.stop_reason_ = (
'Unable to solve `problem_b`. Used last valid `U_hat`. '
f'Solution status: `{solution_status_b}`.')
log.warn(self.stop_reason_)
break
P = np.array(problem_b.get_valued_variable('P'), ndmin=2)
else:
self.stop_reason_ = f'Reached maximum iterations {self.max_iter}'
log.warn(self.stop_reason_)
self.n_iter_ = k + 1
p_upsilon = p - p_theta
U = Q_hat @ U_hat @ linalg.block_diag(Q_hat, np.eye(p_upsilon)).T
coef = U.T
# Only useful for debugging
self.U_hat_ = U_hat
self.P_ = P
self.gamma_ = gamma
return coef
def _validate_parameters(self) -> None:
if (self.ratio <= 0) or (self.ratio > 1):
raise ValueError('`ratio` must be positive and less than or equal '
'to one.')
valid_weight_types = ['pre', 'post']
if self.weight is not None:
if self.weight[0] not in valid_weight_types:
raise ValueError('First element of the `weight` must be one '
f'of {valid_weight_types}.')
if self.max_iter <= 0:
raise ValueError('`max_iter` must be positive.')
if self.iter_atol < 0:
raise ValueError('`iter_atol` must be positive or zero.')
if self.iter_rtol < 0:
raise ValueError('`iter_rtol` must be positive or zero.')
def _create_problem_a(
self,
Q_tld: np.ndarray,
sig_tld: np.ndarray,
Z_tld: np.ndarray,
Q_hat: np.ndarray,
sig_hat: np.ndarray,
Z_hat: np.ndarray,
P: np.ndarray,
) -> picos.Problem:
"""Create first problem in iteration scheme."""
q = Z_hat.shape[0]
problem_a = LmiDmdc._create_base_problem(Q_tld, sig_tld, Z_tld, Q_hat,
sig_hat, Z_hat,
self.alpha_tikhonov_ / q,
self.picos_eps)
# Extract information from problem
U_hat = problem_a.variables['U_hat']
direction = problem_a.objective.direction
objective = problem_a.objective.function
# Get needed sizes
p_theta = U_hat.shape[0]
# Add new constraint
P = picos.Constant('P', P)
gamma = picos.RealVariable('gamma', 1)
# Get weighted state space matrices
A, B, C, D = _create_ss(
U_hat,
self.weight,
Q_hat=self.tsvd_shifted_.left_singular_vectors_,
)
gamma_33 = picos.diag(gamma, D.shape[1])
gamma_44 = picos.diag(gamma, D.shape[0])
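# The 4x4 block LMI below is a form of the discrete-time bounded-real
# lemma: for a fixed positive-definite P, it bounds the H-infinity norm
# of the weighted system (A, B, C, D) by gamma.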
problem_a.add_constraint(
picos.block([
[P, A * P, B, 0],
[P.T * A.T, P, 0, P * C.T],
[B.T, 0, gamma_33, D.T],
[0, C * P.T, D, gamma_44],
]) >> self.picos_eps) # yapf: disable
# Add term to cost function
if self.alpha_other_ <= 0:
raise ValueError('`alpha_other_` must be positive.')
alpha_scaled = picos.Constant('alpha_scaled_inf',
self.alpha_other_ / q)
if self.square_norm:
objective += alpha_scaled * gamma**2
else:
objective += alpha_scaled * gamma
problem_a.set_objective(direction, objective)
return problem_a
def _create_problem_b(self, U_hat: np.ndarray,
gamma: np.ndarray) -> picos.Problem:
"""Create second problem in iteration scheme."""
# Create optimization problem
problem_b = picos.Problem()
# Get needed sizes
p_theta = U_hat.shape[0]
# Create constants
U_hat = picos.Constant('U_hat', U_hat)
gamma = picos.Constant('gamma', gamma)
# Get weighted state space matrices
A, B, C, D = _create_ss(
U_hat,
self.weight,
Q_hat=self.tsvd_shifted_.left_singular_vectors_,
)
# Create variables
P = picos.SymmetricVariable('P', A.shape[0])
# Add constraints
problem_b.add_constraint(P >> self.picos_eps)
gamma_33 = picos.diag(gamma, D.shape[1])
gamma_44 = picos.diag(gamma, D.shape[0])
problem_b.add_constraint(
picos.block([
[P, A * P, B, 0],
[P.T * A.T, P, 0, P * C.T],
[B.T, 0, gamma_33, D.T],
[0, C * P.T, D, gamma_44],
]) >> self.picos_eps) # yapf: disable
# Set objective
problem_b.set_objective('find')
return problem_b
class LmiEdmdDissipativityConstr(LmiRegressor):
"""LMI-based EDMD with dissipativity constraint.
Optionally supports additional Tikhonov regularization.
Originally proposed in [dissip]_.
Attributes
----------
tsvd_ : pykoop.Tsvd
Fit truncated SVD object.
objective_log_ : List[float]
Objective function history.
stop_reason_ : str
Reason iteration stopped.
n_iter_ : int
Number of iterations
solver_params_ : Dict[str, Any]
Solver parameters used (defaults merged with constructor input).
n_features_in_ : int
Number of features input, including episode feature.
n_states_in_ : int
Number of states input.
n_inputs_in_ : int
Number of inputs input.
episode_feature_ : bool
Indicates if episode feature was present during :func:`fit`.
coef_ : np.ndarray
Fit coefficient matrix.
Examples
--------
Apply dissipativity-constrained EDMD to mass-spring-damper data
>>> kp = pykoop.KoopmanPipeline(
... regressor=pykoop.lmi_regressors.LmiEdmdDissipativityConstr()
... )
>>> kp.fit(X_msd, n_inputs=1, episode_feature=True)
KoopmanPipeline(regressor=LmiEdmdDissipativityConstr())
"""
def __init__(
self,
alpha: float = 1,
supply_rate: np.ndarray = None,
max_iter: int = 100,
iter_atol: float = 1e-6,
iter_rtol: float = 0,
inv_method: str = 'svd',
tsvd: tsvd.Tsvd = None,
picos_eps: float = 0,
solver_params: Dict[str, Any] = None,
) -> None:
"""Instantiate :class:`LmiEdmdDissipativityConstr`.
The supply rate ``s(u, y)`` is specified by ``Xi``::
s(u, y) = -[y, u] Xi [y; u]
Some example supply rate matrices ``Xi`` are::
Xi = [0, -1; -1, 0] -> passivity,
Xi = [1/gamma, 0; 0, -gamma] -> bounded L2 gain of gamma.
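For example, a supply rate matrix enforcing an L2 gain bound of ``gamma=2``
on a system with ``n_y`` outputs and ``n_u`` inputs could be built as
(illustrative sketch; ``n_y`` and ``n_u`` are placeholder dimensions)::

    gamma = 2
    Xi = np.block([
        [1 / gamma * np.eye(n_y), np.zeros((n_y, n_u))],
        [np.zeros((n_u, n_y)), -gamma * np.eye(n_u)],
    ])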
Parameters
----------
alpha : float
Regularization coefficient. Cannot be zero.
supply_rate : np.ndarray
Supply rate matrix ``Xi``, where ``s(u, y) = -[y, u] Xi [y; u]``.
If ``None``, an L2 gain bound of ``gamma=1`` is imposed.
max_iter : int
Maximum number of solver iterations.
iter_atol : float
Absolute tolerance for change in objective function value.
iter_rtol : float
Relative tolerance for change in objective function value.
inv_method : str
Method to handle or avoid inversion of the ``H`` matrix when
forming the LMI problem. Possible values are
- ``'inv'`` -- invert ``H`` directly,
- ``'pinv'`` -- apply the Moore-Penrose pseudoinverse to ``H``,
- ``'eig'`` -- split ``H`` using an eigendecomposition,
- ``'ldl'`` -- split ``H`` using an LDL decomposition,
- ``'chol'`` -- split ``H`` using a Cholesky decomposition,
- ``'sqrt'`` -- split ``H`` using :func:`scipy.linalg.sqrtm()`, or
- ``'svd'`` -- split ``H`` using a singular value decomposition.
tsvd : pykoop.Tsvd
Singular value truncation method if ``inv_method='svd'``. If
``None``, economy SVD is used.
picos_eps : float
Tolerance used for strict LMIs. If nonzero, should be larger than
solver tolerance.
solver_params : Dict[str, Any]
Parameters passed to PICOS :func:`picos.Problem.solve()`. By
default, allows chosen solver to select its own tolerances.
"""
self.alpha = alpha
self.supply_rate = supply_rate
self.max_iter = max_iter
self.iter_atol = iter_atol
self.iter_rtol = iter_rtol
self.inv_method = inv_method
self.tsvd = tsvd
self.picos_eps = picos_eps
self.solver_params = solver_params
def _fit_regressor(self, X_unshifted: np.ndarray,
X_shifted: np.ndarray) -> np.ndarray:
# Set solver parameters
self.solver_params_ = self._default_solver_params.copy()
if self.solver_params is not None:
self.solver_params_.update(self.solver_params)
# Clone TSVD
self.tsvd_ = (sklearn.base.clone(self.tsvd)
if self.tsvd is not None else tsvd.Tsvd())
# Set regularization coefficients
# Get needed sizes
p = X_unshifted.shape[1]
p_theta = X_shifted.shape[1]
# Check that at least one input is present
if p_theta == p:
# If you remove the ``{p} features(s)`` part of this message,
# the scikit-learn estimator_checks will fail!
raise ValueError('`LmiEdmdDissipativityConstr()` requires an '
'input to function. `X` and `y` must therefore '
'have different numbers of features. `X` and `y` '
f'both have {p} feature(s).')
# Initialize ``P``
P = np.eye(p_theta)
# Solve optimization problem iteratively
U = np.zeros((p_theta, p))
self.objective_log_ = []
for k in range(self.max_iter):
# Formulate Problem A
problem_a = self._create_problem_a(X_unshifted, X_shifted, P)
# Solve Problem A
if polite_stop:
self.stop_reason_ = 'User requested stop.'
log.warn(self.stop_reason_)
break
log.info(f'Solving problem A{k}')
problem_a.solve(**self.solver_params_)
solution_status_a = problem_a.last_solution.claimedStatus
if solution_status_a != 'optimal':
self.stop_reason_ = (
'Unable to solve `problem_a`. Used last valid `U`. '
f'Solution status: `{solution_status_a}`.')
log.warn(self.stop_reason_)
break
U = np.array(problem_a.get_valued_variable('U'), ndmin=2)
self.objective_log_.append(problem_a.value)
if len(self.objective_log_) > 1:
curr_obj = self.objective_log_[-1]
prev_obj = self.objective_log_[-2]
diff_obj = prev_obj - curr_obj
log.info(f'Objective: {curr_obj}; Change: {diff_obj}.')
if np.allclose(curr_obj,
prev_obj,
atol=self.iter_atol,
rtol=self.iter_rtol):
self.stop_reason_ = f'Reached tolerance {diff_obj}'
break
# Formulate Problem B
problem_b = self._create_problem_b(U)
# Solve Problem B
if polite_stop:
self.stop_reason_ = 'User requested stop.'
log.warn(self.stop_reason_)
break
log.info(f'Solving problem B{k}')
problem_b.solve(**self.solver_params_)
solution_status_b = problem_b.last_solution.claimedStatus
if solution_status_b != 'optimal':
self.stop_reason_ = (
'Unable to solve `problem_b`. Used last valid `U`. '
f'Solution status: `{solution_status_b}`.')
log.warn(self.stop_reason_)
break
P = np.array(problem_b.get_valued_variable('P'), ndmin=2)
else:
self.stop_reason_ = f'Reached maximum iterations {self.max_iter}'
log.warn(self.stop_reason_)
self.n_iter_ = k + 1
coef = U.T
# Only useful for debugging
self.P_ = P
return coef
def _validate_parameters(self) -> None:
# Check other parameters
if self.max_iter <= 0:
raise ValueError('`max_iter` must be positive.')
if self.iter_atol < 0:
raise ValueError('`iter_atol` must be positive or zero.')
if self.iter_rtol < 0:
raise ValueError('`iter_rtol` must be positive or zero.')
def _create_problem_a(self, X_unshifted: np.ndarray, X_shifted: np.ndarray,
P: np.ndarray) -> picos.Problem:
"""Create first problem in iteration scheme."""
q = X_unshifted.shape[0]
problem_a = LmiEdmd._create_base_problem(X_unshifted, X_shifted,
self.alpha / q,
self.inv_method, self.tsvd_,
self.picos_eps)
# Extract information from problem
U = problem_a.variables['U']
# Get needed sizes
p_theta, p = U.shape
# Add new constraint
P = picos.Constant('P', P)
# Get weighted state space matrices
A, B, C, D = _create_ss(U, None)
# Add dissipativity constraint
if self.supply_rate is None:
n_u = p - p_theta
Xi = np.block([
[np.eye(p_theta), np.zeros((p_theta, n_u))],
[np.zeros((n_u, p_theta)), -np.eye(n_u)],
])
else:
Xi = self.supply_rate
Xi11 = picos.Constant('Xi_11', Xi[:p_theta, :p_theta])
Xi12 = picos.Constant('Xi_12', Xi[:p_theta, p_theta:])
Xi22 = picos.Constant('Xi_22', Xi[p_theta:, p_theta:])
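# The block LMI below, together with P >> 0, encodes the dissipativity
# constraint for the quadratic supply rate defined by Xi (see the class
# docstring), applied to the identified system (A, B) with output C.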
problem_a.add_constraint(
picos.block([
[P - C.T * Xi11 * C, -C.T * Xi12, A.T * P],
[-Xi12.T * C, -Xi22, B.T * P],
[P * A, P * B, P],
]) >> self.picos_eps)
return problem_a
def _create_problem_b(self, U: np.ndarray) -> picos.Problem:
"""Create second problem in iteration scheme."""
# Create optimization problem
problem_b = picos.Problem()
# Get needed sizes
p_theta, p = U.shape
# Create constants
U = picos.Constant('U', U)
# Get weighted state space matrices
A, B, C, D = _create_ss(U, None)
# Create variables
P = picos.SymmetricVariable('P', A.shape[0])
# Add constraints
problem_b.add_constraint(P >> self.picos_eps)
# Add dissipativity constraint
if self.supply_rate is None:
n_u = p - p_theta
Xi = np.block([
[np.eye(p_theta), np.zeros((p_theta, n_u))],
[np.zeros((n_u, p_theta)), -np.eye(n_u)],
])
else:
Xi = self.supply_rate
Xi11 = picos.Constant('Xi_11', Xi[:p_theta, :p_theta])
Xi12 = picos.Constant('Xi_12', Xi[:p_theta, p_theta:])
Xi22 = picos.Constant('Xi_22', Xi[p_theta:, p_theta:])
problem_b.add_constraint(P >> self.picos_eps)
problem_b.add_constraint(
picos.block([
[P - C.T * Xi11 * C, -C.T * Xi12, A.T * P],
[-Xi12.T * C, -Xi22, B.T * P],
[P * A, P * B, P],
]) >> self.picos_eps)
# Set objective
problem_b.set_objective('find')
return problem_b
class LmiHinfZpkMeta(sklearn.base.BaseEstimator, sklearn.base.RegressorMixin):
"""Meta-estimator where H-infinity weight is specified in ZPK format.
H-infinity regularization weights must normally be specified in
discrete-time state space format. This can make cross-validating pole or
zero positions annoying. This meta-estimator wraps :class:`LmiEdmdHinfReg`
or :class:`LmiDmdcHinfReg` and allows the weighting filter to be specified
using zeros, poles, and a gain (i.e., ZPK format) in continuous-time.
All attributes with a trailing underscore are set by :func:`fit`.
Attributes
----------
hinf_regressor_ : koopman_pipeline.KoopmanRegressor
Fit internal regressor.
ss_ct_ : scipy.signal.lti
Continuous-time state space weight.
ss_dt_ : scipy.signal.dlti
Discrete-time state space weight.
n_features_in_ : int
Number of features input, including episode feature.
n_states_in_ : int
Number of states input.
n_inputs_in_ : int
Number of inputs input.
episode_feature_ : bool
Indicates if episode feature was present during :func:`fit`.
coef_ : np.ndarray
Fit coefficient matrix.
"""
def __init__(
self,
hinf_regressor: koopman_pipeline.KoopmanRegressor = None,
type: str = 'post',
zeros: Union[float, np.ndarray] = None,
poles: Union[float, np.ndarray] = None,
gain: float = 1,
discretization: str = 'bilinear',
t_step: float = 1,
units: str = 'rad/s',
) -> None:
"""Instantiate :class:`LmiHinfZpkMeta`.
Parameters
----------
hinf_regressor : koopman_pipeline.KoopmanRegressor
Instance of :class:`LmiEdmdHinfReg` or :class:`LmiDmdcHinfReg`.
type : str
Type of weight (``'pre'`` or ``'post'``).
zeros : Union[float, np.ndarray]
Filter zeros. If ``None``, no zeros are used. Accepts scalar input
if only one zero is required.
poles : Union[float, np.ndarray]
Filter poles. If ``None``, no poles are used. Accepts scalar input
if only one pole is required.
gain : float
Filter gain.
discretization : str
Discretization method supported by
:func:`scipy.signal.cont2discrete` (except ``'gbt'``).
Specifically, possible values are
- ``'bilinear'`` -- Tustin's approximation (recommended),
- ``'euler'`` -- Euler (or forward differencing) method,
- ``'backward_diff'`` -- backwards differencing method,
- ``'zoh'`` -- zero-order hold method,
- ``'foh'`` -- first-order hold method, or
- ``'impulse'`` -- equivalent impulse response method.
See [cont2discrete]_ for details.
t_step : float
Timestep between samples. Used for discretization.
units : str
Units of poles and zeros. Possible values are
- ``'rad/s'`` -- radians per second,
- ``'hz'`` -- Hertz, or
- ``'normalized'`` -- normalized, where 1 is the Nyquist frequency.
Notes
-----
The zeros and poles in the weight should usually have negative real
parts! If you want a pole at ``10 rad/s``, then ``poles`` must be
``-10``.
References
----------
.. [cont2discrete] https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.cont2discrete.html#scipy-signal-cont2discrete # noqa: E501
Examples
--------
>>> est = pykoop.lmi_regressors.LmiHinfZpkMeta(
... hinf_regressor=pykoop.lmi_regressors.LmiEdmdHinfReg(),
... type='post',
... zeros=-0,
... poles=-5,
... gain=1,
... discretization='bilinear',
... t_step=0.1,
... )
>>> est.fit(X_msd, n_inputs=1, episode_feature=True)
LmiHinfZpkMeta(hinf_regressor=LmiEdmdHinfReg(), poles=-5, t_step=0.1,
zeros=0)
>>> est.hinf_regressor_
LmiEdmdHinfReg(weight=('post', array([[...]]), array([[...]]),
array([[...]]), array([[...]])))
"""
self.hinf_regressor = hinf_regressor
self.type = type
self.zeros = zeros
self.poles = poles
self.gain = gain
self.discretization = discretization
self.t_step = t_step
self.units = units
def fit(self,
X: np.ndarray,
y: np.ndarray = None,
n_inputs: int = 0,
episode_feature: bool = False) -> 'LmiHinfZpkMeta':
"""Fit the regressor.
If only ``X`` is specified, the regressor will compute its unshifted
and shifted versions. If ``X`` and ``y`` are specified, ``X`` is
treated as the unshifted data matrix, while ``y`` is treated as the
shifted data matrix.
Parameters
----------
X : np.ndarray
Full data matrix if ``y=None``. Unshifted data matrix if ``y`` is
specified.
y : np.ndarray
Optional shifted data matrix. If ``None``, shifted data matrix is
computed using ``X``.
n_inputs : int
Number of input features at the end of ``X``.
episode_feature : bool
True if first feature indicates which episode a timestep is from.
Returns
-------
LmiHinfZpkMeta
Instance of itself.
Raises
------
ValueError
If constructor or fit parameters are incorrect.
"""
z_in = np.atleast_1d(self.zeros if self.zeros is not None else [])
p_in = np.atleast_1d(self.poles if self.poles is not None else [])
if self.units == 'rad/s':
z = z_in
p = p_in
elif self.units == 'hz':
z = 2 * np.pi * z_in
p = 2 * np.pi * p_in
elif self.units == 'normalized':
sampling_freq = 1 / self.t_step
nyquist_freq_hz = sampling_freq / 2
nyquist_freq_rads = 2 * np.pi * nyquist_freq_hz
z = nyquist_freq_rads * z_in
p = nyquist_freq_rads * p_in
else:
valid_units = ['rad/s', 'hz', 'normalized']
raise ValueError(f'`units` must be one of {valid_units}.')
self.ss_ct_ = scipy.signal.ZerosPolesGain(z, p, self.gain).to_ss()
self.ss_dt_ = self.ss_ct_.to_discrete(
self.t_step,
self.discretization,
)
weight = (
self.type,
self.ss_dt_.A,
self.ss_dt_.B,
self.ss_dt_.C,
self.ss_dt_.D,
)
self.hinf_regressor_ = sklearn.base.clone(self.hinf_regressor)
self.hinf_regressor_.set_params(weight=weight)
self.hinf_regressor_.fit(
X,
y,
n_inputs=n_inputs,
episode_feature=episode_feature,
)
self.n_features_in_ = self.hinf_regressor_.n_features_in_
self.n_states_in_ = self.hinf_regressor_.n_states_in_
self.n_inputs_in_ = self.hinf_regressor_.n_inputs_in_
self.episode_feature_ = self.hinf_regressor_.episode_feature_
self.coef_ = self.hinf_regressor_.coef_
return self
@sklearn.utils.metaestimators.if_delegate_has_method('hinf_regressor_')
def predict(self, X: np.ndarray) -> np.ndarray:
"""Perform a single-step prediction for each state in each episode.
Parameters
----------
X : np.ndarray
Data matrix.
Returns
-------
np.ndarray
Predicted data matrix.
"""
return self.hinf_regressor_.predict(X)
def _more_tags(self):
return {
'multioutput': True,
'multioutput_only': True,
}
def _create_ss(
U: np.ndarray,
weight: Optional[Tuple[str, np.ndarray, np.ndarray, np.ndarray,
np.ndarray]],
Q_hat: np.ndarray = None,
) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
"""Augment Koopman system with weight if present.
Parameters
----------
U : np.ndarray
Koopman matrix containing ``A`` and ``B`` concatenated
horizontally.
weight : Optional[Tuple[str, np.ndarray, np.ndarray, np.ndarray,
np.ndarray]]
Tuple containing weight type (``'pre'`` or ``'post'``), and the
weight state space matrices (``A``, ``B``, ``C``, and ``D``). If
``None``, no weighting is used.
Q_hat : np.ndarray
Left singular vectors of shifted data matrix. Used to construct ``C``
matrix. Should only be used with DMDc methods.
Returns
-------
Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]
Weighted state space matrices (``A``, ``B``, ``C``, ``D``).
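Notes
-----
For a ``'pre'`` weight, the weighting filter is cascaded at the system
input, so the returned matrices follow the usual series interconnection
(sketch of the block structure assembled below)::

    A = [Aw,    0 ]    B = [Bw   ]    C = [Dm Cw, Cm]    D = Dm Dw
        [Bm Cw, Am]        [Bm Dw]

The ``'post'`` case is analogous, with the weight cascaded at the output.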
"""
p_theta = U.shape[0]
if weight is None:
A = U[:, :p_theta]
B = U[:, p_theta:]
C = picos.Constant('C',
Q_hat if Q_hat is not None else np.eye(p_theta))
D = picos.Constant('D', np.zeros((C.shape[0], B.shape[1])))
else:
Am = U[:, :p_theta]
Bm = U[:, p_theta:]
Cm = picos.Constant('Cm',
Q_hat if Q_hat is not None else np.eye(p_theta))
Dm = picos.Constant('Dm', np.zeros((Cm.shape[0], Bm.shape[1])))
if weight[0] == 'pre':
n_u = Bm.shape[1]
Aw_blk = linalg.block_diag(*([weight[1]] * n_u))
Bw_blk = linalg.block_diag(*([weight[2]] * n_u))
Cw_blk = linalg.block_diag(*([weight[3]] * n_u))
Dw_blk = linalg.block_diag(*([weight[4]] * n_u))
Aw = picos.Constant('Aw', Aw_blk)
Bw = picos.Constant('Bw', Bw_blk)
Cw = picos.Constant('Cw', Cw_blk)
Dw = picos.Constant('Dw', Dw_blk)
A = picos.block([
[Aw, 0],
[Bm * Cw, Am],
])
B = picos.block([
[Bw],
[Bm * Dw],
])
C = picos.block([
[Dm * Cw, Cm],
])
D = Dm * Dw
elif weight[0] == 'post':
n_x = Cm.shape[0]
Aw_blk = linalg.block_diag(*([weight[1]] * n_x))
Bw_blk = linalg.block_diag(*([weight[2]] * n_x))
Cw_blk = linalg.block_diag(*([weight[3]] * n_x))
Dw_blk = linalg.block_diag(*([weight[4]] * n_x))
Aw = picos.Constant('Aw', Aw_blk)
Bw = picos.Constant('Bw', Bw_blk)
Cw = picos.Constant('Cw', Cw_blk)
Dw = picos.Constant('Dw', Dw_blk)
A = picos.block([
[Am, 0],
[Bw * Cm, Aw],
])
B = picos.block([
[Bm],
[Bw * Dm],
])
C = picos.block([
[Dw * Cm, Cw],
])
D = Dw * Dm
else:
# Already checked, should not get here.
assert False
return (A, B, C, D)
def _add_twonorm(problem: picos.Problem, U: picos.RealVariable,
alpha_other: float, square_norm: bool,
picos_eps: float) -> picos.Problem:
"""Add matrix two norm regularizer to an optimization problem.
Parameters
----------
problem : picos.Problem
Optimization problem.
U : picos.RealVariable
Koopman matrix variable.
alpha_other : float
Regularization coefficient (already divided by ``q`` if applicable).
square_norm : bool
Square matrix two-norm.
picos_eps : float
Tolerance used for strict LMIs.
Returns
-------
picos.Problem
Optimization problem with regularizer added.
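Notes
-----
The matrix two-norm is imposed through its standard LMI epigraph form
(sketch): ``||U||_2 <= gamma`` holds if and only if::

    [gamma I, U.T    ]
    [U,       gamma I] >> 0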
"""
# Validate ``alpha``
if alpha_other <= 0:
raise ValueError('Parameter `alpha` must be positive.')
# Extract information from problem
direction = problem.objective.direction
objective = problem.objective.function
# Get needed sizes
p_theta, p = U.shape
# Add new constraint
gamma = picos.RealVariable('gamma', 1)
problem.add_constraint(
picos.block([[picos.diag(gamma, p), U.T],
[U, picos.diag(gamma, p_theta)]]) >> picos_eps)
# Add term to cost function
alpha_scaled = picos.Constant('alpha_scaled_2', alpha_other)
if square_norm:
objective += alpha_scaled * gamma**2
else:
objective += alpha_scaled * gamma
problem.set_objective(direction, objective)
return problem
def _add_nuclear(problem: picos.Problem, U: picos.RealVariable,
alpha_other: float, square_norm: bool,
picos_eps: float) -> picos.Problem:
"""Add nuclear norm regularizer to an optimization problem.
Parameters
----------
problem : picos.Problem
Optimization problem.
U : picos.RealVariable
Koopman matrix variable.
alpha_other : float
Regularization coefficient (already divided by ``q`` if applicable).
square_norm : bool
Square nuclear norm.
picos_eps : float
Tolerance used for strict LMIs.
Returns
-------
picos.Problem
Optimization problem with regularizer added.
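Notes
-----
The nuclear norm is imposed through its standard SDP characterization
(sketch): ``||U||_* <= gamma`` holds if there exist symmetric ``W_1`` and
``W_2`` such that::

    trace(W_1) + trace(W_2) <= 2 gamma,
    [W_1, U  ]
    [U.T, W_2] >> 0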
"""
# Validate ``alpha``
if alpha_other <= 0:
raise ValueError('Parameter `alpha` must be positive.')
# Extract information from problem
direction = problem.objective.direction
objective = problem.objective.function
# Get needed sizes
p_theta, p = U.shape
# Add new constraint
gamma = picos.RealVariable('gamma', 1)
W_1 = picos.SymmetricVariable('W_1', (p_theta, p_theta))
W_2 = picos.SymmetricVariable('W_2', (p, p))
problem.add_constraint(picos.trace(W_1) + picos.trace(W_2) <= 2 * gamma)
problem.add_constraint(picos.block([[W_1, U], [U.T, W_2]]) >> 0)
# Add term to cost function
alpha_scaled = picos.Constant('alpha_scaled_*', alpha_other)
if square_norm:
objective += alpha_scaled * gamma**2
else:
objective += alpha_scaled * gamma
problem.set_objective(direction, objective)
return problem
@memory.cache
def _calc_c_G_H(
X_unshifted: np.ndarray,
X_shifted: np.ndarray,
alpha: float,
) -> Tuple[float, np.ndarray, np.ndarray, Dict[str, Any]]:
"""Compute ``c``, ``G``, and ``H``.
Parameters
----------
X_unshifted : np.ndarray
Unshifted data matrix.
X_shifted: np.ndarray
Shifted data matrix.
alpha: float
Tikhonov regularization coefficient (divided by ``q``).
Returns
-------
Tuple[float, np.ndarray, np.ndarray, Dict[str, Any]]
Tuple containing ``c``, ``G``, and ``H``, along with numerical
statistics.
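Notes
-----
With ``Psi = X_unshifted.T`` and ``Theta_p = X_shifted.T``, each having
``q`` snapshot columns, the returned quantities are::

    c = trace(Theta_p @ Theta_p.T) / q
    G = (Theta_p @ Psi.T) / q
    H = (Psi @ Psi.T) / q + alpha * eye(p)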
"""
# Compute G and H
Psi = X_unshifted.T
Theta_p = X_shifted.T
p, q = Psi.shape
# Compute G and Tikhonov-regularized H
G = (Theta_p @ Psi.T) / q
H_unreg = (Psi @ Psi.T) / q
# ``alpha`` is already divided by ``q`` to be consistent with ``G`` and
# ``H``
H_reg = H_unreg + (alpha * np.eye(p))
# Compute c
c = np.trace(Theta_p @ Theta_p.T) / q
# Check condition number and rank of G and H
cond_G = np.linalg.cond(G)
rank_G = np.linalg.matrix_rank(G)
shape_G = G.shape
cond_H_unreg = np.linalg.cond(H_unreg)
rank_H_unreg = np.linalg.matrix_rank(H_unreg)
shape_H_unreg = H_unreg.shape
cond_H_reg = np.linalg.cond(H_reg)
rank_H_reg = np.linalg.matrix_rank(H_reg)
shape_H_reg = H_reg.shape
stats = {
'cond_G': cond_G,
'rank_G': rank_G,
'shape_G': shape_G,
'cond_H_unreg': cond_H_unreg,
'rank_H_unreg': rank_H_unreg,
'shape_H_unreg': shape_H_unreg,
'cond_H_reg': cond_H_reg,
'rank_H_reg': rank_H_reg,
'shape_H_reg': shape_H_reg,
}
stats_str = {}
for key in stats:
if 'cond' in key:
stats_str[key] = f'{stats[key]:.2e}'
else:
stats_str[key] = stats[key]
log.info(f'`_calc_c_G_H()` stats: {stats_str}')
return c, G, H_reg, stats
@memory.cache
def _calc_Hinv(H: np.ndarray) -> np.ndarray:
"""Compute inverse of ``H``."""
return linalg.inv(H)
@memory.cache
def _calc_Hpinv(H: np.ndarray) -> np.ndarray:
"""Compute Moore-Penrose pseudoinverse of ``H``."""
return linalg.pinv(H)
@memory.cache
def _calc_VsqrtLmb(H: np.ndarray) -> np.ndarray:
"""Split ``H`` using its eigendecomposition."""
lmb, V = linalg.eigh(H)
return V @ np.diag(np.sqrt(lmb))
@memory.cache
def _calc_LsqrtD(H: np.ndarray) -> np.ndarray:
"""Split ``H`` using its LDL decomposition."""
L, D, _ = linalg.ldl(H)
return L @ np.sqrt(D)
@memory.cache
def _calc_L(H: np.ndarray) -> np.ndarray:
"""Split ``H`` using its Cholesky decomposition."""
return linalg.cholesky(H, lower=True)
@memory.cache
def _calc_sqrtH(H: np.ndarray) -> np.ndarray:
"""Split ``H`` using ``scipy.linalg.sqrtm``."""
# Since H is symmetric, its square root is symmetric.
# Otherwise, this would not work!
return linalg.sqrtm(H)
@memory.cache
def _calc_QSig(X: np.ndarray, alpha: float, tsvd: tsvd.Tsvd) -> np.ndarray:
"""Split ``H`` using the truncated SVD of ``X``.
``H`` is defined as::
H = 1/q * X_unshifted @ X_unshifted.T
Consider the SVD::
X_unshifted = Q @ Sig @ V.T
Without regularization, ``H`` is then::
H = 1/q * Q @ Sig**2 @ Q.T
= Q @ (Sig**2 / q) @ Q.T
= (Q @ sqrt(Sig**2 / q)) @ (sqrt(Sig**2 / q) @ Q.T)
With regularization::
H = (Q @ sqrt(Sig**2 / q + alpha)) @ (sqrt(Sig**2 / q + alpha) @ Q.T)
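In terms of the ``X`` argument (snapshots as rows), an illustrative check,
assuming no truncation and at least as many snapshots as features::

    QSig = _calc_QSig(X, alpha, pykoop.Tsvd())
    np.allclose(QSig @ QSig.T,
                (X.T @ X) / X.shape[0] + alpha * np.eye(X.shape[1]))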
Parameters
----------
X : np.ndarray
``X``, where ``H = 1/q * X @ X.T``.
alpha : float
Tikhonov regularization coefficient (divided by ``q``).
tsvd : pykoop.Tsvd
Truncated singular value object.
Returns
-------
np.ndarray
Split ``H`` matrix.
"""
# SVD
tsvd.fit(X.T)
Qr = tsvd.left_singular_vectors_
sr = tsvd.singular_values_
# Regularize
q = X.shape[0]
# ``alpha`` is already divided by ``q`` to be consistent with ``G`` and
# ``H``.
sr_reg = np.sqrt((sr**2 / q) + alpha)
Sr_reg = np.diag(sr_reg)
# Multiply with Q and return
QSig = Qr @ Sr_reg
return QSig
avg_line_length: 38.274546 | max_line_length: 152 | alphanum_fraction: 0.566537

hexsha: febabfb3d781af11c7313785b90f9b7724850246 | size: 10,482 | ext: py | lang: Python
max_stars_repo_path: datasets/mnist.py | max_stars_repo_name: rahul13ramesh/MultiTask_ModelZoo
max_stars_repo_head_hexsha: 4344edc9cf936d406e00adea50ed6cdf3af4704e | max_stars_repo_licenses: ["MIT"]
max_stars_count: 1 | max_stars_repo_stars_event: 2022-03-24T10:33:36.000Z
max_issues_repo_path: datasets/mnist.py | max_issues_count: null
max_forks_repo_path: datasets/mnist.py | max_forks_count: null
content:
from typing import List
import numpy as np
import torchvision
import torchvision.transforms as transforms
from numpy.random import default_rng
from datasets.modmnist import ModMNIST
from datasets.data import MultiTaskDataHandler
class SplitMNISTHandler(MultiTaskDataHandler):
"""
Load the SplitMNIST dataset: split the 10 digit classes into multiple tasks
"""
def __init__(self,
tasks: List[List[int]],
samples: int,
seed: int = -1) -> None:
"""
Download dataset and define transforms
Args:
- tasks: List of lists. Each inner list is a description of the
labels that describe a task
- samples: Number of samples for each label
- seed: Random seed
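Example (illustrative; assumes the default './data' root is writable and
downloads MNIST on first use):
    handler = SplitMNISTHandler(tasks=[[0, 1], [2, 3]], samples=100, seed=0)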
"""
mean_norm = [0.50]
std_norm = [0.25]
dat = ModMNIST
self.augment_transform = transforms.Compose([
transforms.RandomCrop(28, padding=4),
transforms.ToTensor(),
transforms.Normalize(mean_norm, std_norm),
])
self.vanilla_transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean=mean_norm, std=std_norm)])
# Get dataset
self.trainset = dat(
root='./data', train=True, download=True,
transform=self.augment_transform)
self.testset = dat(
root='./data', train=False, download=True,
transform=self.vanilla_transform)
# Create a dataset
self.samples = samples
tr_ind, te_ind = [], []
tr_lab, te_lab = [], []
idx = np.array(range(5000))
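# A non-negative seed shuffles the per-class sample indices reproducibly;
# seed=None shuffles without seeding; a negative seed instead rolls the
# index array by |seed| positions (deterministic selection, no shuffling).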
if (seed is not None) and seed >= 0:
np.random.seed(seed)
if (seed is None) or (seed >= 0):
np.random.shuffle(idx)
else:
rng = -1 * seed
idx = np.roll(idx, rng)
# Filter dataset based on config file
for task_id, tsk in enumerate(tasks):
for lab_id, lab in enumerate(tsk):
task_tr_ind = np.where(np.isin(self.trainset.targets,
[lab]))[0]
# Consider subset of train dataset and entire test dataset
task_tr_ind = task_tr_ind[idx[:samples]]
task_te_ind = np.where(np.isin(self.testset.targets,
[lab]))[0]
tr_ind.append(task_tr_ind)
te_ind.append(task_te_ind)
curlab = (task_id, lab_id)
tr_vals = [curlab for _ in range(len(task_tr_ind))]
te_vals = [curlab for _ in range(len(task_te_ind))]
tr_lab.append(tr_vals)
te_lab.append(te_vals)
tr_ind = np.concatenate(tr_ind)
te_ind = np.concatenate(te_ind)
self.tr_ind = tr_ind
self.te_ind = te_ind
tr_lab, te_lab = np.concatenate(tr_lab), np.concatenate(te_lab)
self.trainset.data = self.trainset.data[tr_ind]
self.testset.data = self.testset.data[te_ind]
self.trainset.targets = [list(it) for it in tr_lab]
self.testset.targets = [list(it) for it in te_lab]
class RotatedMNISTHandler(MultiTaskDataHandler):
"""
Rotated MNIST dataset
"""
def __init__(self,
tasks: List[List[int]],
samples: int,
seed: int = -1) -> None:
"""
Download dataset and define transforms
Args:
- tasks: List of lists. Each inner list is a description of the
labels that describe a task. If the label is 10 * x + y, then the
digit y is rotated by an angle of 10 * x degrees (rotations are only
multiples of 10)
- samples: Number of samples for each label
- seed: Random seed
"""
mean_norm = [0.50]
std_norm = [0.25]
dat = ModMNIST
self.augment_transform = transforms.Compose([
transforms.RandomCrop(28, padding=4),
transforms.ToTensor(),
transforms.Normalize(mean_norm, std_norm),
])
self.vanilla_transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean=mean_norm, std=std_norm)])
# Get dataset
self.trainset = dat(
root='./data', train=True, download=True,
transform=self.augment_transform)
self.testset = dat(
root='./data', train=False, download=True,
transform=self.vanilla_transform)
# Create a dataset
self.samples = samples
tr_ind, te_ind = [], []
tr_lab, te_lab = [], []
idx = np.array(range(5000))
if (seed is not None) and seed >= 0:
np.random.seed(seed)
if (seed is None) or (seed >= 0):
np.random.shuffle(idx)
else:
rng = -1 * seed
idx = np.roll(idx, rng)
for task_id, tsk in enumerate(tasks):
for lab_id, lab in enumerate(tsk):
task_tr_ind = np.where(np.isin(self.trainset.targets,
[lab % 10]))[0]
task_tr_ind = task_tr_ind[idx[:samples]]
task_te_ind = np.where(np.isin(self.testset.targets,
[lab % 10]))[0]
tr_ind.append(task_tr_ind)
te_ind.append(task_te_ind)
curlab = (task_id, lab_id)
tr_vals = [curlab for _ in range(len(task_tr_ind))]
te_vals = [curlab for _ in range(len(task_te_ind))]
tr_lab.append(tr_vals)
te_lab.append(te_vals)
tr_ind = np.concatenate(tr_ind)
te_ind = np.concatenate(te_ind)
self.tr_ind = tr_ind
self.te_ind = te_ind
tr_lab, te_lab = np.concatenate(tr_lab), np.concatenate(te_lab)
self.trainset.data = self.trainset.data[tr_ind]
self.testset.data = self.testset.data[te_ind]
# Rotate images for each of the tasks based on task_id
for t_id in range(len(tasks)):
ang = (tasks[t_id][0] // 10) * 10
task_tr_flag = tr_lab[:, 0] == t_id
task_te_flag = te_lab[:, 0] == t_id
self.trainset.data[task_tr_flag] = transforms.functional.rotate(
self.trainset.data[task_tr_flag], angle=ang)
self.testset.data[task_te_flag] = transforms.functional.rotate(
self.testset.data[task_te_flag], angle=ang)
self.trainset.targets = [list(it) for it in tr_lab]
self.testset.targets = [list(it) for it in te_lab]
class PermutedMNISTHandler(MultiTaskDataHandler):
"""
Initialization for Permuted MNIST dataset
"""
def __init__(self,
tasks: List[List[int]],
samples: int,
seed: int = -1) -> None:
"""
Download dataset and define transforms
Args:
- tasks: List of lists. Each inner list is a description of the
labels that describe a task. If the label is 10 * x + y, then the digit y
is permuted using random seed 1000 * x
- samples: Number of samples for each label
- seed: Random seed
"""
mean_norm = [0.50]
std_norm = [0.25]
dat = ModMNIST
self.augment_transform = transforms.Compose([
transforms.RandomCrop(28, padding=4),
transforms.ToTensor(),
transforms.Normalize(mean_norm, std_norm),
])
self.vanilla_transform = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean=mean_norm, std=std_norm)])
# Get dataset
self.trainset = dat(
root='./data', train=True, download=True,
transform=self.augment_transform)
self.testset = dat(
root='./data', train=False, download=True,
transform=self.vanilla_transform)
# Create a dataset
self.samples = samples
tr_ind, te_ind = [], []
tr_lab, te_lab = [], []
idx = np.array(range(5000))
if (seed is not None) and seed >= 0:
np.random.seed(seed)
if (seed is None) or (seed >= 0):
np.random.shuffle(idx)
else:
rng = -1 * seed
idx = np.roll(idx, rng)
# Filter dataset
for task_id, tsk in enumerate(tasks):
for lab_id, lab in enumerate(tsk):
task_tr_ind = np.where(np.isin(self.trainset.targets,
[lab % 10]))[0]
task_tr_ind = task_tr_ind[idx[:samples]]
task_te_ind = np.where(np.isin(self.testset.targets,
[lab % 10]))[0]
tr_ind.append(task_tr_ind)
te_ind.append(task_te_ind)
curlab = (task_id, lab_id)
tr_vals = [curlab for _ in range(len(task_tr_ind))]
te_vals = [curlab for _ in range(len(task_te_ind))]
tr_lab.append(tr_vals)
te_lab.append(te_vals)
tr_ind = np.concatenate(tr_ind)
te_ind = np.concatenate(te_ind)
self.tr_ind = tr_ind
self.te_ind = te_ind
tr_lab, te_lab = np.concatenate(tr_lab), np.concatenate(te_lab)
self.trainset.data = self.trainset.data[tr_ind]
self.testset.data = self.testset.data[te_ind]
# Permute images for each of the tasks based on task_id
for t_id in range(len(tasks)):
task_tr_flag = tr_lab[:, 0] == t_id
task_te_flag = te_lab[:, 0] == t_id
# Set seed based on task descriptors
tseed = (tasks[t_id][0] // 10) * 1000
rng_permute = default_rng(seed=tseed)
if (tseed == 0):
idx_permute = np.arange(784)
else:
idx_permute = rng_permute.permutation(784)
self.trainset.data[task_tr_flag] = self.trainset.data[
task_tr_flag].view(-1, 784)[:, idx_permute].view(-1, 28, 28)
self.testset.data[task_te_flag] = self.testset.data[
task_te_flag].view(-1, 784)[:, idx_permute].view(-1, 28, 28)
self.trainset.targets = [list(it) for it in tr_lab]
self.testset.targets = [list(it) for it in te_lab]
avg_line_length: 35.174497 | max_line_length: 80 | alphanum_fraction: 0.546747

hexsha: 3a001957396550125c160bc8e564417cd9e8f5be | size: 172 | ext: py | lang: Python
max_stars_repo_path: smartfit_booking/__init__.py | max_stars_repo_name: stivenramireza/smartfit-booking
max_stars_repo_head_hexsha: 095361a01071c56f2fea93028be31484d3f27a43 | max_stars_repo_licenses: ["MIT"]
max_stars_count: 1 | max_stars_repo_stars_event: 2021-03-22T04:27:08.000Z
max_issues_repo_path: smartfit_booking/__init__.py | max_issues_count: null
max_forks_repo_path: smartfit_booking/__init__.py | max_forks_count: null
content:
from . import data_access_api, formatter, logger, storage, website_bot
__all__ = [
'data_access_api',
'formatter',
'logger',
'storage',
'website_bot'
]
avg_line_length: 19.111111 | max_line_length: 70 | alphanum_fraction: 0.656977

hexsha: 28e6ba0b7b6ef483c9a12d42fcf804a645c82333 | size: 35,236 | ext: py | lang: Python
max_stars_repo_path: .virtual_documents/scripts/Python_code.ipynb.py | max_stars_repo_name: phu-att/data-viz-mtp
max_stars_repo_head_hexsha: d8e033504858fc7a4b34243cab83fc8d4df55e9e | max_stars_repo_licenses: ["MIT"]
max_stars_count: null | max_issues_repo_path: .virtual_documents/scripts/Python_code.ipynb.py
max_forks_repo_path: .virtual_documents/scripts/Python_code.ipynb.py | max_forks_count: null
content:
import numpy as np
import pandas as pd
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import seaborn as sns
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import warnings
from pandas.core.common import SettingWithCopyWarning
from datetime import datetime, timedelta
import datetime as dt
warnings.simplefilter(action="ignore", category=SettingWithCopyWarning)
get_ipython().run_line_magic("matplotlib", " inline")
pd.options.display.max_columns = 50
from matplotlib import rc
# # get_ipython().run_line_magic("%", " setup")
rc('font',**{'family':'sans-serif','sans-serif':['Avant Garde']})
rc('text', usetex=True)
short = pd.read_csv('financials__short_term.csv')
long = pd.read_csv('financials__long_term.csv')
short['month'] = short['date'].str[5:7]
short['date'] = short['date'].str[:10]
short['date'] = pd.to_datetime(short['date'], format='%Y-%m-%d')
short_group = short.groupby(['sector','country','month']).agg(np.mean).reset_index()
france_short = short_group[short_group['country']=='france']
germany_short = short_group[short_group['country']=='germany']
uk_short = short_group[short_group['country']=='united kingdom']
sectors = short['sector'].unique().tolist()
dataframes = [france_short,germany_short,uk_short]
for i in range(3):
df = dataframes[i]
df['pct_prices'] = df['price']
for s in sectors:
origin_price = df[df['sector']==s]['price'].iloc[0]
df.loc[df.sector==s,'pct_prices'] = ((df.loc[df.sector==s,'pct_prices'] - origin_price) / origin_price) * 100
plt.style.use('seaborn-whitegrid')
linestyle = ['solid', 'dashed', 'dashdot', 'solid', 'dashed', 'dashdot',]
colors = ['#377eb8', '#ff7f00', '#4daf4a','#f781bf', '#a65628', '#984ea3']
brexit_date = 5+23/31
months = ['Jan','Feb','Mar','Apr','May','June','July','Aug','Sep','Oct','Nov','Dec']
numeric_months = [0,1,2,3,4,5,6,7,8,9,10,11]
fig = plt.figure(figsize=(15,8))
ax6 = fig.add_subplot(1,1,1)
df = uk_short
for i in range(6):
sector = sectors[i]
selected_rows = df[df['sector'] ==sector]
if i not in [0,2]:
ax6.plot(selected_rows['month'],selected_rows['pct_prices'],label=str(sector),lw=4,ls=linestyle[1],alpha=0.4,color=colors[i],zorder=1)
if i in [0,2]:
ax6.plot(selected_rows['month'],selected_rows['pct_prices'],label=str(sector),lw=4,ls=linestyle[3],alpha=0.9,color=colors[i],zorder=1)
x = selected_rows['month']
y = selected_rows['pct_prices']
y_bx = np.interp(brexit_date+1, x,y)
y_ey = np.interp(12, x,y)
x_scatter = [brexit_date,11]
y_scatter = [y_bx,y_ey]
ax6.scatter(x_scatter,y_scatter,lw=7,color='black',zorder=2)
##Text for energy and materials
selected_rows = df[df['sector'] ==sectors[2]]
x = selected_rows['month']
y = selected_rows['pct_prices']
y_bx = np.interp(brexit_date+1, x,y)
y_ey = np.interp(12, x,y)
##Brexit
ax6.annotate(str(round(y_bx)) + '\%',
xy=(brexit_date, y_bx+1),color='green',fontsize=15,xycoords='data',va="bottom",
ha="center",xytext=(brexit_date, y_bx+4),
arrowprops=dict(arrowstyle="->",color='black',lw=2),)
##Scatter at origin
ax6.scatter(0,0,lw=7,color='black',zorder=2)
##End of year
ax6.annotate(str(round(y_ey)) + '\%',
xy=(11, y_ey-1),color='green',fontsize=15,xycoords='data',va="bottom",
ha="center",xytext=(11, y_ey-7),
arrowprops=dict(arrowstyle="->",color='black',lw=2),)
##Text for consumer discretionary
selected_rows = df[df['sector'] ==sectors[0]]
x = selected_rows['month']
y = selected_rows['pct_prices']
y_bx = np.interp(brexit_date+1, x,y)
y_ey = np.interp(12, x,y)
##Brexit
ax6.annotate(str(round(y_bx)) + '\%',
xy=(brexit_date, y_bx+1),color=colors[0],fontsize=15,xycoords='data',va="bottom",
ha="center",xytext=(brexit_date, y_bx+4),
arrowprops=dict(arrowstyle="->",color='black',lw=2),)
##End of year
ax6.annotate(str(round(y_ey)) + '\%',
xy=(11, y_ey-1),color=colors[0],fontsize=15,xycoords='data',va="bottom",
ha="center",xytext=(11, y_ey-7),
arrowprops=dict(arrowstyle="->",color='black',lw=2),)
##Text for brexit date
ax6.axvline(brexit_date,0,0.02,color='black',lw=1.5)
ax6.text(brexit_date-0.3,-22,'23/06',fontsize=15,fontweight=575)
#Customizing x and y tick labels
ax6.set_ylim([-25,25])
ax6.set_yticks([-20,-10,0,10,20])
ax6.set_yticklabels(labels=[-20,-10,0,10,20],fontsize=15)
ax6.set_xlim([-0.1,11.1])
ax6.set_xticks(numeric_months)
ax6.set_xticklabels(labels=months,rotation=45,fontsize=15)
#Delete top and right spines
ax6.spines["right"].set_visible(False)
ax6.spines["top"].set_visible(False)
#Legends and Labels
ax6.legend(loc='upper left',fontsize=13.5)
ax6.set_xlabel('Month Of The Year \n Figure 1',fontsize=15,fontweight=600)
ax6.set_ylabel('% Changes',fontsize=15,fontweight=600)
#Set Title
ax6.set_title('UK Share Price Percentage Change \n by Sector (Daily, 2016)',fontweight=600,fontsize=25)
#Horizontal Line at 100
ax6.axhline(0,color='black',alpha=0.8,lw=2)
#Remove spines
ax6.spines['right'].set_visible(False)
ax6.spines['left'].set_visible(False)
ax6.spines['top'].set_visible(False)
ax6.spines['bottom'].set_visible(False)
plt.show()
long['date'] = long['date'].str[:10]
long['date'] = pd.to_datetime(long['date'], format='%Y-%m')
long_group = long.groupby(['country','date']).agg(np.mean).reset_index()
germany_long = long_group[long_group['country']=='germany']
france_long = long_group[long_group['country']=='france']
uk_long = long_group[long_group['country']=='united kingdom']
dataframes = [germany_long,france_long,uk_long]
for i in range(3):
df = dataframes[i]
df['pct_prices'] = df['price']
origin_price = df['price'].iloc[0]
df.loc[:,'pct_prices'] = ((df.loc[:,'pct_prices'] -origin_price) / origin_price)*100
plt.style.use('seaborn-whitegrid')
colors = ['#938DD2', '#82C0E9', 'crimson']
dataframes = [germany_long,france_long,uk_long]
names = ['Germany','France','United Kingdom']
years = [pd.Timestamp(2014,1,1),pd.Timestamp(2015,1,1),pd.Timestamp(2016,1,1),pd.Timestamp(2017,1,1),pd.Timestamp(2018,1,1),pd.Timestamp(2019,1,1)]
x_labels = ['2014','2015','2016','2017','2018','2019']
y_labels = list(range(-10,80,10))
fig = plt.figure(figsize=(15,8))
ax1 = fig.add_subplot(1,1,1)
for i in range(3):
df = dataframes[i]
ax1.plot(df['date'],df['pct_prices'],label=names[i],lw=6,zorder=1,color=colors[i])
#labels for x-axis
ax1.set_xticks(years)
ax1.set_xticklabels(labels=x_labels,rotation=0,fontsize=16,fontweight=575)
ax1.set_xlim(pd.Timestamp(2013,12,1),pd.Timestamp(2019,2,1))
#Horizontal line at 100
ax1.axhline(0,color='black',lw=2)
#Labels for y-axis
ax1.set_yticks(y_labels)
ax1.set_yticklabels(y_labels,fontsize=16,fontweight=575)
ax1.set_ylim([-9,79])
#Text for labeling countries
# plt.text(pd.Timestamp(2017,10,1),8,'United Kingdom',fontweight=600,fontsize=20)
# plt.text(pd.Timestamp(2017,11,1),37,'Germany',fontweight=600,fontsize=20)
# plt.text(pd.Timestamp(2017,10,1),62,'France',fontweight=600,fontsize=20)
#Initial index point at 100
ax1.scatter(pd.Timestamp(2014,1,1),0,lw=10,color='black',zorder=2)
#France end of 2018 index point
y_ey = france_long.loc[france_long['date'] == pd.Timestamp(2018,12,1),'pct_prices'].values
ax1.scatter(pd.Timestamp(2018,12,1),y_ey,lw=10,color='black',zorder=2)
ax1.text(pd.Timestamp(2018,11,20), 38, str(round(y_ey[0])) + '\%', fontsize=15)
#Germany end of 2018 index point
y_ey = germany_long.loc[germany_long['date'] == pd.Timestamp(2018,12,1),'pct_prices'].values
ax1.scatter(pd.Timestamp(2018,12,1),y_ey,lw=10,color='black',zorder=2)
ax1.text(pd.Timestamp(2018,12,1), 15, str(round(y_ey[0])) + '\%', fontsize=15)
#UK end of 2018 index point
y_ey = uk_long.loc[uk_long['date'] == pd.Timestamp(2018,12,1),'pct_prices'].values
ax1.scatter(pd.Timestamp(2018,12,1),y_ey,lw=10,color='black',zorder=2)
ax1.text(pd.Timestamp(2018,12,1), 1, str(round(y_ey[0])) + '\%', fontsize=15)
#Date of brexit
ax1.vlines(pd.Timestamp(2016,6,23),0,30,color='black',ls='--')
ax1.text(pd.Timestamp(2016,5,1),32,'Brexit',color='black',fontweight=700,fontsize=17)
#Date of drop
ax1.vlines(pd.Timestamp(2018,8,15),0,100,color='black',ls='--')
ax1.text(pd.Timestamp(2018,6,15),-5,'June-2018',fontweight=600,fontsize=13)
# remove spines
ax1.spines['right'].set_visible(False)
ax1.spines['left'].set_visible(False)
ax1.spines['top'].set_visible(False)
ax1.spines['bottom'].set_visible(False)
#Set title
ax1.set_ylabel('Percentage Changes', fontsize=20,fontweight=600)
ax1.set_title('UK, Germany and France Share Price Percentage Change \n (Monthly, 2014-2018)',fontweight=600,fontsize=20)
ax1.legend(loc='upper left',fontsize=20)
ax1.set_xlabel('Figure 2', fontsize = 12, fontweight = 600)
plt.show()
# extract the UK
long_uk = long.loc[long['country'] == 'united kingdom', :]
# create a working frame in which Germany and France will be replaced with eu
long_eu = long
# create a def to combine Germany and France into one entity
def france_germany(element):
"""
Rename 'france' and 'germany' to 'eu'; any other value becomes 'united kingdom'.
element: a single country name (str), applied element-wise via Series.apply
"""
if element in ['france','germany']:
return 'eu'
else:
return 'united kingdom'
long_eu['country'] = long_eu['country'].apply(france_germany)
# extract eu
long_eu = long_eu.loc[long_eu['country'] == 'eu', :]
# groupby date and sector, and calculate mean accordingly
long_uk_a = long_uk.groupby(['date', 'sector'])['price'].agg(np.mean).reset_index()
long_eu_a = long_eu.groupby(['date', 'sector'])['price'].agg(np.mean).reset_index()
# uk consumer discretionary
fin_cd_uk = long_uk_a.loc[long_uk_a['sector'] == 'consumer discretionary']
cd_uk = fin_cd_uk.iloc[[0,29,59], ]
cd_uk.loc[: , 'percentage_change'] = (cd_uk['price'].pct_change())*100
# eu consumer discretionary
fin_cd_eu = long_eu_a.loc[long_eu_a['sector'] == 'consumer discretionary']
cd_eu = fin_cd_eu.iloc[[0, 29,59], ]
cd_eu.loc[: , 'percentage_change'] = (cd_eu['price'].pct_change())*100
# relative strength of the UK discretionary sector
cd_uk_rel = cd_eu[['sector']]
cd_uk_rel['relative performance'] = cd_uk.percentage_change - cd_eu.percentage_change
cd_prb = cd_uk_rel.iloc[1, 1]
cd_pob = cd_uk_rel.iloc[2,1 ]
# uk energy and materials
fin_em_uk = long_uk_a.loc[long_uk_a['sector'] == 'energy and materials']
em_uk = fin_em_uk.iloc[[0,29,59], ]
em_uk.loc[: , 'percentage_change'] = (em_uk['price'].pct_change())*100
# eu energy and materials
fin_em_eu = long_eu_a.loc[long_eu_a['sector'] == 'energy and materials']
em_eu = fin_em_eu.iloc[[0, 29,59], ]
em_eu.loc[: , 'percentage_change'] = (em_eu['price'].pct_change())*100
# relative strength of the UK energy and materials
em_uk_rel = em_eu[['sector']]
em_uk_rel['relative performance'] = em_uk.percentage_change - em_eu.percentage_change
em_prb = em_uk_rel.iloc[1,1 ]
em_pob = em_uk_rel.iloc[2,1 ]
# uk industrials
fin_i_uk = long_uk_a.loc[long_uk_a['sector'] == 'industrials']
i_uk = fin_i_uk.iloc[[0,29,59], ]
i_uk.loc[: , 'percentage_change'] = (i_uk['price'].pct_change())*100
# eu industrials
fin_i_eu = long_eu_a.loc[long_eu_a['sector'] == 'industrials']
i_eu = fin_i_eu.iloc[[0, 29,59], ]
i_eu.loc[: , 'percentage_change'] = (i_eu['price'].pct_change())*100
# relative strength of the UK indrustrials
i_uk_rel = i_eu[['sector']]
i_uk_rel['relative performance'] = i_uk.percentage_change - i_eu.percentage_change
i_prb = i_uk_rel.iloc[1,1 ]
i_pob = i_uk_rel.iloc[2,1 ]
# uk consumer staples
fin_cs_uk = long_uk_a.loc[long_uk_a['sector'] == 'consumer staples']
cs_uk = fin_cs_uk.iloc[[0,29,59], ]
cs_uk.loc[: , 'percentage_change'] = (cs_uk['price'].pct_change())*100
# eu consumer staples
fin_cs_eu = long_eu_a.loc[long_eu_a['sector'] == 'consumer staples']
cs_eu = fin_cs_eu.iloc[[0, 29,59], ]
cs_eu.loc[: , 'percentage_change'] = (cs_eu['price'].pct_change())*100
# relative strength of the UK consumer staples
cs_uk_rel = cs_eu[['sector']]
cs_uk_rel['relative performance'] = cs_uk.percentage_change - cs_eu.percentage_change
cs_prb = cs_uk_rel.iloc[1,1 ]
cs_pob = cs_uk_rel.iloc[2,1 ]
# uk financials
fin_f_uk = long_uk_a.loc[long_uk_a['sector'] == 'financials']
f_uk = fin_f_uk.iloc[[0,29,59], ]
f_uk.loc[: , 'percentage_change'] = (f_uk['price'].pct_change())*100
# eu financials
fin_f_eu = long_eu_a.loc[long_eu_a['sector'] == 'financials']
f_eu = fin_f_eu.iloc[[0, 29,59], ]
f_eu.loc[: , 'percentage_change'] = (f_eu['price'].pct_change())*100
# relative strength of the UK financials
f_uk_rel = f_eu[['sector']]
f_uk_rel['relative performance'] = f_uk.percentage_change - f_eu.percentage_change
f_prb = f_uk_rel.iloc[1,1 ]
f_pob = f_uk_rel.iloc[2,1 ]
# uk utilities
fin_u_uk = long_uk_a.loc[long_uk_a['sector'] == 'utilities']
u_uk = fin_u_uk.iloc[[0,29,59], ]
u_uk.loc[: , 'percentage_change'] = (u_uk['price'].pct_change())*100
# eu utilities
fin_u_eu = long_eu_a.loc[long_eu_a['sector'] == 'utilities']
u_eu = fin_u_eu.iloc[[0, 29,59], ]
u_eu.loc[: , 'percentage_change'] = (u_eu['price'].pct_change())*100
# relative strength of the UK utilities
u_uk_rel = u_eu[['sector']]
u_uk_rel['relative performance'] = u_uk.percentage_change - u_eu.percentage_change
u_prb = u_uk_rel.iloc[1,1]
u_pob = u_uk_rel.iloc[2,1 ]
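# The six per-sector blocks above repeat the same computation; a minimal
# sketch of a helper that returns the UK-vs-EU relative performance for any
# sector (illustrative only; assumes `long_uk_a` and `long_eu_a` as defined
# above, with monthly rows so positions 0, 29 and 59 bracket the pre- and
# post-Brexit windows).
def relative_performance(sector_name):
    uk = long_uk_a.loc[long_uk_a['sector'] == sector_name].iloc[[0, 29, 59]]
    eu = long_eu_a.loc[long_eu_a['sector'] == sector_name].iloc[[0, 29, 59]]
    rel = (uk['price'].pct_change().values - eu['price'].pct_change().values) * 100
    return rel[1], rel[2]  # (pre-Brexit, post-Brexit) relative change, in percent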
# populate a dataframe to use in the pre-Brexit visualization
rel_str_prb = [cd_prb,em_prb,i_prb,cs_prb,f_prb,u_prb]
sectors = ['Consumer \n Discretionary', 'Energy and \n Materials', 'Industrials', 'Consumer \n Staples', 'Financials', 'Utilities']
res_preb = pd.DataFrame({'sector' : sectors, 'relative_performance_before_brexit' : rel_str_prb})
res_preb['colors'] = ['crimson' if x < 0 else 'olivedrab' for x in res_preb['relative_performance_before_brexit']]
res_preb.sort_values('relative_performance_before_brexit', inplace=True)
res_preb.reset_index(inplace=True)
# get_ipython().run_line_magic("%", " draw plot")
plt.style.use('seaborn-whitegrid')
# create plot
fig = plt.figure(figsize=(12,8), dpi= 80)
# create plot
ax2 = fig.add_subplot(1, 1, 1)
# add a horizontal line
ax2.hlines(y=res_preb.index, xmin=0, xmax=res_preb.relative_performance_before_brexit,
color=res_preb.colors, alpha=0.3, linewidth=5)
# scatter
ax2.scatter(res_preb.relative_performance_before_brexit, res_preb.index, s=450, alpha=.6, color=res_preb.colors)
for x, y, label in zip(res_preb.relative_performance_before_brexit, res_preb.index, res_preb.relative_performance_before_brexit):
t = ax2.text(x, y, int(label), horizontalalignment='center',
verticalalignment='center', fontdict={'color':'white'},fontweight=700,fontsize=12)
# axes
ax2.set_xticks([-80, -60, -40, -20, 0, 20, 40, 60, 80])
ax2.set_xticklabels([-80, -60, -40, -20, 0, 20, 40, 60, 80],fontsize=12,fontweight=575)
ax2.set_yticks(res_preb.index)
ax2.set_yticklabels(res_preb.sector,fontsize=12,fontweight=575)
ax2.set_title('Pre-Brexit UK Share Price Percentage Change\n Relative to EU (2014-2016)', fontsize=20,fontweight=600)
ax2.set_xlabel('Share prices, \% changes\n Figure 3',fontsize=17,fontweight=600)
# grid
ax2.grid(linestyle='--', alpha=0.5)
# remove spines
ax2.spines['right'].set_visible(False)
ax2.spines['left'].set_visible(False)
ax2.spines['top'].set_visible(False)
ax2.spines['bottom'].set_visible(False)
plt.show()
# populate a dataframe to use in the post-Brexit visualization
rel_str_pob = [cd_pob,em_pob,i_pob,cs_pob,f_pob,u_pob]
sectors = ['Consumer \n Discretionary', 'Energy and \n Materials', 'Industrials', 'Consumer \n Staples', 'Financials', 'Utilities']
res_prob = pd.DataFrame({'sector' : sectors, 'relative_performance_post_brexit' : rel_str_pob})
res_prob['colors'] = ['crimson' if x < 0 else 'olivedrab' for x in res_prob['relative_performance_post_brexit']]
res_prob.sort_values('relative_performance_post_brexit', inplace=True)
res_prob.reset_index(inplace=True)
pd.set_option('precision', 1)
# get_ipython().run_line_magic("%", " draw plot")
# create plot
fig = plt.figure(figsize=(12,8), dpi= 80)
# create plot
ax4 = fig.add_subplot(1, 1, 1)
# scatter
ax4.scatter(res_prob.relative_performance_post_brexit, res_prob.index, s=450, alpha=.6, color=res_prob.colors)
for x, y, label in zip(res_prob.relative_performance_post_brexit, res_prob.index, res_prob.relative_performance_post_brexit):
t = ax4.text(x, y, int(label), horizontalalignment='center',
verticalalignment='center', fontdict={'color':'white'},fontweight=700,fontsize=12)
# add a horizontal line
ax4.hlines(y=res_preb.index, xmin=0, xmax=res_prob.relative_performance_post_brexit,
color=res_preb.colors, alpha=0.3, linewidth=5)
# axes
ax4.set_xticks([-80, -60, -40, -20, 0, 20, 40, 60, 80])
ax4.set_xticklabels([-80, -60, -40, -20, 0, 20, 40, 60, 80],fontsize=12,fontweight=575)
ax4.set_yticks(res_prob.index)
ax4.set_yticklabels(res_prob.sector,fontsize=12,fontweight=575)
ax4.set_title('Post-Brexit UK Share Price Percentage Change\n Relative to EU (2016-2018)', fontsize=20,fontweight = 600)
ax4.set_xlabel('Share prices, % changes \n Figure 4',fontsize=17,fontweight=600)
# add patch
p1 = patches.Rectangle((-50, 0.65), width=25, height=0.7, alpha=.2, facecolor='red')
p2 = patches.Rectangle((-3, 3.67), width=25, height=0.7, alpha=.2, facecolor='green')
plt.gca().add_patch(p1)
plt.gca().add_patch(p2)
# annotations
ax4.annotate('Biggest gain',
xy=(-2, 4), xytext=(-40, 4), xycoords='data',
fontsize=14, ha='center', va='center', fontweight=600,
arrowprops=dict(arrowstyle='->',
lw=1.0, color='grey'), color='black')
ax4.annotate('Biggest loss',
xy=(0, 1), xytext=(40, 1), xycoords='data',
fontsize=14, ha='center', va='center', fontweight=600,
arrowprops=dict(arrowstyle='->',
lw=1.0, color='grey'), color='black')
# remove spines
ax4.spines['right'].set_visible(False)
ax4.spines['left'].set_visible(False)
ax4.spines['top'].set_visible(False)
ax4.spines['bottom'].set_visible(False)
# grid
ax4.grid(linestyle='--', alpha=0.5)
# ax4.xlim(-2.5, 2.5)
plt.show()
# groupby year and sector
long = long_uk.groupby([pd.Grouper(key = 'date', freq='Y'), 'sector'])
# sum assets based on year and sector
long = long['assets'].agg(np.sum).reset_index()
# Pre-brexit
# -- extract pre-brexit years (2014 - 2016)
start = dt.datetime.strptime('2014-12-31', '%Y-%m-%d')
end = dt.datetime.strptime('2016-12-31', '%Y-%m-%d')
long_preb = long.loc[(long.date >= start) & (long.date <= end), :]
# -- average assets for each sector for 3 years
long_preb = long_preb.groupby(['sector'])
long_preb = long_preb['assets'].agg(np.mean).reset_index()
# -- calculate percentage of each element of the total
long_preb['percent'] = (long_preb['assets'] / long_preb['assets'].sum()) * 100
# Post-brexit
# --extract post-brexit years (2017 - 2018)
start_1 = dt.datetime.strptime('2017-12-31', '%Y-%m-%d')
end_1 = dt.datetime.strptime('2018-12-31', '%Y-%m-%d')
long_posb = long.loc[(long.date >= start_1) & (long.date <= end_1), :]
# -- average the rest
long_posb = long_posb.groupby(['sector'])
long_posb = long_posb['assets'].agg(np.mean).reset_index()
# -- calculate percentage of each element of the total
long_posb['percent'] = (long_posb['assets'] / long_posb['assets'].sum()) * 100
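# percent-of-total sanity check: two sectors with average assets of 40 and 60 would come
# out as 40.0% and 60.0% respectively.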
# create plot
fig, ax3 = plt.subplots(figsize=(15, 8))
# generate data to fill
a = [str(round(i, 1)) for i in long_preb['percent']]
labels = list(long_preb['sector'])
assets = a
# create a donut chart
wedges, texts = ax3.pie(a, wedgeprops=dict(width=0.5), startangle=-40)
# create textboxes and annotations
bbox_props = dict(boxstyle="square,pad=0.3", fc="w", ec="k", lw=0.72)
kw = dict(arrowprops=dict(arrowstyle="-", linewidth=2),
bbox=bbox_props, zorder=0, va="center")
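# Label placement for the donut: cos/sin of each wedge's mid-angle give a point on the unit
# circle, the sign of its x-coordinate decides whether the label sits to the left or right
# of the chart, and the elbow connector ("angle" connectionstyle) is bent to meet that point.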
for i, p in enumerate(wedges):
ang = (p.theta2 - p.theta1)/2. + p.theta1
y = np.sin(np.deg2rad(ang))
x = np.cos(np.deg2rad(ang))
horizontalalignment = {-1: "right", 1: "left"}[int(np.sign(x))]
connectionstyle = "angle,angleA=0,angleB={}".format(ang)
kw["arrowprops"].update({"connectionstyle": connectionstyle})
ax3.annotate(labels[i].upper() + ':' + a[i] + '%', xy=(x, y), fontsize=12, fontweight=575, xytext=(1.35*np.sign(x), 1.4*y),
horizontalalignment=horizontalalignment, **kw)
# set title
ax3.set_title("Pre-Brexit UK Average Asset \n Distribution by Sector (2014-2016)",fontsize=20,fontweight=600)
ax3.set_xlabel('Figure 5',fontsize=15,fontweight=600)
plt.show()
# create plot
fig, ax5 = plt.subplots(figsize=(8, 6),)
# generate data to fill
a = [str(round(i, 1)) for i in long_posb['percent']]
labels = list(long_posb['sector'])
assets = a
# create a donut chart
wedges, texts = ax5.pie(a, wedgeprops=dict(width=0.5), startangle=-40)
# create textboxes and annotations
bbox_props = dict(boxstyle="square,pad=0.3", fc="w", ec="k", lw=0.72)
kw = dict(arrowprops=dict(arrowstyle="-", linewidth=2),
bbox=bbox_props, zorder=0, va="center")
for i, p in enumerate(wedges):
ang = (p.theta2 - p.theta1)/2. + p.theta1
y = np.sin(np.deg2rad(ang))
x = np.cos(np.deg2rad(ang))
horizontalalignment = {-1: "right", 1: "left"}[int(np.sign(x))]
connectionstyle = "angle,angleA=0,angleB={}".format(ang)
kw["arrowprops"].update({"connectionstyle": connectionstyle})
ax5.annotate(labels[i].upper() + ':' + a[i] + '%', xy=(x, y), fontsize=11, fontweight=575, xytext=(1.35*np.sign(x), 1.4*y),
horizontalalignment=horizontalalignment, **kw)
# set title
ax5.set_title("Post-Brexit UK Average Asset \n Distribution by Sector (2017-2018)",fontsize=20,fontweight=600)
ax5.set_xlabel('Figure 6',fontsize=15,fontweight=600)
plt.show()
plt.style.use('seaborn-whitegrid')
import matplotlib.gridspec as gs
fig1 = plt.figure(figsize=(17,40))
gs1 = gs.GridSpec(nrows=4,ncols=2)
## First Plot
ax1 = plt.subplot(gs1[0, 0:2])
colors = ['#938DD2', '#82C0E9', 'crimson']
dataframes = [germany_long,france_long,uk_long]
names = ['Germany','France','United Kingdom']
years = [pd.Timestamp(2014,1,1),pd.Timestamp(2015,1,1),pd.Timestamp(2016,1,1),pd.Timestamp(2017,1,1),pd.Timestamp(2018,1,1),pd.Timestamp(2019,1,1)]
x_labels = ['2014','2015','2016','2017','2018','2019']
y_labels = list(range(-10,80,10))
for i in range(3):
df = dataframes[i]
ax1.plot(df['date'],df['pct_prices'],label=names[i],lw=6,zorder=1,color=colors[i])
#labels for x-axis
ax1.set_xticks(years)
ax1.set_xticklabels(labels=x_labels,rotation=0,fontsize=16,fontweight=575)
ax1.set_xlim(pd.Timestamp(2013,12,1),pd.Timestamp(2019,2,1))
#Horizontal line at 100
ax1.axhline(0,color='black',lw=2)
#Labels for y-axis
ax1.set_yticks(y_labels)
ax1.set_yticklabels(y_labels,fontsize=16,fontweight=575)
ax1.set_ylim([-9,79])
#Initial index point at 100
ax1.scatter(pd.Timestamp(2014,1,1),0,lw=10,color='black',zorder=2)
#France end of 2018 index point
y_ey = france_long.loc[france_long['date'] == pd.Timestamp(2018,12,1),'pct_prices'].values
ax1.scatter(pd.Timestamp(2018,12,1),y_ey,lw=10,color='black',zorder=2)
ax1.text(pd.Timestamp(2018,11,20),38,str(round(y_ey[0]))+'%',fontsize=15)
#Germany end of 2018 index point
y_ey = germany_long.loc[germany_long['date'] == pd.Timestamp(2018,12,1),'pct_prices'].values
ax1.scatter(pd.Timestamp(2018,12,1),y_ey,lw=10,color='black',zorder=2)
ax1.text(pd.Timestamp(2018,12,1),15,str(round(y_ey[0]))+'%',fontsize=15)
#UK end of 2018 index point
y_ey = uk_long.loc[uk_long['date'] == pd.Timestamp(2018,12,1),'pct_prices'].values
ax1.scatter(pd.Timestamp(2018,12,1),y_ey,lw=10,color='black',zorder=2)
ax1.text(pd.Timestamp(2018,12,1),1,str(round(y_ey[0]))+'%',fontsize=15)
#Date of brexit
ax1.vlines(pd.Timestamp(2016,6,23),0,30,color='black',ls='--')
ax1.text(pd.Timestamp(2016,5,1),32,'Brexit',color='black',fontweight=700,fontsize=17)
#Date of drop
ax1.vlines(pd.Timestamp(2018,8,15),0,100,color='black',ls='--')
ax1.text(pd.Timestamp(2018,6,15),-5,'June-2018',fontweight=600,fontsize=13)
#Set title
ax1.set_ylabel('Percentage Changes', fontsize=20,fontweight=600)
ax1.set_title('UK, Germany and France Share Price Percentage Change \n (Monthly, 2014-2018)',fontweight=600,fontsize=20)
ax1.legend(loc='upper left',fontsize=20)
ax1.spines['right'].set_visible(False)
ax1.spines['left'].set_visible(False)
ax1.spines['top'].set_visible(False)
ax1.spines['bottom'].set_visible(False)
## Second Plot
ax2 = plt.subplot(gs1[1, 0])
ax2.hlines(y=res_preb.index, xmin=0, xmax=res_preb.relative_performance_before_brexit,
color=res_preb.colors, alpha=0.3, linewidth=5)
# scatter
ax2.scatter(res_preb.relative_performance_before_brexit, res_preb.index, s=450, alpha=.6, color=res_preb.colors)
for x, y, label in zip(res_preb.relative_performance_before_brexit, res_preb.index, res_preb.relative_performance_before_brexit):
t = ax2.text(x, y, int(label), horizontalalignment='center',
verticalalignment='center', fontdict={'color':'white'})
# axes
ax2.set_xticks([-80, -60, -40, -20, 0, 20, 40, 60, 80])
ax2.set_xticklabels([-80, -60, -40, -20, 0, 20, 40, 60, 80],fontsize=11,fontweight=575)
ax2.set_yticks(res_preb.index)
ax2.set_yticklabels(res_preb.sector,fontsize=11,fontweight=575)
ax2.set_title('Pre-Brexit UK Share Price Percentage Change\n Relative to EU (2014-2016)', fontsize=20,fontweight=600)
ax2.set_xlabel('Share prices, % changes',fontsize=17,fontweight=600)
# grid
ax2.grid(linestyle='--', alpha=0.5)
# remove spines
ax2.spines['right'].set_visible(False)
ax2.spines['left'].set_visible(False)
ax2.spines['top'].set_visible(False)
ax2.spines['bottom'].set_visible(False)
## Third Plot
ax3 = plt.subplot(gs1[1, 1])
a = [str(round(i, 1)) for i in long_preb['percent']]
labels = list(long_preb['sector'])
assets = a
# create a donut chart
wedges, texts = ax3.pie(a, wedgeprops=dict(width=0.5), startangle=-40)
# create textboxes and annotations
bbox_props = dict(boxstyle="square,pad=0.3", fc="w", ec="k", lw=0.72)
kw = dict(arrowprops=dict(arrowstyle="-", linewidth=2),
bbox=bbox_props, zorder=0, va="center")
for i, p in enumerate(wedges):
ang = (p.theta2 - p.theta1)/2. + p.theta1
y = np.sin(np.deg2rad(ang))
x = np.cos(np.deg2rad(ang))
horizontalalignment = {-1: "right", 1: "left"}[int(np.sign(x))]
connectionstyle = "angle,angleA=0,angleB={}".format(ang)
kw["arrowprops"].update({"connectionstyle": connectionstyle})
ax3.annotate(labels[i].upper() + ':' + a[i] + '%', xy=(x, y), fontsize=12, fontweight=575, xytext=(1.35*np.sign(x), 1.4*y),
horizontalalignment=horizontalalignment, **kw)
# set title
ax3.set_title("Pre-Brexit UK Average Asset \n Distribution by Sector (2014-2016)",fontsize=20,fontweight=600)
## Fourth Plot
ax4 = plt.subplot(gs1[2, 0])
ax4.scatter(res_prob.relative_performance_post_brexit, res_prob.index, s=450, alpha=.6, color=res_prob.colors)
for x, y, label in zip(res_prob.relative_performance_post_brexit, res_prob.index, res_prob.relative_performance_post_brexit):
t = ax4.text(x, y, int(label), horizontalalignment='center',
verticalalignment='center', fontdict={'color':'white'})
# add a horizontal line
ax4.hlines(y=res_prob.index, xmin=0, xmax=res_prob.relative_performance_post_brexit,
color=res_prob.colors, alpha=0.3, linewidth=5)
# axes
ax4.set_xticks([-80, -60, -40, -20, 0, 20, 40, 60, 80])
ax4.set_xticklabels([-80, -60, -40, -20, 0, 20, 40, 60, 80],fontsize=11,fontweight=575)
ax4.set_yticks(res_prob.index)
ax4.set_yticklabels(res_prob.sector,fontsize=11,fontweight=575)
ax4.set_title('Post-Brexit UK Share Price Percentage Change\n Relative to EU (2016-2018)', fontsize=20,fontweight = 600)
ax4.set_xlabel('Share prices, % changes',fontsize=17,fontweight=600)
# add patch
p1 = patches.Rectangle((-50, 0.65), width=25, height=0.7, alpha=.2, facecolor='red')
p2 = patches.Rectangle((-3, 3.67), width=25, height=0.7, alpha=.2, facecolor='green')
plt.gca().add_patch(p1)
plt.gca().add_patch(p2)
# annotations
ax4.annotate('Biggest gain',
xy=(-2, 4), xytext=(-40, 4), xycoords='data',
fontsize=14, ha='center', va='center', fontweight=600,
arrowprops=dict(arrowstyle='->',
lw=1.0, color='grey'), color='black')
ax4.annotate('Biggest loss',
xy=(0, 1), xytext=(40, 1), xycoords='data',
fontsize=14, ha='center', va='center', fontweight=600,
arrowprops=dict(arrowstyle='->',
lw=1.0, color='grey'), color='black')
# remove spines
ax4.spines['right'].set_visible(False)
ax4.spines['left'].set_visible(False)
ax4.spines['top'].set_visible(False)
ax4.spines['bottom'].set_visible(False)
# grid
ax4.grid(linestyle='--', alpha=0.5)
## Fifth Plot
plt.style.use('seaborn-white')
ax5 = plt.subplot(gs1[2, 1])
a = [str(round(i, 1)) for i in long_posb['percent']]
labels = list(long_posb['sector'])
assets = a
# create a donut chart
wedges, texts = ax5.pie(a, wedgeprops=dict(width=0.5), startangle=-40)
# create textboxes and annotations
bbox_props = dict(boxstyle="square,pad=0.3", fc="w", ec="k", lw=0.72)
kw = dict(arrowprops=dict(arrowstyle="-", linewidth=2),
bbox=bbox_props, zorder=0, va="center")
for i, p in enumerate(wedges):
ang = (p.theta2 - p.theta1)/2. + p.theta1
y = np.sin(np.deg2rad(ang))
x = np.cos(np.deg2rad(ang))
horizontalalignment = {-1: "right", 1: "left"}[int(np.sign(x))]
connectionstyle = "angle,angleA=0,angleB={}".format(ang)
kw["arrowprops"].update({"connectionstyle": connectionstyle})
ax5.annotate(labels[i].upper() + ':' + a[i] + '%', xy=(x, y), fontsize=11, fontweight=575, xytext=(1.35*np.sign(x), 1.4*y),
horizontalalignment=horizontalalignment, **kw)
# set title
ax5.set_title("Post-Brexit UK Average Asset \n Distribution by Sector (2017-2018)",fontsize=20,fontweight=600)
## Sixth Plot
plt.style.use('seaborn-whitegrid')
ax6 = plt.subplot(gs1[3, 0:2])
linestyle = ['solid', 'dashed', 'dashdot', 'solid', 'dashed', 'dashdot',]
colors = ['#377eb8', '#ff7f00', '#4daf4a','#f781bf', '#a65628', '#984ea3']
sectors = short['sector'].unique()
brexit_date = 5+23/31
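# the month axis is zero-based (Jan = 0), so 5 + 23/31 places the marker at 23 June 2016,
# the date of the referendum.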
months = ['Jan','Feb','Mar','Apr','May','June','July','Aug','Sep','Oct','Nov','Dec']
numeric_months = [0,1,2,3,4,5,6,7,8,9,10,11]
df = uk_short
for i in range(6):
sector = sectors[i]
selected_rows = df[df['sector'] ==sector]
if i not in [0,2]:
ax6.plot(selected_rows['month'],selected_rows['pct_prices'],label=str(sector),lw=4,ls=linestyle[1],alpha=0.6,color=colors[i],zorder=1)
if i in [0,2]:
ax6.plot(selected_rows['month'],selected_rows['pct_prices'],label=str(sector),lw=4,ls=linestyle[3],alpha=0.9,color=colors[i],zorder=1)
x = selected_rows['month']
y = selected_rows['pct_prices']
y_bx = np.interp(brexit_date+1, x,y)
y_ey = np.interp(12, x,y)
x_scatter = [brexit_date,11]
y_scatter = [y_bx,y_ey]
ax6.scatter(x_scatter,y_scatter,lw=7,color='black',zorder=2)
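# np.interp evaluates each piecewise-linear monthly series at the (fractional) Brexit date
# and at year end, so the black markers land exactly on the plotted lines.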
##Text for energy and materials
selected_rows = df[df['sector'] ==sectors[2]]
x = selected_rows['month']
y = selected_rows['pct_prices']
y_bx = np.interp(brexit_date+1, x,y)
y_ey = np.interp(12, x,y)
##Brexit
ax6.annotate(str(round(y_bx))+'%',
xy=(brexit_date, y_bx+1),color='green',fontsize=15,xycoords='data',va="bottom",
ha="center",xytext=(brexit_date, y_bx+4),
arrowprops=dict(arrowstyle="->",color='black',lw=2),)
##Scatter at origin
ax6.scatter(0,0,lw=7,color='black',zorder=2)
##End of year
ax6.annotate(str(round(y_ey))+'%',
xy=(11, y_ey-1),color='green',fontsize=15,xycoords='data',va="bottom",
ha="center",xytext=(11, y_ey-7),
arrowprops=dict(arrowstyle="->",color='black',lw=2),)
##Text for consumer discretionary
selected_rows = df[df['sector'] ==sectors[0]]
x = selected_rows['month']
y = selected_rows['pct_prices']
y_bx = np.interp(brexit_date+1, x,y)
y_ey = np.interp(12, x,y)
##Brexit
ax6.annotate(str(round(y_bx))+'%',
xy=(brexit_date, y_bx+1),color=colors[0],fontsize=15,xycoords='data',va="bottom",
ha="center",xytext=(brexit_date, y_bx+4),
arrowprops=dict(arrowstyle="->",color='black',lw=2),)
##End of year
ax6.annotate(str(round(y_ey))+'%',
xy=(11, y_ey-1),color=colors[0],fontsize=15,xycoords='data',va="bottom",
ha="center",xytext=(11, y_ey-7),
arrowprops=dict(arrowstyle="->",color='black',lw=2),)
##Text for brexit date
ax6.axvline(brexit_date,0,0.02,color='black',lw=1.5)
ax6.text(brexit_date-0.3,-22,'23/06',fontsize=15,fontweight=575)
#Customizing x and y tick labels
ax6.set_ylim([-25,25])
ax6.set_yticks([-20,-10,0,10,20])
ax6.set_yticklabels(labels=[-20,-10,0,10,20],fontsize=15,fontweight=575)
ax6.set_xlim([-0.1,11.1])
ax6.set_xticks(numeric_months)
ax6.set_xticklabels(labels=months,rotation=45,fontsize=15,fontweight=575)
#Delete top and right spines
ax6.spines["right"].set_visible(False)
ax6.spines["top"].set_visible(False)
#Legends and Labels
ax6.legend(loc='upper left',fontsize=13)
ax6.set_xlabel('Month Of The Year',fontsize=15,fontweight=600)
ax6.set_ylabel('% Changes',fontsize=15,fontweight=600)
#Set Title
ax6.set_title('UK Share Price Percentage Change \n by Sector (Daily, 2016)',fontweight=600,fontsize=22)
ax6.spines['right'].set_visible(False)
ax6.spines['left'].set_visible(False)
ax6.spines['top'].set_visible(False)
ax6.spines['bottom'].set_visible(False)
#Horizontal Line at 100
ax6.axhline(0,color='black',alpha=0.8,lw=2)
plt.show()
28efe588708093fb6d72eb1debb8e5d9753a5e1a | 111,059 | py | Python | msgraph-cli-extensions/beta/devicescloudprint_beta/azext_devicescloudprint_beta/generated/custom.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | ["MIT"] | null | null | null
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-lines
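# The generated command implementations below all follow the same pattern: each function
# receives the SDK client plus the flattened CLI options, rebuilds the nested Microsoft
# Graph request body (e.g. the 'capabilities', 'settings' and 'device_health' sub-objects)
# from those flat parameters, and delegates to the matching client method
# (get_* / create_* / update_* / delete_* / list_*).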
def devicescloudprint_print_print_show_print(client,
select=None,
expand=None):
return client.get_print(select=select,
expand=expand)
def devicescloudprint_print_print_update_print(client,
connectors=None,
operations=None,
printers=None,
printer_shares=None,
reports=None,
services=None,
shares=None,
task_definitions=None,
document_conversion_enabled=None):
body = {}
body['connectors'] = connectors
body['operations'] = operations
body['printers'] = printers
body['printer_shares'] = printer_shares
body['reports'] = reports
body['services'] = services
body['shares'] = shares
body['task_definitions'] = task_definitions
body['settings'] = {}
body['settings']['document_conversion_enabled'] = document_conversion_enabled
return client.update_print(body=body)
def devicescloudprint_print_create_connector(client,
id_=None,
app_version=None,
display_name=None,
fully_qualified_domain_name=None,
location=None,
name=None,
operating_system=None,
registered_date_time=None,
last_connection_time=None):
body = {}
body['id'] = id_
body['app_version'] = app_version
body['display_name'] = display_name
body['fully_qualified_domain_name'] = fully_qualified_domain_name
body['location'] = location
body['name'] = name
body['operating_system'] = operating_system
body['registered_date_time'] = registered_date_time
body['device_health'] = {}
body['device_health']['last_connection_time'] = last_connection_time
return client.create_connectors(body=body)
def devicescloudprint_print_create_operation(client,
id_=None,
created_date_time=None,
status=None):
body = {}
body['id'] = id_
body['created_date_time'] = created_date_time
body['status'] = status
return client.create_operations(body=body)
def devicescloudprint_print_create_printer(client,
id_=None,
defaults=None,
display_name=None,
is_accepting_jobs=None,
location=None,
manufacturer=None,
model=None,
name=None,
status=None,
jobs=None,
bottom_margins=None,
collation=None,
color_modes=None,
content_types=None,
copies_per_job=None,
dpis=None,
duplex_modes=None,
feed_directions=None,
feed_orientations=None,
finishings=None,
input_bins=None,
is_color_printing_supported=None,
is_page_range_supported=None,
left_margins=None,
media_colors=None,
media_sizes=None,
media_types=None,
multipage_layouts=None,
orientations=None,
output_bins=None,
pages_per_sheet=None,
qualities=None,
right_margins=None,
scalings=None,
supported_color_configurations=None,
supported_copies_per_job=None,
supported_document_mime_types=None,
supported_duplex_configurations=None,
supported_finishings=None,
supported_media_colors=None,
supported_media_sizes=None,
supported_media_types=None,
supported_orientations=None,
supported_output_bins=None,
supported_pages_per_sheet=None,
supported_presentation_directions=None,
supported_print_qualities=None,
supports_fit_pdf_to_page=None,
top_margins=None,
accepting_jobs=None,
is_shared=None,
registered_date_time=None,
allowed_groups=None,
allowed_users=None,
connectors=None,
share=None,
shares=None,
task_triggers=None):
body = {}
body['id'] = id_
body['defaults'] = defaults
body['display_name'] = display_name
body['is_accepting_jobs'] = is_accepting_jobs
body['location'] = location
body['manufacturer'] = manufacturer
body['model'] = model
body['name'] = name
body['status'] = status
body['jobs'] = jobs
body['capabilities'] = {}
body['capabilities']['bottom_margins'] = bottom_margins
body['capabilities']['collation'] = collation
body['capabilities']['color_modes'] = color_modes
body['capabilities']['content_types'] = content_types
body['capabilities']['copies_per_job'] = copies_per_job
body['capabilities']['dpis'] = dpis
body['capabilities']['duplex_modes'] = duplex_modes
body['capabilities']['feed_directions'] = feed_directions
body['capabilities']['feed_orientations'] = feed_orientations
body['capabilities']['finishings'] = finishings
body['capabilities']['input_bins'] = input_bins
body['capabilities']['is_color_printing_supported'] = is_color_printing_supported
body['capabilities']['is_page_range_supported'] = is_page_range_supported
body['capabilities']['left_margins'] = left_margins
body['capabilities']['media_colors'] = media_colors
body['capabilities']['media_sizes'] = media_sizes
body['capabilities']['media_types'] = media_types
body['capabilities']['multipage_layouts'] = multipage_layouts
body['capabilities']['orientations'] = orientations
body['capabilities']['output_bins'] = output_bins
body['capabilities']['pages_per_sheet'] = pages_per_sheet
body['capabilities']['qualities'] = qualities
body['capabilities']['right_margins'] = right_margins
body['capabilities']['scalings'] = scalings
body['capabilities']['supported_color_configurations'] = supported_color_configurations
body['capabilities']['supported_copies_per_job'] = supported_copies_per_job
body['capabilities']['supported_document_mime_types'] = supported_document_mime_types
body['capabilities']['supported_duplex_configurations'] = supported_duplex_configurations
body['capabilities']['supported_finishings'] = supported_finishings
body['capabilities']['supported_media_colors'] = supported_media_colors
body['capabilities']['supported_media_sizes'] = supported_media_sizes
body['capabilities']['supported_media_types'] = supported_media_types
body['capabilities']['supported_orientations'] = supported_orientations
body['capabilities']['supported_output_bins'] = supported_output_bins
body['capabilities']['supported_pages_per_sheet'] = supported_pages_per_sheet
body['capabilities']['supported_presentation_directions'] = supported_presentation_directions
body['capabilities']['supported_print_qualities'] = supported_print_qualities
body['capabilities']['supports_fit_pdf_to_page'] = supports_fit_pdf_to_page
body['capabilities']['top_margins'] = top_margins
body['accepting_jobs'] = accepting_jobs
body['is_shared'] = is_shared
body['registered_date_time'] = registered_date_time
body['allowed_groups'] = allowed_groups
body['allowed_users'] = allowed_users
body['connectors'] = connectors
body['share'] = share
body['shares'] = shares
body['task_triggers'] = task_triggers
return client.create_printers(body=body)
def devicescloudprint_print_create_printer_share(client,
id_=None,
defaults=None,
display_name=None,
is_accepting_jobs=None,
location=None,
manufacturer=None,
model=None,
name=None,
status=None,
jobs=None,
bottom_margins=None,
collation=None,
color_modes=None,
content_types=None,
copies_per_job=None,
dpis=None,
duplex_modes=None,
feed_directions=None,
feed_orientations=None,
finishings=None,
input_bins=None,
is_color_printing_supported=None,
is_page_range_supported=None,
left_margins=None,
media_colors=None,
media_sizes=None,
media_types=None,
multipage_layouts=None,
orientations=None,
output_bins=None,
pages_per_sheet=None,
qualities=None,
right_margins=None,
scalings=None,
supported_color_configurations=None,
supported_copies_per_job=None,
supported_document_mime_types=None,
supported_duplex_configurations=None,
supported_finishings=None,
supported_media_colors=None,
supported_media_sizes=None,
supported_media_types=None,
supported_orientations=None,
supported_output_bins=None,
supported_pages_per_sheet=None,
supported_presentation_directions=None,
supported_print_qualities=None,
supports_fit_pdf_to_page=None,
top_margins=None,
allow_all_users=None,
created_date_time=None,
allowed_groups=None,
allowed_users=None,
printer=None):
body = {}
body['id'] = id_
body['defaults'] = defaults
body['display_name'] = display_name
body['is_accepting_jobs'] = is_accepting_jobs
body['location'] = location
body['manufacturer'] = manufacturer
body['model'] = model
body['name'] = name
body['status'] = status
body['jobs'] = jobs
body['capabilities'] = {}
body['capabilities']['bottom_margins'] = bottom_margins
body['capabilities']['collation'] = collation
body['capabilities']['color_modes'] = color_modes
body['capabilities']['content_types'] = content_types
body['capabilities']['copies_per_job'] = copies_per_job
body['capabilities']['dpis'] = dpis
body['capabilities']['duplex_modes'] = duplex_modes
body['capabilities']['feed_directions'] = feed_directions
body['capabilities']['feed_orientations'] = feed_orientations
body['capabilities']['finishings'] = finishings
body['capabilities']['input_bins'] = input_bins
body['capabilities']['is_color_printing_supported'] = is_color_printing_supported
body['capabilities']['is_page_range_supported'] = is_page_range_supported
body['capabilities']['left_margins'] = left_margins
body['capabilities']['media_colors'] = media_colors
body['capabilities']['media_sizes'] = media_sizes
body['capabilities']['media_types'] = media_types
body['capabilities']['multipage_layouts'] = multipage_layouts
body['capabilities']['orientations'] = orientations
body['capabilities']['output_bins'] = output_bins
body['capabilities']['pages_per_sheet'] = pages_per_sheet
body['capabilities']['qualities'] = qualities
body['capabilities']['right_margins'] = right_margins
body['capabilities']['scalings'] = scalings
body['capabilities']['supported_color_configurations'] = supported_color_configurations
body['capabilities']['supported_copies_per_job'] = supported_copies_per_job
body['capabilities']['supported_document_mime_types'] = supported_document_mime_types
body['capabilities']['supported_duplex_configurations'] = supported_duplex_configurations
body['capabilities']['supported_finishings'] = supported_finishings
body['capabilities']['supported_media_colors'] = supported_media_colors
body['capabilities']['supported_media_sizes'] = supported_media_sizes
body['capabilities']['supported_media_types'] = supported_media_types
body['capabilities']['supported_orientations'] = supported_orientations
body['capabilities']['supported_output_bins'] = supported_output_bins
body['capabilities']['supported_pages_per_sheet'] = supported_pages_per_sheet
body['capabilities']['supported_presentation_directions'] = supported_presentation_directions
body['capabilities']['supported_print_qualities'] = supported_print_qualities
body['capabilities']['supports_fit_pdf_to_page'] = supports_fit_pdf_to_page
body['capabilities']['top_margins'] = top_margins
body['allow_all_users'] = allow_all_users
body['created_date_time'] = created_date_time
body['allowed_groups'] = allowed_groups
body['allowed_users'] = allowed_users
body['printer'] = printer
return client.create_printer_shares(body=body)
def devicescloudprint_print_create_report(client,
id_=None,
application_sign_in_detailed_summary=None,
credential_user_registration_details=None,
user_credential_usage_details=None,
daily_print_usage_summaries_by_printer=None,
daily_print_usage_summaries_by_user=None,
monthly_print_usage_summaries_by_printer=None,
monthly_print_usage_summaries_by_user=None):
body = {}
body['id'] = id_
body['application_sign_in_detailed_summary'] = application_sign_in_detailed_summary
body['credential_user_registration_details'] = credential_user_registration_details
body['user_credential_usage_details'] = user_credential_usage_details
body['daily_print_usage_summaries_by_printer'] = daily_print_usage_summaries_by_printer
body['daily_print_usage_summaries_by_user'] = daily_print_usage_summaries_by_user
body['monthly_print_usage_summaries_by_printer'] = monthly_print_usage_summaries_by_printer
body['monthly_print_usage_summaries_by_user'] = monthly_print_usage_summaries_by_user
return client.create_reports(body=body)
def devicescloudprint_print_create_service(client,
id_=None,
endpoints=None):
body = {}
body['id'] = id_
body['endpoints'] = endpoints
return client.create_services(body=body)
def devicescloudprint_print_create_share(client,
id_=None,
defaults=None,
display_name=None,
is_accepting_jobs=None,
location=None,
manufacturer=None,
model=None,
name=None,
status=None,
jobs=None,
bottom_margins=None,
collation=None,
color_modes=None,
content_types=None,
copies_per_job=None,
dpis=None,
duplex_modes=None,
feed_directions=None,
feed_orientations=None,
finishings=None,
input_bins=None,
is_color_printing_supported=None,
is_page_range_supported=None,
left_margins=None,
media_colors=None,
media_sizes=None,
media_types=None,
multipage_layouts=None,
orientations=None,
output_bins=None,
pages_per_sheet=None,
qualities=None,
right_margins=None,
scalings=None,
supported_color_configurations=None,
supported_copies_per_job=None,
supported_document_mime_types=None,
supported_duplex_configurations=None,
supported_finishings=None,
supported_media_colors=None,
supported_media_sizes=None,
supported_media_types=None,
supported_orientations=None,
supported_output_bins=None,
supported_pages_per_sheet=None,
supported_presentation_directions=None,
supported_print_qualities=None,
supports_fit_pdf_to_page=None,
top_margins=None,
allow_all_users=None,
created_date_time=None,
allowed_groups=None,
allowed_users=None,
printer=None):
body = {}
body['id'] = id_
body['defaults'] = defaults
body['display_name'] = display_name
body['is_accepting_jobs'] = is_accepting_jobs
body['location'] = location
body['manufacturer'] = manufacturer
body['model'] = model
body['name'] = name
body['status'] = status
body['jobs'] = jobs
body['capabilities'] = {}
body['capabilities']['bottom_margins'] = bottom_margins
body['capabilities']['collation'] = collation
body['capabilities']['color_modes'] = color_modes
body['capabilities']['content_types'] = content_types
body['capabilities']['copies_per_job'] = copies_per_job
body['capabilities']['dpis'] = dpis
body['capabilities']['duplex_modes'] = duplex_modes
body['capabilities']['feed_directions'] = feed_directions
body['capabilities']['feed_orientations'] = feed_orientations
body['capabilities']['finishings'] = finishings
body['capabilities']['input_bins'] = input_bins
body['capabilities']['is_color_printing_supported'] = is_color_printing_supported
body['capabilities']['is_page_range_supported'] = is_page_range_supported
body['capabilities']['left_margins'] = left_margins
body['capabilities']['media_colors'] = media_colors
body['capabilities']['media_sizes'] = media_sizes
body['capabilities']['media_types'] = media_types
body['capabilities']['multipage_layouts'] = multipage_layouts
body['capabilities']['orientations'] = orientations
body['capabilities']['output_bins'] = output_bins
body['capabilities']['pages_per_sheet'] = pages_per_sheet
body['capabilities']['qualities'] = qualities
body['capabilities']['right_margins'] = right_margins
body['capabilities']['scalings'] = scalings
body['capabilities']['supported_color_configurations'] = supported_color_configurations
body['capabilities']['supported_copies_per_job'] = supported_copies_per_job
body['capabilities']['supported_document_mime_types'] = supported_document_mime_types
body['capabilities']['supported_duplex_configurations'] = supported_duplex_configurations
body['capabilities']['supported_finishings'] = supported_finishings
body['capabilities']['supported_media_colors'] = supported_media_colors
body['capabilities']['supported_media_sizes'] = supported_media_sizes
body['capabilities']['supported_media_types'] = supported_media_types
body['capabilities']['supported_orientations'] = supported_orientations
body['capabilities']['supported_output_bins'] = supported_output_bins
body['capabilities']['supported_pages_per_sheet'] = supported_pages_per_sheet
body['capabilities']['supported_presentation_directions'] = supported_presentation_directions
body['capabilities']['supported_print_qualities'] = supported_print_qualities
body['capabilities']['supports_fit_pdf_to_page'] = supports_fit_pdf_to_page
body['capabilities']['top_margins'] = top_margins
body['allow_all_users'] = allow_all_users
body['created_date_time'] = created_date_time
body['allowed_groups'] = allowed_groups
body['allowed_users'] = allowed_users
body['printer'] = printer
return client.create_shares(body=body)
def devicescloudprint_print_create_task_definition(client,
id_=None,
created_by=None,
display_name=None,
tasks=None):
body = {}
body['id'] = id_
body['created_by'] = created_by
body['display_name'] = display_name
body['tasks'] = tasks
return client.create_task_definitions(body=body)
def devicescloudprint_print_delete_connector(client,
print_connector_id,
if_match=None):
return client.delete_connectors(print_connector_id=print_connector_id,
if_match=if_match)
def devicescloudprint_print_delete_operation(client,
print_operation_id,
if_match=None):
return client.delete_operations(print_operation_id=print_operation_id,
if_match=if_match)
def devicescloudprint_print_delete_printer(client,
printer_id,
if_match=None):
return client.delete_printers(printer_id=printer_id,
if_match=if_match)
def devicescloudprint_print_delete_printer_share(client,
printer_share_id,
if_match=None):
return client.delete_printer_shares(printer_share_id=printer_share_id,
if_match=if_match)
def devicescloudprint_print_delete_report(client,
report_root_id,
if_match=None):
return client.delete_reports(report_root_id=report_root_id,
if_match=if_match)
def devicescloudprint_print_delete_service(client,
print_service_id,
if_match=None):
return client.delete_services(print_service_id=print_service_id,
if_match=if_match)
def devicescloudprint_print_delete_share(client,
printer_share_id,
if_match=None):
return client.delete_shares(printer_share_id=printer_share_id,
if_match=if_match)
def devicescloudprint_print_delete_task_definition(client,
print_task_definition_id,
if_match=None):
return client.delete_task_definitions(print_task_definition_id=print_task_definition_id,
if_match=if_match)
def devicescloudprint_print_list_connector(client,
orderby=None,
select=None,
expand=None):
return client.list_connectors(orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_list_operation(client,
orderby=None,
select=None,
expand=None):
return client.list_operations(orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_list_printer(client,
orderby=None,
select=None,
expand=None):
return client.list_printers(orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_list_printer_share(client,
orderby=None,
select=None,
expand=None):
return client.list_printer_shares(orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_list_report(client,
orderby=None,
select=None,
expand=None):
return client.list_reports(orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_list_service(client,
orderby=None,
select=None,
expand=None):
return client.list_services(orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_list_share(client,
orderby=None,
select=None,
expand=None):
return client.list_shares(orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_list_task_definition(client,
orderby=None,
select=None,
expand=None):
return client.list_task_definitions(orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_show_connector(client,
print_connector_id,
select=None,
expand=None):
return client.get_connectors(print_connector_id=print_connector_id,
select=select,
expand=expand)
def devicescloudprint_print_show_operation(client,
print_operation_id,
select=None,
expand=None):
return client.get_operations(print_operation_id=print_operation_id,
select=select,
expand=expand)
def devicescloudprint_print_show_printer(client,
printer_id,
select=None,
expand=None):
return client.get_printers(printer_id=printer_id,
select=select,
expand=expand)
def devicescloudprint_print_show_printer_share(client,
printer_share_id,
select=None,
expand=None):
return client.get_printer_shares(printer_share_id=printer_share_id,
select=select,
expand=expand)
def devicescloudprint_print_show_report(client,
report_root_id,
select=None,
expand=None):
return client.get_reports(report_root_id=report_root_id,
select=select,
expand=expand)
def devicescloudprint_print_show_service(client,
print_service_id,
select=None,
expand=None):
return client.get_services(print_service_id=print_service_id,
select=select,
expand=expand)
def devicescloudprint_print_show_share(client,
printer_share_id,
select=None,
expand=None):
return client.get_shares(printer_share_id=printer_share_id,
select=select,
expand=expand)
def devicescloudprint_print_show_task_definition(client,
print_task_definition_id,
select=None,
expand=None):
return client.get_task_definitions(print_task_definition_id=print_task_definition_id,
select=select,
expand=expand)
def devicescloudprint_print_update_connector(client,
print_connector_id,
id_=None,
app_version=None,
display_name=None,
fully_qualified_domain_name=None,
location=None,
name=None,
operating_system=None,
registered_date_time=None,
last_connection_time=None):
body = {}
body['id'] = id_
body['app_version'] = app_version
body['display_name'] = display_name
body['fully_qualified_domain_name'] = fully_qualified_domain_name
body['location'] = location
body['name'] = name
body['operating_system'] = operating_system
body['registered_date_time'] = registered_date_time
body['device_health'] = {}
body['device_health']['last_connection_time'] = last_connection_time
return client.update_connectors(print_connector_id=print_connector_id,
body=body)
def devicescloudprint_print_update_operation(client,
print_operation_id,
id_=None,
created_date_time=None,
status=None):
body = {}
body['id'] = id_
body['created_date_time'] = created_date_time
body['status'] = status
return client.update_operations(print_operation_id=print_operation_id,
body=body)
def devicescloudprint_print_update_printer(client,
printer_id,
id_=None,
defaults=None,
display_name=None,
is_accepting_jobs=None,
location=None,
manufacturer=None,
model=None,
name=None,
status=None,
jobs=None,
bottom_margins=None,
collation=None,
color_modes=None,
content_types=None,
copies_per_job=None,
dpis=None,
duplex_modes=None,
feed_directions=None,
feed_orientations=None,
finishings=None,
input_bins=None,
is_color_printing_supported=None,
is_page_range_supported=None,
left_margins=None,
media_colors=None,
media_sizes=None,
media_types=None,
multipage_layouts=None,
orientations=None,
output_bins=None,
pages_per_sheet=None,
qualities=None,
right_margins=None,
scalings=None,
supported_color_configurations=None,
supported_copies_per_job=None,
supported_document_mime_types=None,
supported_duplex_configurations=None,
supported_finishings=None,
supported_media_colors=None,
supported_media_sizes=None,
supported_media_types=None,
supported_orientations=None,
supported_output_bins=None,
supported_pages_per_sheet=None,
supported_presentation_directions=None,
supported_print_qualities=None,
supports_fit_pdf_to_page=None,
top_margins=None,
accepting_jobs=None,
is_shared=None,
registered_date_time=None,
allowed_groups=None,
allowed_users=None,
connectors=None,
share=None,
shares=None,
task_triggers=None):
body = {}
body['id'] = id_
body['defaults'] = defaults
body['display_name'] = display_name
body['is_accepting_jobs'] = is_accepting_jobs
body['location'] = location
body['manufacturer'] = manufacturer
body['model'] = model
body['name'] = name
body['status'] = status
body['jobs'] = jobs
body['capabilities'] = {}
body['capabilities']['bottom_margins'] = bottom_margins
body['capabilities']['collation'] = collation
body['capabilities']['color_modes'] = color_modes
body['capabilities']['content_types'] = content_types
body['capabilities']['copies_per_job'] = copies_per_job
body['capabilities']['dpis'] = dpis
body['capabilities']['duplex_modes'] = duplex_modes
body['capabilities']['feed_directions'] = feed_directions
body['capabilities']['feed_orientations'] = feed_orientations
body['capabilities']['finishings'] = finishings
body['capabilities']['input_bins'] = input_bins
body['capabilities']['is_color_printing_supported'] = is_color_printing_supported
body['capabilities']['is_page_range_supported'] = is_page_range_supported
body['capabilities']['left_margins'] = left_margins
body['capabilities']['media_colors'] = media_colors
body['capabilities']['media_sizes'] = media_sizes
body['capabilities']['media_types'] = media_types
body['capabilities']['multipage_layouts'] = multipage_layouts
body['capabilities']['orientations'] = orientations
body['capabilities']['output_bins'] = output_bins
body['capabilities']['pages_per_sheet'] = pages_per_sheet
body['capabilities']['qualities'] = qualities
body['capabilities']['right_margins'] = right_margins
body['capabilities']['scalings'] = scalings
body['capabilities']['supported_color_configurations'] = supported_color_configurations
body['capabilities']['supported_copies_per_job'] = supported_copies_per_job
body['capabilities']['supported_document_mime_types'] = supported_document_mime_types
body['capabilities']['supported_duplex_configurations'] = supported_duplex_configurations
body['capabilities']['supported_finishings'] = supported_finishings
body['capabilities']['supported_media_colors'] = supported_media_colors
body['capabilities']['supported_media_sizes'] = supported_media_sizes
body['capabilities']['supported_media_types'] = supported_media_types
body['capabilities']['supported_orientations'] = supported_orientations
body['capabilities']['supported_output_bins'] = supported_output_bins
body['capabilities']['supported_pages_per_sheet'] = supported_pages_per_sheet
body['capabilities']['supported_presentation_directions'] = supported_presentation_directions
body['capabilities']['supported_print_qualities'] = supported_print_qualities
body['capabilities']['supports_fit_pdf_to_page'] = supports_fit_pdf_to_page
body['capabilities']['top_margins'] = top_margins
body['accepting_jobs'] = accepting_jobs
body['is_shared'] = is_shared
body['registered_date_time'] = registered_date_time
body['allowed_groups'] = allowed_groups
body['allowed_users'] = allowed_users
body['connectors'] = connectors
body['share'] = share
body['shares'] = shares
body['task_triggers'] = task_triggers
return client.update_printers(printer_id=printer_id,
body=body)
def devicescloudprint_print_update_printer_share(client,
printer_share_id,
id_=None,
defaults=None,
display_name=None,
is_accepting_jobs=None,
location=None,
manufacturer=None,
model=None,
name=None,
status=None,
jobs=None,
bottom_margins=None,
collation=None,
color_modes=None,
content_types=None,
copies_per_job=None,
dpis=None,
duplex_modes=None,
feed_directions=None,
feed_orientations=None,
finishings=None,
input_bins=None,
is_color_printing_supported=None,
is_page_range_supported=None,
left_margins=None,
media_colors=None,
media_sizes=None,
media_types=None,
multipage_layouts=None,
orientations=None,
output_bins=None,
pages_per_sheet=None,
qualities=None,
right_margins=None,
scalings=None,
supported_color_configurations=None,
supported_copies_per_job=None,
supported_document_mime_types=None,
supported_duplex_configurations=None,
supported_finishings=None,
supported_media_colors=None,
supported_media_sizes=None,
supported_media_types=None,
supported_orientations=None,
supported_output_bins=None,
supported_pages_per_sheet=None,
supported_presentation_directions=None,
supported_print_qualities=None,
supports_fit_pdf_to_page=None,
top_margins=None,
allow_all_users=None,
created_date_time=None,
allowed_groups=None,
allowed_users=None,
printer=None):
body = {}
body['id'] = id_
body['defaults'] = defaults
body['display_name'] = display_name
body['is_accepting_jobs'] = is_accepting_jobs
body['location'] = location
body['manufacturer'] = manufacturer
body['model'] = model
body['name'] = name
body['status'] = status
body['jobs'] = jobs
body['capabilities'] = {}
body['capabilities']['bottom_margins'] = bottom_margins
body['capabilities']['collation'] = collation
body['capabilities']['color_modes'] = color_modes
body['capabilities']['content_types'] = content_types
body['capabilities']['copies_per_job'] = copies_per_job
body['capabilities']['dpis'] = dpis
body['capabilities']['duplex_modes'] = duplex_modes
body['capabilities']['feed_directions'] = feed_directions
body['capabilities']['feed_orientations'] = feed_orientations
body['capabilities']['finishings'] = finishings
body['capabilities']['input_bins'] = input_bins
body['capabilities']['is_color_printing_supported'] = is_color_printing_supported
body['capabilities']['is_page_range_supported'] = is_page_range_supported
body['capabilities']['left_margins'] = left_margins
body['capabilities']['media_colors'] = media_colors
body['capabilities']['media_sizes'] = media_sizes
body['capabilities']['media_types'] = media_types
body['capabilities']['multipage_layouts'] = multipage_layouts
body['capabilities']['orientations'] = orientations
body['capabilities']['output_bins'] = output_bins
body['capabilities']['pages_per_sheet'] = pages_per_sheet
body['capabilities']['qualities'] = qualities
body['capabilities']['right_margins'] = right_margins
body['capabilities']['scalings'] = scalings
body['capabilities']['supported_color_configurations'] = supported_color_configurations
body['capabilities']['supported_copies_per_job'] = supported_copies_per_job
body['capabilities']['supported_document_mime_types'] = supported_document_mime_types
body['capabilities']['supported_duplex_configurations'] = supported_duplex_configurations
body['capabilities']['supported_finishings'] = supported_finishings
body['capabilities']['supported_media_colors'] = supported_media_colors
body['capabilities']['supported_media_sizes'] = supported_media_sizes
body['capabilities']['supported_media_types'] = supported_media_types
body['capabilities']['supported_orientations'] = supported_orientations
body['capabilities']['supported_output_bins'] = supported_output_bins
body['capabilities']['supported_pages_per_sheet'] = supported_pages_per_sheet
body['capabilities']['supported_presentation_directions'] = supported_presentation_directions
body['capabilities']['supported_print_qualities'] = supported_print_qualities
body['capabilities']['supports_fit_pdf_to_page'] = supports_fit_pdf_to_page
body['capabilities']['top_margins'] = top_margins
body['allow_all_users'] = allow_all_users
body['created_date_time'] = created_date_time
body['allowed_groups'] = allowed_groups
body['allowed_users'] = allowed_users
body['printer'] = printer
return client.update_printer_shares(printer_share_id=printer_share_id,
body=body)
def devicescloudprint_print_update_report(client,
report_root_id,
id_=None,
application_sign_in_detailed_summary=None,
credential_user_registration_details=None,
user_credential_usage_details=None,
daily_print_usage_summaries_by_printer=None,
daily_print_usage_summaries_by_user=None,
monthly_print_usage_summaries_by_printer=None,
monthly_print_usage_summaries_by_user=None):
body = {}
body['id'] = id_
body['application_sign_in_detailed_summary'] = application_sign_in_detailed_summary
body['credential_user_registration_details'] = credential_user_registration_details
body['user_credential_usage_details'] = user_credential_usage_details
body['daily_print_usage_summaries_by_printer'] = daily_print_usage_summaries_by_printer
body['daily_print_usage_summaries_by_user'] = daily_print_usage_summaries_by_user
body['monthly_print_usage_summaries_by_printer'] = monthly_print_usage_summaries_by_printer
body['monthly_print_usage_summaries_by_user'] = monthly_print_usage_summaries_by_user
return client.update_reports(report_root_id=report_root_id,
body=body)
def devicescloudprint_print_update_service(client,
print_service_id,
id_=None,
endpoints=None):
body = {}
body['id'] = id_
body['endpoints'] = endpoints
return client.update_services(print_service_id=print_service_id,
body=body)
def devicescloudprint_print_update_share(client,
printer_share_id,
id_=None,
defaults=None,
display_name=None,
is_accepting_jobs=None,
location=None,
manufacturer=None,
model=None,
name=None,
status=None,
jobs=None,
bottom_margins=None,
collation=None,
color_modes=None,
content_types=None,
copies_per_job=None,
dpis=None,
duplex_modes=None,
feed_directions=None,
feed_orientations=None,
finishings=None,
input_bins=None,
is_color_printing_supported=None,
is_page_range_supported=None,
left_margins=None,
media_colors=None,
media_sizes=None,
media_types=None,
multipage_layouts=None,
orientations=None,
output_bins=None,
pages_per_sheet=None,
qualities=None,
right_margins=None,
scalings=None,
supported_color_configurations=None,
supported_copies_per_job=None,
supported_document_mime_types=None,
supported_duplex_configurations=None,
supported_finishings=None,
supported_media_colors=None,
supported_media_sizes=None,
supported_media_types=None,
supported_orientations=None,
supported_output_bins=None,
supported_pages_per_sheet=None,
supported_presentation_directions=None,
supported_print_qualities=None,
supports_fit_pdf_to_page=None,
top_margins=None,
allow_all_users=None,
created_date_time=None,
allowed_groups=None,
allowed_users=None,
printer=None):
body = {}
body['id'] = id_
body['defaults'] = defaults
body['display_name'] = display_name
body['is_accepting_jobs'] = is_accepting_jobs
body['location'] = location
body['manufacturer'] = manufacturer
body['model'] = model
body['name'] = name
body['status'] = status
body['jobs'] = jobs
body['capabilities'] = {}
body['capabilities']['bottom_margins'] = bottom_margins
body['capabilities']['collation'] = collation
body['capabilities']['color_modes'] = color_modes
body['capabilities']['content_types'] = content_types
body['capabilities']['copies_per_job'] = copies_per_job
body['capabilities']['dpis'] = dpis
body['capabilities']['duplex_modes'] = duplex_modes
body['capabilities']['feed_directions'] = feed_directions
body['capabilities']['feed_orientations'] = feed_orientations
body['capabilities']['finishings'] = finishings
body['capabilities']['input_bins'] = input_bins
body['capabilities']['is_color_printing_supported'] = is_color_printing_supported
body['capabilities']['is_page_range_supported'] = is_page_range_supported
body['capabilities']['left_margins'] = left_margins
body['capabilities']['media_colors'] = media_colors
body['capabilities']['media_sizes'] = media_sizes
body['capabilities']['media_types'] = media_types
body['capabilities']['multipage_layouts'] = multipage_layouts
body['capabilities']['orientations'] = orientations
body['capabilities']['output_bins'] = output_bins
body['capabilities']['pages_per_sheet'] = pages_per_sheet
body['capabilities']['qualities'] = qualities
body['capabilities']['right_margins'] = right_margins
body['capabilities']['scalings'] = scalings
body['capabilities']['supported_color_configurations'] = supported_color_configurations
body['capabilities']['supported_copies_per_job'] = supported_copies_per_job
body['capabilities']['supported_document_mime_types'] = supported_document_mime_types
body['capabilities']['supported_duplex_configurations'] = supported_duplex_configurations
body['capabilities']['supported_finishings'] = supported_finishings
body['capabilities']['supported_media_colors'] = supported_media_colors
body['capabilities']['supported_media_sizes'] = supported_media_sizes
body['capabilities']['supported_media_types'] = supported_media_types
body['capabilities']['supported_orientations'] = supported_orientations
body['capabilities']['supported_output_bins'] = supported_output_bins
body['capabilities']['supported_pages_per_sheet'] = supported_pages_per_sheet
body['capabilities']['supported_presentation_directions'] = supported_presentation_directions
body['capabilities']['supported_print_qualities'] = supported_print_qualities
body['capabilities']['supports_fit_pdf_to_page'] = supports_fit_pdf_to_page
body['capabilities']['top_margins'] = top_margins
body['allow_all_users'] = allow_all_users
body['created_date_time'] = created_date_time
body['allowed_groups'] = allowed_groups
body['allowed_users'] = allowed_users
body['printer'] = printer
return client.update_shares(printer_share_id=printer_share_id,
body=body)
def devicescloudprint_print_update_task_definition(client,
print_task_definition_id,
id_=None,
created_by=None,
display_name=None,
tasks=None):
body = {}
body['id'] = id_
body['created_by'] = created_by
body['display_name'] = display_name
body['tasks'] = tasks
return client.update_task_definitions(print_task_definition_id=print_task_definition_id,
body=body)
def devicescloudprint_print_printer_create(client,
display_name=None,
manufacturer=None,
model=None,
physical_device_id=None,
has_physical_device=None,
certificate_signing_request=None,
connector_id=None):
if has_physical_device is None:
has_physical_device = False
body = {}
body['display_name'] = display_name
body['manufacturer'] = manufacturer
body['model'] = model
body['physical_device_id'] = physical_device_id
body['has_physical_device'] = has_physical_device
body['certificate_signing_request'] = certificate_signing_request
body['connector_id'] = connector_id
return client.create(body=body)
def devicescloudprint_print_printer_create_allowed_group(client,
printer_id,
id_=None,
display_name=None):
body = {}
body['id'] = id_
body['display_name'] = display_name
return client.create_allowed_groups(printer_id=printer_id,
body=body)
def devicescloudprint_print_printer_create_allowed_user(client,
printer_id,
id_=None,
display_name=None,
ip_address=None,
user_principal_name=None):
body = {}
body['id'] = id_
body['display_name'] = display_name
body['ip_address'] = ip_address
body['user_principal_name'] = user_principal_name
return client.create_allowed_users(printer_id=printer_id,
body=body)
def devicescloudprint_print_printer_create_ref_connector(client,
printer_id,
body):
return client.create_ref_connectors(printer_id=printer_id,
body=body)
def devicescloudprint_print_printer_create_ref_share(client,
printer_id,
body):
return client.create_ref_shares(printer_id=printer_id,
body=body)
def devicescloudprint_print_printer_create_task_trigger(client,
printer_id,
id_=None,
event=None,
definition=None):
body = {}
body['id'] = id_
body['event'] = event
body['definition'] = definition
return client.create_task_triggers(printer_id=printer_id,
body=body)
def devicescloudprint_print_printer_delete_allowed_group(client,
printer_id,
print_identity_id,
if_match=None):
return client.delete_allowed_groups(printer_id=printer_id,
print_identity_id=print_identity_id,
if_match=if_match)
def devicescloudprint_print_printer_delete_allowed_user(client,
printer_id,
print_user_identity_id,
if_match=None):
return client.delete_allowed_users(printer_id=printer_id,
print_user_identity_id=print_user_identity_id,
if_match=if_match)
def devicescloudprint_print_printer_delete_ref_share(client,
printer_id,
if_match=None):
return client.delete_ref_share(printer_id=printer_id,
if_match=if_match)
def devicescloudprint_print_printer_delete_task_trigger(client,
printer_id,
print_task_trigger_id,
if_match=None):
return client.delete_task_triggers(printer_id=printer_id,
print_task_trigger_id=print_task_trigger_id,
if_match=if_match)
def devicescloudprint_print_printer_list_allowed_group(client,
printer_id,
orderby=None,
select=None,
expand=None):
return client.list_allowed_groups(printer_id=printer_id,
orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_printer_list_allowed_user(client,
printer_id,
orderby=None,
select=None,
expand=None):
return client.list_allowed_users(printer_id=printer_id,
orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_printer_list_connector(client,
printer_id,
orderby=None,
select=None,
expand=None):
return client.list_connectors(printer_id=printer_id,
orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_printer_list_ref_connector(client,
printer_id,
orderby=None):
return client.list_ref_connectors(printer_id=printer_id,
orderby=orderby)
def devicescloudprint_print_printer_list_ref_share(client,
printer_id,
orderby=None):
return client.list_ref_shares(printer_id=printer_id,
orderby=orderby)
def devicescloudprint_print_printer_list_share(client,
printer_id,
orderby=None,
select=None,
expand=None):
return client.list_shares(printer_id=printer_id,
orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_printer_list_task_trigger(client,
printer_id,
orderby=None,
select=None,
expand=None):
return client.list_task_triggers(printer_id=printer_id,
orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_printer_reset_default(client,
printer_id):
return client.reset_defaults(printer_id=printer_id)
def devicescloudprint_print_printer_restore_factory_default(client,
printer_id):
return client.restore_factory_defaults(printer_id=printer_id)
def devicescloudprint_print_printer_set_ref_share(client,
printer_id,
body):
return client.set_ref_share(printer_id=printer_id,
body=body)
def devicescloudprint_print_printer_show_allowed_group(client,
printer_id,
print_identity_id,
select=None,
expand=None):
return client.get_allowed_groups(printer_id=printer_id,
print_identity_id=print_identity_id,
select=select,
expand=expand)
def devicescloudprint_print_printer_show_allowed_user(client,
printer_id,
print_user_identity_id,
select=None,
expand=None):
return client.get_allowed_users(printer_id=printer_id,
print_user_identity_id=print_user_identity_id,
select=select,
expand=expand)
def devicescloudprint_print_printer_show_capability(client,
printer_id):
return client.get_capabilities(printer_id=printer_id)
def devicescloudprint_print_printer_show_ref_share(client,
printer_id):
return client.get_ref_share(printer_id=printer_id)
def devicescloudprint_print_printer_show_share(client,
printer_id,
select=None,
expand=None):
return client.get_share(printer_id=printer_id,
select=select,
expand=expand)
def devicescloudprint_print_printer_show_task_trigger(client,
printer_id,
print_task_trigger_id,
select=None,
expand=None):
return client.get_task_triggers(printer_id=printer_id,
print_task_trigger_id=print_task_trigger_id,
select=select,
expand=expand)
def devicescloudprint_print_printer_update_allowed_group(client,
printer_id,
print_identity_id,
id_=None,
display_name=None):
body = {}
body['id'] = id_
body['display_name'] = display_name
return client.update_allowed_groups(printer_id=printer_id,
print_identity_id=print_identity_id,
body=body)
def devicescloudprint_print_printer_update_allowed_user(client,
printer_id,
print_user_identity_id,
id_=None,
display_name=None,
ip_address=None,
user_principal_name=None):
body = {}
body['id'] = id_
body['display_name'] = display_name
body['ip_address'] = ip_address
body['user_principal_name'] = user_principal_name
return client.update_allowed_users(printer_id=printer_id,
print_user_identity_id=print_user_identity_id,
body=body)
def devicescloudprint_print_printer_update_task_trigger(client,
printer_id,
print_task_trigger_id,
id_=None,
event=None,
definition=None):
body = {}
body['id'] = id_
body['event'] = event
body['definition'] = definition
return client.update_task_triggers(printer_id=printer_id,
print_task_trigger_id=print_task_trigger_id,
body=body)
def devicescloudprint_print_printer_task_trigger_delete_ref_definition(client,
printer_id,
print_task_trigger_id,
if_match=None):
return client.delete_ref_definition(printer_id=printer_id,
print_task_trigger_id=print_task_trigger_id,
if_match=if_match)
def devicescloudprint_print_printer_task_trigger_set_ref_definition(client,
printer_id,
print_task_trigger_id,
body):
return client.set_ref_definition(printer_id=printer_id,
print_task_trigger_id=print_task_trigger_id,
body=body)
def devicescloudprint_print_printer_task_trigger_show_definition(client,
printer_id,
print_task_trigger_id,
select=None,
expand=None):
return client.get_definition(printer_id=printer_id,
print_task_trigger_id=print_task_trigger_id,
select=select,
expand=expand)
def devicescloudprint_print_printer_task_trigger_show_ref_definition(client,
printer_id,
print_task_trigger_id):
return client.get_ref_definition(printer_id=printer_id,
print_task_trigger_id=print_task_trigger_id)
def devicescloudprint_print_printer_share_create_allowed_group(client,
printer_share_id,
id_=None,
display_name=None):
body = {}
body['id'] = id_
body['display_name'] = display_name
return client.create_allowed_groups(printer_share_id=printer_share_id,
body=body)
def devicescloudprint_print_printer_share_create_allowed_user(client,
printer_share_id,
id_=None,
display_name=None,
ip_address=None,
user_principal_name=None):
body = {}
body['id'] = id_
body['display_name'] = display_name
body['ip_address'] = ip_address
body['user_principal_name'] = user_principal_name
return client.create_allowed_users(printer_share_id=printer_share_id,
body=body)
def devicescloudprint_print_printer_share_delete_allowed_group(client,
printer_share_id,
print_identity_id,
if_match=None):
return client.delete_allowed_groups(printer_share_id=printer_share_id,
print_identity_id=print_identity_id,
if_match=if_match)
def devicescloudprint_print_printer_share_delete_allowed_user(client,
printer_share_id,
print_user_identity_id,
if_match=None):
return client.delete_allowed_users(printer_share_id=printer_share_id,
print_user_identity_id=print_user_identity_id,
if_match=if_match)
def devicescloudprint_print_printer_share_delete_ref_printer(client,
printer_share_id,
if_match=None):
return client.delete_ref_printer(printer_share_id=printer_share_id,
if_match=if_match)
def devicescloudprint_print_printer_share_list_allowed_group(client,
printer_share_id,
orderby=None,
select=None,
expand=None):
return client.list_allowed_groups(printer_share_id=printer_share_id,
orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_printer_share_list_allowed_user(client,
printer_share_id,
orderby=None,
select=None,
expand=None):
return client.list_allowed_users(printer_share_id=printer_share_id,
orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_printer_share_set_ref_printer(client,
printer_share_id,
body):
return client.set_ref_printer(printer_share_id=printer_share_id,
body=body)
def devicescloudprint_print_printer_share_show_allowed_group(client,
printer_share_id,
print_identity_id,
select=None,
expand=None):
return client.get_allowed_groups(printer_share_id=printer_share_id,
print_identity_id=print_identity_id,
select=select,
expand=expand)
def devicescloudprint_print_printer_share_show_allowed_user(client,
printer_share_id,
print_user_identity_id,
select=None,
expand=None):
return client.get_allowed_users(printer_share_id=printer_share_id,
print_user_identity_id=print_user_identity_id,
select=select,
expand=expand)
def devicescloudprint_print_printer_share_show_printer(client,
printer_share_id,
select=None,
expand=None):
return client.get_printer(printer_share_id=printer_share_id,
select=select,
expand=expand)
def devicescloudprint_print_printer_share_show_ref_printer(client,
printer_share_id):
return client.get_ref_printer(printer_share_id=printer_share_id)
def devicescloudprint_print_printer_share_update_allowed_group(client,
printer_share_id,
print_identity_id,
id_=None,
display_name=None):
body = {}
body['id'] = id_
body['display_name'] = display_name
return client.update_allowed_groups(printer_share_id=printer_share_id,
print_identity_id=print_identity_id,
body=body)
def devicescloudprint_print_printer_share_update_allowed_user(client,
printer_share_id,
print_user_identity_id,
id_=None,
display_name=None,
ip_address=None,
user_principal_name=None):
body = {}
body['id'] = id_
body['display_name'] = display_name
body['ip_address'] = ip_address
body['user_principal_name'] = user_principal_name
return client.update_allowed_users(printer_share_id=printer_share_id,
print_user_identity_id=print_user_identity_id,
body=body)
def devicescloudprint_print_printer_share_printer_reset_default(client,
printer_share_id):
return client.reset_defaults(printer_share_id=printer_share_id)
def devicescloudprint_print_printer_share_printer_restore_factory_default(client,
printer_share_id):
return client.restore_factory_defaults(printer_share_id=printer_share_id)
def devicescloudprint_print_printer_share_printer_show_capability(client,
printer_share_id):
return client.get_capabilities(printer_share_id=printer_share_id)
def devicescloudprint_print_report_show_group_archived_print_job(client,
group_id,
period_start,
period_end):
return client.get_group_archived_print_jobs(group_id=group_id,
period_start=period_start,
period_end=period_end)
def devicescloudprint_print_report_show_group_print_usage_summary(client,
group_id,
period_start,
period_end):
return client.get_group_print_usage_summary(group_id=group_id,
period_start=period_start,
period_end=period_end)
def devicescloudprint_print_report_show_overall_print_usage_summary(client,
period_start,
period_end,
top_lists_size):
return client.get_overall_print_usage_summary(period_start=period_start,
period_end=period_end,
top_lists_size=top_lists_size)
def devicescloudprint_print_report_show_print_usage_summary(client,
period_start,
period_end,
time_span_in_minutes=None):
    if period_start is not None and period_end is not None and time_span_in_minutes is not None:
        return client.get_print_usage_summaries_by_time_span(period_start=period_start,
                                                             period_end=period_end,
                                                             time_span_in_minutes=time_span_in_minutes)
    # NOTE: the two elif branches below test the identical condition, so the
    # by_printer call can never be reached as written; when both period bounds
    # are supplied (and no time span), only the by_group summary is returned.
    elif period_start is not None and period_end is not None:
        return client.get_print_usage_summaries_by_group(period_start=period_start,
                                                         period_end=period_end)
    elif period_start is not None and period_end is not None:
        return client.get_print_usage_summaries_by_printer(period_start=period_start,
                                                           period_end=period_end)
    return client.get_print_usage_summaries_by_user(period_start=period_start,
                                                    period_end=period_end)
def devicescloudprint_print_report_show_printer_archived_print_job(client,
printer_id,
period_start,
period_end):
return client.get_printer_archived_print_jobs(printer_id=printer_id,
period_start=period_start,
period_end=period_end)
def devicescloudprint_print_report_show_printer_usage_summary(client,
printer_id,
period_start,
period_end):
return client.get_printer_usage_summary(printer_id=printer_id,
period_start=period_start,
period_end=period_end)
def devicescloudprint_print_report_show_user_archived_print_job(client,
user_id,
period_start,
period_end):
return client.get_user_archived_print_jobs(user_id=user_id,
period_start=period_start,
period_end=period_end)
def devicescloudprint_print_report_show_user_print_usage_summary(client,
user_id,
period_start,
period_end):
return client.get_user_print_usage_summary(user_id=user_id,
period_start=period_start,
period_end=period_end)
def devicescloudprint_print_service_create_endpoint(client,
print_service_id,
id_=None,
display_name=None,
uri=None):
body = {}
body['id'] = id_
body['display_name'] = display_name
body['uri'] = uri
return client.create_endpoints(print_service_id=print_service_id,
body=body)
def devicescloudprint_print_service_delete_endpoint(client,
print_service_id,
print_service_endpoint_id,
if_match=None):
return client.delete_endpoints(print_service_id=print_service_id,
print_service_endpoint_id=print_service_endpoint_id,
if_match=if_match)
def devicescloudprint_print_service_list_endpoint(client,
print_service_id,
orderby=None,
select=None,
expand=None):
return client.list_endpoints(print_service_id=print_service_id,
orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_service_show_endpoint(client,
print_service_id,
print_service_endpoint_id,
select=None,
expand=None):
return client.get_endpoints(print_service_id=print_service_id,
print_service_endpoint_id=print_service_endpoint_id,
select=select,
expand=expand)
def devicescloudprint_print_service_update_endpoint(client,
print_service_id,
print_service_endpoint_id,
id_=None,
display_name=None,
uri=None):
body = {}
body['id'] = id_
body['display_name'] = display_name
body['uri'] = uri
return client.update_endpoints(print_service_id=print_service_id,
print_service_endpoint_id=print_service_endpoint_id,
body=body)
def devicescloudprint_print_share_create_allowed_group(client,
printer_share_id,
id_=None,
display_name=None):
body = {}
body['id'] = id_
body['display_name'] = display_name
return client.create_allowed_groups(printer_share_id=printer_share_id,
body=body)
def devicescloudprint_print_share_create_allowed_user(client,
printer_share_id,
id_=None,
display_name=None,
ip_address=None,
user_principal_name=None):
body = {}
body['id'] = id_
body['display_name'] = display_name
body['ip_address'] = ip_address
body['user_principal_name'] = user_principal_name
return client.create_allowed_users(printer_share_id=printer_share_id,
body=body)
def devicescloudprint_print_share_delete_allowed_group(client,
printer_share_id,
print_identity_id,
if_match=None):
return client.delete_allowed_groups(printer_share_id=printer_share_id,
print_identity_id=print_identity_id,
if_match=if_match)
def devicescloudprint_print_share_delete_allowed_user(client,
printer_share_id,
print_user_identity_id,
if_match=None):
return client.delete_allowed_users(printer_share_id=printer_share_id,
print_user_identity_id=print_user_identity_id,
if_match=if_match)
def devicescloudprint_print_share_delete_ref_printer(client,
printer_share_id,
if_match=None):
return client.delete_ref_printer(printer_share_id=printer_share_id,
if_match=if_match)
def devicescloudprint_print_share_list_allowed_group(client,
printer_share_id,
orderby=None,
select=None,
expand=None):
return client.list_allowed_groups(printer_share_id=printer_share_id,
orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_share_list_allowed_user(client,
printer_share_id,
orderby=None,
select=None,
expand=None):
return client.list_allowed_users(printer_share_id=printer_share_id,
orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_share_set_ref_printer(client,
printer_share_id,
body):
return client.set_ref_printer(printer_share_id=printer_share_id,
body=body)
def devicescloudprint_print_share_show_allowed_group(client,
printer_share_id,
print_identity_id,
select=None,
expand=None):
return client.get_allowed_groups(printer_share_id=printer_share_id,
print_identity_id=print_identity_id,
select=select,
expand=expand)
def devicescloudprint_print_share_show_allowed_user(client,
printer_share_id,
print_user_identity_id,
select=None,
expand=None):
return client.get_allowed_users(printer_share_id=printer_share_id,
print_user_identity_id=print_user_identity_id,
select=select,
expand=expand)
def devicescloudprint_print_share_show_printer(client,
printer_share_id,
select=None,
expand=None):
return client.get_printer(printer_share_id=printer_share_id,
select=select,
expand=expand)
def devicescloudprint_print_share_show_ref_printer(client,
printer_share_id):
return client.get_ref_printer(printer_share_id=printer_share_id)
def devicescloudprint_print_share_update_allowed_group(client,
printer_share_id,
print_identity_id,
id_=None,
display_name=None):
body = {}
body['id'] = id_
body['display_name'] = display_name
return client.update_allowed_groups(printer_share_id=printer_share_id,
print_identity_id=print_identity_id,
body=body)
def devicescloudprint_print_share_update_allowed_user(client,
printer_share_id,
print_user_identity_id,
id_=None,
display_name=None,
ip_address=None,
user_principal_name=None):
body = {}
body['id'] = id_
body['display_name'] = display_name
body['ip_address'] = ip_address
body['user_principal_name'] = user_principal_name
return client.update_allowed_users(printer_share_id=printer_share_id,
print_user_identity_id=print_user_identity_id,
body=body)
def devicescloudprint_print_share_printer_reset_default(client,
printer_share_id):
return client.reset_defaults(printer_share_id=printer_share_id)
def devicescloudprint_print_share_printer_restore_factory_default(client,
printer_share_id):
return client.restore_factory_defaults(printer_share_id=printer_share_id)
def devicescloudprint_print_share_printer_show_capability(client,
printer_share_id):
return client.get_capabilities(printer_share_id=printer_share_id)
def devicescloudprint_print_task_definition_create_task(client,
print_task_definition_id,
id_=None,
parent_url=None,
status=None,
definition=None,
microsoft_graph_entity_id=None,
event=None,
microsoft_graph_print_task_definition=None):
body = {}
body['id'] = id_
body['parent_url'] = parent_url
body['status'] = status
body['definition'] = definition
body['trigger'] = {}
body['trigger']['id'] = microsoft_graph_entity_id
body['trigger']['event'] = event
body['trigger']['definition'] = microsoft_graph_print_task_definition
return client.create_tasks(print_task_definition_id=print_task_definition_id,
body=body)
def devicescloudprint_print_task_definition_delete_task(client,
print_task_definition_id,
print_task_id,
if_match=None):
return client.delete_tasks(print_task_definition_id=print_task_definition_id,
print_task_id=print_task_id,
if_match=if_match)
def devicescloudprint_print_task_definition_list_task(client,
print_task_definition_id,
orderby=None,
select=None,
expand=None):
return client.list_tasks(print_task_definition_id=print_task_definition_id,
orderby=orderby,
select=select,
expand=expand)
def devicescloudprint_print_task_definition_show_task(client,
print_task_definition_id,
print_task_id,
select=None,
expand=None):
return client.get_tasks(print_task_definition_id=print_task_definition_id,
print_task_id=print_task_id,
select=select,
expand=expand)
def devicescloudprint_print_task_definition_update_task(client,
print_task_definition_id,
print_task_id,
id_=None,
parent_url=None,
status=None,
definition=None,
microsoft_graph_entity_id=None,
event=None,
microsoft_graph_print_task_definition=None):
body = {}
body['id'] = id_
body['parent_url'] = parent_url
body['status'] = status
body['definition'] = definition
body['trigger'] = {}
body['trigger']['id'] = microsoft_graph_entity_id
body['trigger']['event'] = event
body['trigger']['definition'] = microsoft_graph_print_task_definition
return client.update_tasks(print_task_definition_id=print_task_definition_id,
print_task_id=print_task_id,
body=body)
def devicescloudprint_print_task_definition_task_delete_ref_definition(client,
print_task_definition_id,
print_task_id,
if_match=None):
return client.delete_ref_definition(print_task_definition_id=print_task_definition_id,
print_task_id=print_task_id,
if_match=if_match)
def devicescloudprint_print_task_definition_task_delete_ref_trigger(client,
print_task_definition_id,
print_task_id,
if_match=None):
return client.delete_ref_trigger(print_task_definition_id=print_task_definition_id,
print_task_id=print_task_id,
if_match=if_match)
def devicescloudprint_print_task_definition_task_set_ref_definition(client,
print_task_definition_id,
print_task_id,
body):
return client.set_ref_definition(print_task_definition_id=print_task_definition_id,
print_task_id=print_task_id,
body=body)
def devicescloudprint_print_task_definition_task_set_ref_trigger(client,
print_task_definition_id,
print_task_id,
body):
return client.set_ref_trigger(print_task_definition_id=print_task_definition_id,
print_task_id=print_task_id,
body=body)
def devicescloudprint_print_task_definition_task_show_definition(client,
print_task_definition_id,
print_task_id,
select=None,
expand=None):
return client.get_definition(print_task_definition_id=print_task_definition_id,
print_task_id=print_task_id,
select=select,
expand=expand)
def devicescloudprint_print_task_definition_task_show_ref_definition(client,
print_task_definition_id,
print_task_id):
return client.get_ref_definition(print_task_definition_id=print_task_definition_id,
print_task_id=print_task_id)
def devicescloudprint_print_task_definition_task_show_ref_trigger(client,
print_task_definition_id,
print_task_id):
return client.get_ref_trigger(print_task_definition_id=print_task_definition_id,
print_task_id=print_task_id)
def devicescloudprint_print_task_definition_task_show_trigger(client,
print_task_definition_id,
print_task_id,
select=None,
expand=None):
return client.get_trigger(print_task_definition_id=print_task_definition_id,
print_task_id=print_task_id,
select=select,
expand=expand)
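# --- Hedged usage sketch (not part of the generated wrappers above) ---------
# Every wrapper above follows the same pattern: copy its keyword arguments into
# a `body` dict and delegate to the matching method on a generated SDK `client`.
# The stub client below is hypothetical and simply echoes its inputs, so the
# sketch runs without the real Microsoft Graph SDK being installed.
class _StubPrinterClient:
    def create_allowed_groups(self, printer_id, body):
        # A real client would issue the HTTP request here; the stub just echoes.
        return {'printer_id': printer_id, 'body': body}

if __name__ == '__main__':
    result = devicescloudprint_print_printer_create_allowed_group(
        _StubPrinterClient(),
        printer_id='printer-123',   # hypothetical identifiers, for illustration only
        id_='group-456',
        display_name='Finance')
    print(result)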
| 54.12232
| 104
| 0.455965
| 8,224
| 111,059
| 5.716804
| 0.024805
| 0.081676
| 0.071786
| 0.034032
| 0.961948
| 0.948485
| 0.934234
| 0.906626
| 0.883654
| 0.853408
| 0
| 0
| 0.48648
| 111,059
| 2,051
| 105
| 54.148708
| 0.824376
| 0.004232
| 0
| 0.846979
| 0
| 0
| 0.08795
| 0.026519
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076228
| false
| 0
| 0
| 0.055336
| 0.15415
| 0.281197
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 |
e92397028f89ec4045ccddf463fe879d4ae6a65c | 484 | py | Python | datacheck60_ten.py | SamhooXee/k | d55e55ae495ffae44738b0b6daa9a9902e11e91a | ["Apache-2.0"] | null | null | null |
datacheck60_ten.py | SamhooXee/k | d55e55ae495ffae44738b0b6daa9a9902e11e91a | ["Apache-2.0"] | null | null | null |
datacheck60_ten.py | SamhooXee/k | d55e55ae495ffae44738b0b6daa9a9902e11e91a | ["Apache-2.0"] | null | null | null |
def dataCheck_ten(datalist):
m = datalist[0]
m1 = datalist[1]
if m['Close'] > m1['Close'] and (m['Close'] - m1['Close'])/m1['Close'] > 0.095:
return (True, 'UP,%f,%f,%f' % (m['Close'], m1['Close'], (m['Close'] - m1['Close'])/m1['Close']))
elif m['Close'] < m1['Close'] and (m['Close'] - m1['Close'])/m1['Close'] < -0.095:
return (True, 'Down,%f,%f,%f' % (m['Close'], m1['Close'], (m['Close'] - m1['Close'])/m1['Close']))
return (False, 'NULL')
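# --- Hedged usage sketch -----------------------------------------------------
# dataCheck_ten flags a move of more than 9.5% between the two bars it is given
# (it compares datalist[0] against datalist[1], which this sketch assumes are the
# latest and previous bars). The sample closes below are made up for illustration.
if __name__ == '__main__':
    latest = {'Close': 110.0}
    previous = {'Close': 100.0}
    print(dataCheck_ten([latest, previous]))  # -> (True, 'UP,110.000000,100.000000,0.100000')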
| 37.230769
| 106
| 0.504132
| 73
| 484
| 3.328767
| 0.273973
| 0.345679
| 0.592593
| 0.427984
| 0.707819
| 0.707819
| 0.707819
| 0.707819
| 0.707819
| 0.707819
| 0
| 0.058228
| 0.183884
| 484
| 12
| 107
| 40.333333
| 0.556962
| 0
| 0
| 0
| 0
| 0
| 0.26556
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 8 |
e933f10fa2adec3586c73cb2a4e5fbf7da1b4b15 | 194 | py | Python | sciencer/collectors/__init__.py | SciencerIO/sciencer-toolkit | f17c4a5dfb6cc5dbabefe03b13eb1e5345f7b1b9 | ["MIT"] | 2 | 2022-03-28T17:27:21.000Z | 2022-03-29T22:27:15.000Z |
sciencer/collectors/__init__.py | SciencerIO/sciencer-toolkit | f17c4a5dfb6cc5dbabefe03b13eb1e5345f7b1b9 | ["MIT"] | null | null | null |
sciencer/collectors/__init__.py | SciencerIO/sciencer-toolkit | f17c4a5dfb6cc5dbabefe03b13eb1e5345f7b1b9 | ["MIT"] | 1 | 2022-03-28T14:47:53.000Z | 2022-03-28T14:47:53.000Z |
"""Sciencer Collectors"""
from .collector import Collector
from .collect_by_doi import CollectByDOI
from .collect_by_author import CollectByAuthorID
from .collect_by_terms import CollectByTerms
| 32.333333
| 48
| 0.85567
| 24
| 194
| 6.666667
| 0.541667
| 0.20625
| 0.24375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092784
| 194
| 5
| 49
| 38.8
| 0.909091
| 0.097938
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0 | 7 |
3a8f16e185f4ad0a010841d512343fd7556db405 | 6,588 | py | Python | scripts/slave/recipes/chromium.gpu.recipe_autogen.py | bopopescu/build | 4e95fd33456e552bfaf7d94f7d04b19273d1c534 | ["BSD-3-Clause"] | null | null | null |
scripts/slave/recipes/chromium.gpu.recipe_autogen.py | bopopescu/build | 4e95fd33456e552bfaf7d94f7d04b19273d1c534 | ["BSD-3-Clause"] | null | null | null |
scripts/slave/recipes/chromium.gpu.recipe_autogen.py | bopopescu/build | 4e95fd33456e552bfaf7d94f7d04b19273d1c534 | ["BSD-3-Clause"] | 1 | 2020-07-23T11:05:06.000Z | 2020-07-23T11:05:06.000Z |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'depot_tools/bot_update',
'chromium',
'depot_tools/gclient',
'recipe_engine/json',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/step',
]
def Android_Debug__Nexus_9__steps(api):
# update scripts step; implicitly run by recipe engine.
# bot_update step
src_cfg = api.gclient.make_config(GIT_MODE=True)
soln = src_cfg.solutions.add()
soln.name = "src"
soln.url = "https://chromium.googlesource.com/chromium/src.git"
soln.custom_deps = {'src/third_party/WebKit/LayoutTests': None}
soln.custom_vars = {'webkit_trunk': 'http://src.chromium.org/blink/trunk',
'googlecode_url': 'http://%s.googlecode.com/svn',
'nacl_trunk': 'http://src.chromium.org/native_client/trunk',
'sourceforge_url': 'https://svn.code.sf.net/p/%(repo)s/code',
'llvm_url': 'http://llvm.org/svn/llvm-project'}
src_cfg.target_os = set(['android'])
src_cfg.got_revision_mapping.update({'src': 'got_revision',
'src/third_party/WebKit': 'got_webkit_revision',
'src/tools/swarming_client': 'got_swarming_client_revision',
'src/v8': 'got_v8_revision'})
api.gclient.c = src_cfg
api.bot_update.ensure_checkout(force=True)
# gclient revert step
api.gclient.revert()
# cleanup_temp step
api.chromium.cleanup_temp()
# slave_steps step
api.python("slave_steps", "src/build/android/buildbot/bb_run_bot.py",
args=['--build-properties=%s' % api.json.dumps(api.properties.legacy(),
separators=(',', ':')),
'--factory-properties={"GYP_DEFINES":" component=shared_library",'+\
'"android_bot_id":"gpu-builder-tests-dbg","clobber":false,'+\
'"gclient_env":{},"gclient_timeout":3600,"target":"Debug",'+\
'"target_os":"android"}'], allow_subannotations=True)
def Android_Debug__Nexus_5__steps(api):
# update scripts step; implicitly run by recipe engine.
# bot_update step
src_cfg = api.gclient.make_config(GIT_MODE=True)
soln = src_cfg.solutions.add()
soln.name = "src"
soln.url = "https://chromium.googlesource.com/chromium/src.git"
soln.custom_deps = {'src/third_party/WebKit/LayoutTests': None}
soln.custom_vars = {'webkit_trunk': 'http://src.chromium.org/blink/trunk',
'googlecode_url': 'http://%s.googlecode.com/svn',
'nacl_trunk': 'http://src.chromium.org/native_client/trunk',
'sourceforge_url': 'https://svn.code.sf.net/p/%(repo)s/code',
'llvm_url': 'http://llvm.org/svn/llvm-project'}
src_cfg.target_os = set(['android'])
src_cfg.got_revision_mapping.update({'src': 'got_revision',
'src/third_party/WebKit': 'got_webkit_revision',
'src/tools/swarming_client': 'got_swarming_client_revision',
'src/v8': 'got_v8_revision'})
api.gclient.c = src_cfg
api.bot_update.ensure_checkout(force=True)
# gclient revert step
api.gclient.revert()
# cleanup_temp step
api.chromium.cleanup_temp()
# slave_steps step
api.python("slave_steps", "src/build/android/buildbot/bb_run_bot.py",
args=['--build-properties=%s' % api.json.dumps(api.properties.legacy(),
separators=(',', ':')),
'--factory-properties={"GYP_DEFINES":" component=shared_library",'+\
'"android_bot_id":"gpu-builder-tests-dbg","clobber":false,'+\
'"gclient_env":{},"gclient_timeout":3600,"target":"Debug",'+\
'"target_os":"android"}'], allow_subannotations=True)
def Android_Debug__Nexus_6__steps(api):
# update scripts step; implicitly run by recipe engine.
# bot_update step
src_cfg = api.gclient.make_config(GIT_MODE=True)
soln = src_cfg.solutions.add()
soln.name = "src"
soln.url = "https://chromium.googlesource.com/chromium/src.git"
soln.custom_deps = {'src/third_party/WebKit/LayoutTests': None}
soln.custom_vars = {'webkit_trunk': 'http://src.chromium.org/blink/trunk',
'googlecode_url': 'http://%s.googlecode.com/svn',
'nacl_trunk': 'http://src.chromium.org/native_client/trunk',
'sourceforge_url': 'https://svn.code.sf.net/p/%(repo)s/code',
'llvm_url': 'http://llvm.org/svn/llvm-project'}
src_cfg.target_os = set(['android'])
src_cfg.got_revision_mapping.update({'src': 'got_revision',
'src/third_party/WebKit': 'got_webkit_revision',
'src/tools/swarming_client': 'got_swarming_client_revision',
'src/v8': 'got_v8_revision'})
api.gclient.c = src_cfg
api.bot_update.ensure_checkout(force=True)
# gclient revert step
api.gclient.revert()
# cleanup_temp step
api.chromium.cleanup_temp()
# slave_steps step
api.python("slave_steps", "src/build/android/buildbot/bb_run_bot.py",
args=['--build-properties=%s' % api.json.dumps(api.properties.legacy(),
separators=(',', ':')),
'--factory-properties={"GYP_DEFINES":" component=shared_library",'+\
'"android_bot_id":"gpu-builder-tests-dbg","clobber":false,'+\
'"gclient_env":{},"gclient_timeout":3600,"target":"Debug",'+\
'"target_os":"android"}'], allow_subannotations=True)
dispatch_directory = {
'Android Debug (Nexus 9)': Android_Debug__Nexus_9__steps,
'Android Debug (Nexus 5)': Android_Debug__Nexus_5__steps,
'Android Debug (Nexus 6)': Android_Debug__Nexus_6__steps,
}
def RunSteps(api):
if api.properties["buildername"] not in dispatch_directory:
raise api.step.StepFailure("Builder unsupported by recipe.")
else:
dispatch_directory[api.properties["buildername"]](api)
def GenTests(api):
yield (api.test('Android_Debug__Nexus_9_') +
api.properties(mastername='chromium.gpu') +
api.properties(buildername='Android Debug (Nexus 9)') +
api.properties(slavename='TestSlave')
)
yield (api.test('Android_Debug__Nexus_5_') +
api.properties(mastername='chromium.gpu') +
api.properties(buildername='Android Debug (Nexus 5)') +
api.properties(slavename='TestSlave')
)
yield (api.test('Android_Debug__Nexus_6_') +
api.properties(mastername='chromium.gpu') +
api.properties(buildername='Android Debug (Nexus 6)') +
api.properties(slavename='TestSlave')
)
yield (api.test('builder_not_in_dispatch_directory') +
api.properties(mastername='chromium.gpu') +
api.properties(buildername='nonexistent_builder') +
api.properties(slavename='TestSlave')
)
| 43.629139
| 80
| 0.675471
| 840
| 6,588
| 5.054762
| 0.17619
| 0.052049
| 0.060057
| 0.026849
| 0.865756
| 0.844795
| 0.836552
| 0.826425
| 0.812765
| 0.812765
| 0
| 0.006707
| 0.162568
| 6,588
| 150
| 81
| 43.92
| 0.762915
| 0.080449
| 0
| 0.710744
| 0
| 0
| 0.451399
| 0.207816
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041322
| false
| 0
| 0
| 0
| 0.041322
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 |
3aa9888c5f88ad919cadaff714c06be3b8f3761e | 36,810 | py | Python | profiles/e_confirmation/xb_request/_cvc.py | getodacu/eSENS-eDocument | e43bc9800070421de389d4826245dc0a48d387c4 | ["MIT"] | null | null | null |
profiles/e_confirmation/xb_request/_cvc.py | getodacu/eSENS-eDocument | e43bc9800070421de389d4826245dc0a48d387c4 | ["MIT"] | null | null | null |
profiles/e_confirmation/xb_request/_cvc.py | getodacu/eSENS-eDocument | e43bc9800070421de389d4826245dc0a48d387c4 | ["MIT"] | null | null | null |
# ./_cvc.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:2590ac6675f7dc6a1ef34beec1fcf361b1df306e
# Generated 2015-02-11 21:35:49.973647 by PyXB version 1.2.4 using Python 2.6.9.final.0
# Namespace http://www.w3.org/ns/corevocabulary/BasicComponents [xmlns:cvc]
from __future__ import unicode_literals
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
import pyxb.utils.six as _six
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:2b2e2fd1-b225-11e4-b26c-14109fe53921')
# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.4'
# Generated bindings are not compatible across PyXB versions
if pyxb.__version__ != _PyXBVersion:
raise pyxb.PyXBVersionError(_PyXBVersion)
# Import bindings for namespaces imported into schema
import _udt as _ImportedBinding__udt
import pyxb.binding.datatypes
# NOTE: All namespace declarations are reserved within the binding
Namespace = pyxb.namespace.NamespaceForURI('http://www.w3.org/ns/corevocabulary/BasicComponents', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
"""Parse the given XML and use the document element to create a
Python instance.
@param xml_text An XML document. This should be data (Python 2
str or Python 3 bytes), or a text (Python 2 unicode or Python 3
str) in the L{pyxb._InputEncoding} encoding.
@keyword default_namespace The L{pyxb.Namespace} instance to use as the
default namespace where there is no default namespace in scope.
If unspecified or C{None}, the namespace of the module containing
this function will be used.
@keyword location_base: An object to be recorded as the base of all
L{pyxb.utils.utility.Location} instances associated with events and
objects handled by the parser. You might pass the URI from which
the document was obtained.
"""
if pyxb.XMLStyle_saxer != pyxb._XMLStyle:
dom = pyxb.utils.domutils.StringToDOM(xml_text)
return CreateFromDOM(dom.documentElement, default_namespace=default_namespace)
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
saxer = pyxb.binding.saxer.make_parser(fallback_namespace=default_namespace, location_base=location_base)
handler = saxer.getContentHandler()
xmld = xml_text
if isinstance(xmld, _six.text_type):
xmld = xmld.encode(pyxb._InputEncoding)
saxer.parse(io.BytesIO(xmld))
instance = handler.rootObject()
return instance
def CreateFromDOM (node, default_namespace=None):
"""Create a Python instance from the given DOM node.
The node tag must correspond to an element declaration in this module.
@deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
if default_namespace is None:
default_namespace = Namespace.fallbackNamespace()
return pyxb.binding.basis.element.AnyCreateFromDOM(node, default_namespace)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}DeathDateType with content type SIMPLE
class DeathDateType (_ImportedBinding__udt.DateType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}DeathDateType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.date
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'DeathDateType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 73, 3)
_ElementMap = _ImportedBinding__udt.DateType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.DateType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.DateType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'DeathDateType', DeathDateType)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}RequestDateType with content type SIMPLE
class RequestDateType (_ImportedBinding__udt.DateType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}RequestDateType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.date
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'RequestDateType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 113, 3)
_ElementMap = _ImportedBinding__udt.DateType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.DateType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.DateType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'RequestDateType', RequestDateType)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}ActivityCodeType with content type SIMPLE
class ActivityCodeType (_ImportedBinding__udt.CodeType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}ActivityCodeType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.normalizedString
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ActivityCodeType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 43, 3)
_ElementMap = _ImportedBinding__udt.CodeType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.CodeType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.CodeType
# Attribute listID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listAgencyID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listAgencyName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listVersionID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute name inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute languageID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listSchemeURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'ActivityCodeType', ActivityCodeType)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}ActivityDescriptionType with content type SIMPLE
class ActivityDescriptionType (_ImportedBinding__udt.TextType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}ActivityDescriptionType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'ActivityDescriptionType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 48, 3)
_ElementMap = _ImportedBinding__udt.TextType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.TextType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.TextType
# Attribute languageID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}TextType
# Attribute languageLocaleID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}TextType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'ActivityDescriptionType', ActivityDescriptionType)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}AdminunitFirstlineType with content type SIMPLE
class AdminunitFirstlineType (_ImportedBinding__udt.TextType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}AdminunitFirstlineType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'AdminunitFirstlineType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 53, 3)
_ElementMap = _ImportedBinding__udt.TextType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.TextType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.TextType
# Attribute languageID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}TextType
# Attribute languageLocaleID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}TextType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'AdminunitFirstlineType', AdminunitFirstlineType)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}AlternativeNameType with content type SIMPLE
class AlternativeNameType (_ImportedBinding__udt.TextType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}AlternativeNameType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'AlternativeNameType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 58, 3)
_ElementMap = _ImportedBinding__udt.TextType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.TextType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.TextType
# Attribute languageID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}TextType
# Attribute languageLocaleID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}TextType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'AlternativeNameType', AlternativeNameType)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}CompanyStatusCodeType with content type SIMPLE
class CompanyStatusCodeType (_ImportedBinding__udt.CodeType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}CompanyStatusCodeType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.normalizedString
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'CompanyStatusCodeType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 63, 3)
_ElementMap = _ImportedBinding__udt.CodeType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.CodeType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.CodeType
# Attribute listID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listAgencyID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listAgencyName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listVersionID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute name inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute languageID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listSchemeURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'CompanyStatusCodeType', CompanyStatusCodeType)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}CompanyTypeCodeType with content type SIMPLE
class CompanyTypeCodeType (_ImportedBinding__udt.CodeType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}CompanyTypeCodeType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.normalizedString
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'CompanyTypeCodeType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 68, 3)
_ElementMap = _ImportedBinding__udt.CodeType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.CodeType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.CodeType
# Attribute listID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listAgencyID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listAgencyName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listVersionID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute name inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute languageID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
# Attribute listSchemeURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}CodeType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'CompanyTypeCodeType', CompanyTypeCodeType)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}GeographicIDType with content type SIMPLE
class GeographicIDType (_ImportedBinding__udt.IdentifierType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}GeographicIDType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.normalizedString
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'GeographicIDType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 78, 3)
_ElementMap = _ImportedBinding__udt.IdentifierType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.IdentifierType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.IdentifierType
# Attribute schemeID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeAgencyID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeAgencyName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeVersionID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeDataURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'GeographicIDType', GeographicIDType)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}GeographicNameType with content type SIMPLE
class GeographicNameType (_ImportedBinding__udt.TextType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}GeographicNameType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'GeographicNameType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 83, 3)
_ElementMap = _ImportedBinding__udt.TextType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.TextType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.TextType
# Attribute languageID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}TextType
# Attribute languageLocaleID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}TextType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'GeographicNameType', GeographicNameType)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}HealthCareProviderIDType with content type SIMPLE
class HealthCareProviderIDType (_ImportedBinding__udt.IdentifierType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}HealthCareProviderIDType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.normalizedString
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'HealthCareProviderIDType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 88, 3)
_ElementMap = _ImportedBinding__udt.IdentifierType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.IdentifierType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.IdentifierType
# Attribute schemeID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeAgencyID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeAgencyName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeVersionID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeDataURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'HealthCareProviderIDType', HealthCareProviderIDType)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}HealthInsuranceOrganizationIDType with content type SIMPLE
class HealthInsuranceOrganizationIDType (_ImportedBinding__udt.IdentifierType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}HealthInsuranceOrganizationIDType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.normalizedString
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'HealthInsuranceOrganizationIDType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 93, 3)
_ElementMap = _ImportedBinding__udt.IdentifierType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.IdentifierType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.IdentifierType
# Attribute schemeID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeAgencyID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeAgencyName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeVersionID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeDataURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'HealthInsuranceOrganizationIDType', HealthInsuranceOrganizationIDType)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}JurisdictionIDType with content type SIMPLE
class JurisdictionIDType (_ImportedBinding__udt.IdentifierType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}JurisdictionIDType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.normalizedString
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'JurisdictionIDType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 98, 3)
_ElementMap = _ImportedBinding__udt.IdentifierType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.IdentifierType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.IdentifierType
# Attribute schemeID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeAgencyID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeAgencyName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeVersionID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeDataURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'JurisdictionIDType', JurisdictionIDType)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}LegalIDType with content type SIMPLE
class LegalIDType (_ImportedBinding__udt.IdentifierType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}LegalIDType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.normalizedString
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'LegalIDType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 103, 3)
_ElementMap = _ImportedBinding__udt.IdentifierType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.IdentifierType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.IdentifierType
# Attribute schemeID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeAgencyID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeAgencyName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeVersionID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeDataURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'LegalIDType', LegalIDType)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}LegalNameType with content type SIMPLE
class LegalNameType (_ImportedBinding__udt.TextType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}LegalNameType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.string
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'LegalNameType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 108, 3)
_ElementMap = _ImportedBinding__udt.TextType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.TextType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.TextType
# Attribute languageID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}TextType
# Attribute languageLocaleID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}TextType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'LegalNameType', LegalNameType)
# Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}RequestIDType with content type SIMPLE
class RequestIDType (_ImportedBinding__udt.IdentifierType):
"""Complex type {http://www.w3.org/ns/corevocabulary/BasicComponents}RequestIDType with content type SIMPLE"""
_TypeDefinition = pyxb.binding.datatypes.normalizedString
_ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
_Abstract = False
_ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'RequestIDType')
_XSDLocation = pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 118, 3)
_ElementMap = _ImportedBinding__udt.IdentifierType._ElementMap.copy()
_AttributeMap = _ImportedBinding__udt.IdentifierType._AttributeMap.copy()
# Base type is _ImportedBinding__udt.IdentifierType
# Attribute schemeID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeAgencyID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeAgencyName inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeVersionID inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeDataURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
# Attribute schemeURI inherited from {urn:un:unece:uncefact:data:specification:CoreComponentTypeSchemaModule:2}IdentifierType
_ElementMap.update({
})
_AttributeMap.update({
})
Namespace.addCategoryObject('typeBinding', 'RequestIDType', RequestIDType)
DeathDate = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'DeathDate'), DeathDateType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 30, 3))
Namespace.addCategoryObject('elementBinding', DeathDate.name().localName(), DeathDate)
RequestDate = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'RequestDate'), RequestDateType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 39, 3))
Namespace.addCategoryObject('elementBinding', RequestDate.name().localName(), RequestDate)
ActivityCode = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ActivityCode'), ActivityCodeType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 24, 3))
Namespace.addCategoryObject('elementBinding', ActivityCode.name().localName(), ActivityCode)
ActivityDescription = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'ActivityDescription'), ActivityDescriptionType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 25, 3))
Namespace.addCategoryObject('elementBinding', ActivityDescription.name().localName(), ActivityDescription)
AdminunitFirstline = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'AdminunitFirstline'), AdminunitFirstlineType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 26, 3))
Namespace.addCategoryObject('elementBinding', AdminunitFirstline.name().localName(), AdminunitFirstline)
AlternativeName = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'AlternativeName'), AlternativeNameType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 27, 3))
Namespace.addCategoryObject('elementBinding', AlternativeName.name().localName(), AlternativeName)
CompanyStatusCode = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'CompanyStatusCode'), CompanyStatusCodeType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 28, 3))
Namespace.addCategoryObject('elementBinding', CompanyStatusCode.name().localName(), CompanyStatusCode)
CompanyTypeCode = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'CompanyTypeCode'), CompanyTypeCodeType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 29, 3))
Namespace.addCategoryObject('elementBinding', CompanyTypeCode.name().localName(), CompanyTypeCode)
GeographicID = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'GeographicID'), GeographicIDType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 31, 3))
Namespace.addCategoryObject('elementBinding', GeographicID.name().localName(), GeographicID)
GeographicName = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'GeographicName'), GeographicNameType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 32, 3))
Namespace.addCategoryObject('elementBinding', GeographicName.name().localName(), GeographicName)
HealthCareProviderID = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'HealthCareProviderID'), HealthCareProviderIDType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 33, 3))
Namespace.addCategoryObject('elementBinding', HealthCareProviderID.name().localName(), HealthCareProviderID)
HealthInsuranceOrganizationID = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'HealthInsuranceOrganizationID'), HealthInsuranceOrganizationIDType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 34, 3))
Namespace.addCategoryObject('elementBinding', HealthInsuranceOrganizationID.name().localName(), HealthInsuranceOrganizationID)
JurisdictionID = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'JurisdictionID'), JurisdictionIDType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 36, 3))
Namespace.addCategoryObject('elementBinding', JurisdictionID.name().localName(), JurisdictionID)
LegalID = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LegalID'), LegalIDType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 37, 3))
Namespace.addCategoryObject('elementBinding', LegalID.name().localName(), LegalID)
LegalName = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'LegalName'), LegalNameType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 38, 3))
Namespace.addCategoryObject('elementBinding', LegalName.name().localName(), LegalName)
RequestID = pyxb.binding.basis.element(pyxb.namespace.ExpandedName(Namespace, 'RequestID'), RequestIDType, location=pyxb.utils.utility.Location('/Users/radu/Projects/esens/edocument/profiles/e_confirmation/xsd/request/CoreVocabularyBasicComponents.xsd', 40, 3))
Namespace.addCategoryObject('elementBinding', RequestID.name().localName(), RequestID)
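# --- Hedged usage sketch (editor addition, not part of the pyxbgen output) ---
# The element bindings registered above (DeathDate, LegalName, RequestID, ...)
# are callable factories for their corresponding *Type classes and can
# serialize themselves to XML. A minimal example, assuming this generated
# module is importable in the current environment:
if __name__ == '__main__':
    _legal_name = LegalName('Example Organisation Ltd')  # illustrative value
    _request_id = RequestID('REQ-0001')                  # illustrative value
    print(_legal_name.toxml('utf-8'))
    print(_request_id.toxml('utf-8'))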
| 61.146179 | 321 | 0.800462 | 3,630 | 36,810 | 7.984848 | 0.085399 | 0.035432 | 0.043609 | 0.04906 | 0.810833 | 0.808073 | 0.808073 | 0.805106 | 0.773642 | 0.769087 | 0 | 0.008838 | 0.105542 | 36,810 | 601 | 322 | 61.24792 | 0.871496 | 0.420755 | 0 | 0.538182 | 1 | 0.116364 | 0.224988 | 0.176342 | 0 | 0 | 0 | 0 | 0 | 1 | 0.007273 | false | 0 | 0.214545 | 0 | 0.698182 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
aaf95edac41497e8ed62f0efd916d196d1003e3d | 17,343 | py | Python | model/FCN.py | Leyan529/ImageSegmentationPL | c41d6b4d2fa86620601dd4401775494d372f07b2 | ["MIT"] | null | null | null | model/FCN.py | Leyan529/ImageSegmentationPL | c41d6b4d2fa86620601dd4401775494d372f07b2 | ["MIT"] | null | null | null | model/FCN.py | Leyan529/ImageSegmentationPL | c41d6b4d2fa86620601dd4401775494d372f07b2 | ["MIT"] | null | null | null |
import torch
import torch.nn as nn
import torch.optim as optim
from torchvision import models
from torchvision.models.vgg import VGG
import torch.nn.functional as F
import pytorch_lightning as pl
import os
from LightningFunc.step import *
from LightningFunc.accuracy import *
from LightningFunc.optimizer import *
from LightningFunc.utils import *
from LightningFunc.losses import configure_loss
import numpy as np  # np is used below for the confusion-matrix buffers
ranges = {
'vgg11': ((0, 3), (3, 6), (6, 11), (11, 16), (16, 21)),
'vgg13': ((0, 5), (5, 10), (10, 15), (15, 20), (20, 25)),
'vgg16': ((0, 5), (5, 10), (10, 17), (17, 24), (24, 31)),
'vgg19': ((0, 5), (5, 10), (10, 19), (19, 28), (28, 37))
}
# cropped version from https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py
cfg = {
'vgg11': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
'vgg13': [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
'vgg16': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M'],
'vgg19': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M'],
}
def make_layers(cfg, batch_norm=False):
layers = []
in_channels = 3
for v in cfg:
if v == 'M':
layers += [nn.MaxPool2d(kernel_size=2, stride=2)]
else:
conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1)
if batch_norm:
layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)]
else:
layers += [conv2d, nn.ReLU(inplace=True)]
in_channels = v
return nn.Sequential(*layers)
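# Hedged sanity check (editor addition): the index pairs in `ranges` refer to
# positions in the nn.Sequential returned by make_layers. For 'vgg16' without
# batch norm that sequential holds 31 modules (13 conv + 13 ReLU + 5 max-pool),
# which matches the final index in ranges['vgg16'].
if __name__ == '__main__':
    _features = make_layers(cfg['vgg16'])
    assert len(_features) == ranges['vgg16'][-1][1] == 31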
class VGGNet(VGG):
def __init__(self, pretrained=True, model='vgg16', requires_grad=True, remove_fc=True, show_params=False):
super(VGGNet, self).__init__(make_layers(cfg[model]))
self.ranges = ranges[model]
        if pretrained:
            # load the matching torchvision VGG weights by name (avoids the exec() indirection)
            self.load_state_dict(getattr(models, model)(pretrained=True).state_dict())
if not requires_grad:
for param in super().parameters():
param.requires_grad = False
if remove_fc: # delete redundant fully-connected layer params, can save memory
del self.classifier
if show_params:
for name, param in self.named_parameters():
print(name, param.size())
def forward(self, x):
# output = {}
output = []
# get the output of each maxpooling layer (5 maxpool in VGG net)
for idx in range(len(self.ranges)):
for layer in range(self.ranges[idx][0], self.ranges[idx][1]):
x = self.features[layer](x)
# output["x%d"%(idx+1)] = x
output.append(x)
return output
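# Hedged usage sketch (editor addition): VGGNet returns the five max-pool
# feature maps at strides 2, 4, 8, 16 and 32; the FCN heads below consume them
# as x1..x5. Shapes assume a 224x224 input and pretrained=False, so no weight
# download is needed.
if __name__ == '__main__':
    _backbone = VGGNet(pretrained=False)
    _maps = _backbone(torch.rand(1, 3, 224, 224))
    for _i, _fmap in enumerate(_maps, start=1):
        print('x%d' % _i, tuple(_fmap.shape))  # x5 -> (1, 512, 7, 7)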
class FCN32s(pl.LightningModule):
def __init__(self, num_classes, args):
super().__init__()
self.num_classes = num_classes
self.__build_model()
self.__build_func(FCN32s)
self.args = args
self.criterion = configure_loss(self.args.criterion)
self.checkname = self.backbone
        self.dir = os.path.join("log_dir", self.args.data_module, self.checkname)
        self.confusion_matrix = np.zeros((self.num_classes,) * 2)
        self.sample = (8, 3, 512, 256)
        self.sampleImg = torch.rand((1, 3, 512, 256)).cuda()  # assumes a CUDA device is available
def __build_model(self):
self.pretrained_net = VGGNet(requires_grad=True)
self.relu = nn.ReLU(inplace=True)
self.deconv1 = nn.ConvTranspose2d(512, 512, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn1 = nn.BatchNorm2d(512)
self.deconv2 = nn.ConvTranspose2d(512, 256, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn2 = nn.BatchNorm2d(256)
self.deconv3 = nn.ConvTranspose2d(256, 128, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn3 = nn.BatchNorm2d(128)
self.deconv4 = nn.ConvTranspose2d(128, 64, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn4 = nn.BatchNorm2d(64)
self.deconv5 = nn.ConvTranspose2d(64, 32, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn5 = nn.BatchNorm2d(32)
self.classifier = nn.Conv2d(32, self.num_classes, kernel_size=1)
def __build_func(self, obj):
"""Define model layers & loss."""
self.backbone = "FCN32s"
setattr(obj, "training_step", training_step)
setattr(obj, "training_epoch_end", training_epoch_end)
setattr(obj, "validation_step", validation_step)
setattr(obj, "validation_epoch_end", validation_epoch_end)
setattr(obj, "test_step", test_step)
setattr(obj, "test_epoch_end", test_epoch_end)
setattr(obj, "configure_optimizers", configure_optimizers)
setattr(obj, "prepare_matrix", prepare_matrix)
setattr(obj, "generate_matrix", generate_matrix)
setattr(obj, "saveDetail", saveDetail)
setattr(obj, "generate_score", generate_score)
setattr(obj, "write_Best_model_path", write_Best_model_path)
setattr(obj, "read_Best_model_path", read_Best_model_path)
def forward(self, x):
# output = self.pretrained_net(x)
# x5 = output['x5'] # size=(N, 512, x.H/32, x.W/32)
x1, x2, x3, x4, x5 = self.pretrained_net(x)
score = self.bn1(self.relu(self.deconv1(x5))) # size=(N, 512, x.H/16, x.W/16)
score = self.bn2(self.relu(self.deconv2(score))) # size=(N, 256, x.H/8, x.W/8)
score = self.bn3(self.relu(self.deconv3(score))) # size=(N, 128, x.H/4, x.W/4)
score = self.bn4(self.relu(self.deconv4(score))) # size=(N, 64, x.H/2, x.W/2)
score = self.bn5(self.relu(self.deconv5(score))) # size=(N, 32, x.H, x.W)
score = self.classifier(score) # size=(N, num_classes, x.H/1, x.W/1)
return score # size=(N, num_classes, x.H/1, x.W/1)
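# Editor note (hedged): every ConvTranspose2d in the decoder uses kernel_size=3,
# stride=2, padding=1, dilation=1, output_padding=1, so each stage exactly
# doubles the spatial resolution:
#   H_out = (H_in - 1)*stride - 2*padding + dilation*(kernel_size - 1) + output_padding + 1
#         = (H_in - 1)*2 - 2 + 2 + 1 + 1
#         = 2*H_in
# Five such stages take the H/32 x W/32 map from VGG back to the input size,
# and the final 1x1 classifier conv maps the 32 channels to num_classes.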
class FCN16s(pl.LightningModule):
def __init__(self, num_classes, args):
super().__init__()
self.num_classes = num_classes
self.__build_model()
self.__build_func(FCN16s)
self.args = args
self.criterion = configure_loss(self.args.criterion)
self.checkname = self.backbone
        self.dir = os.path.join("log_dir", self.args.data_module, self.checkname)
        self.confusion_matrix = np.zeros((self.num_classes,) * 2)
        self.sample = (8, 3, 512, 256)
        self.sampleImg = torch.rand((1, 3, 512, 256)).cuda()
def __build_model(self):
self.name = 'FCN16s'
self.pretrained_net = VGGNet(requires_grad=True)
self.relu = nn.ReLU(inplace=True)
self.deconv1 = nn.ConvTranspose2d(512, 512, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn1 = nn.BatchNorm2d(512)
self.deconv2 = nn.ConvTranspose2d(512, 256, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn2 = nn.BatchNorm2d(256)
self.deconv3 = nn.ConvTranspose2d(256, 128, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn3 = nn.BatchNorm2d(128)
self.deconv4 = nn.ConvTranspose2d(128, 64, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn4 = nn.BatchNorm2d(64)
self.deconv5 = nn.ConvTranspose2d(64, 32, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn5 = nn.BatchNorm2d(32)
self.classifier = nn.Conv2d(32, self.num_classes, kernel_size=1)
def __build_func(self, obj):
"""Define model layers & loss."""
self.backbone = "FCN16s"
setattr(obj, "training_step", training_step)
setattr(obj, "training_epoch_end", training_epoch_end)
setattr(obj, "validation_step", validation_step)
setattr(obj, "validation_epoch_end", validation_epoch_end)
setattr(obj, "test_step", test_step)
setattr(obj, "test_epoch_end", test_epoch_end)
setattr(obj, "configure_optimizers", configure_optimizers)
setattr(obj, "prepare_matrix", prepare_matrix)
setattr(obj, "generate_matrix", generate_matrix)
setattr(obj, "saveDetail", saveDetail)
setattr(obj, "generate_score", generate_score)
setattr(obj, "write_Best_model_path", write_Best_model_path)
setattr(obj, "read_Best_model_path", read_Best_model_path)
def forward(self, x):
# output = self.pretrained_net(x)
# x5 = output['x5'] # size=(N, 512, x.H/32, x.W/32)
# x4 = output['x4'] # size=(N, 512, x.H/16, x.W/16)
x1, x2, x3, x4, x5 = self.pretrained_net(x)
score = self.relu(self.deconv1(x5)) # size=(N, 512, x.H/16, x.W/16)
score = self.bn1(score + x4) # element-wise add, size=(N, 512, x.H/16, x.W/16)
score = self.bn2(self.relu(self.deconv2(score))) # size=(N, 256, x.H/8, x.W/8)
score = self.bn3(self.relu(self.deconv3(score))) # size=(N, 128, x.H/4, x.W/4)
score = self.bn4(self.relu(self.deconv4(score))) # size=(N, 64, x.H/2, x.W/2)
score = self.bn5(self.relu(self.deconv5(score))) # size=(N, 32, x.H, x.W)
score = self.classifier(score) # size=(N, num_classes, x.H/1, x.W/1)
return score # size=(N, num_classes, x.H/1, x.W/1)
class FCN8s(pl.LightningModule):
def __init__(self, num_classes, args):
super().__init__()
self.num_classes = num_classes
self.__build_model()
self.__build_func(FCN8s)
self.args = args
self.criterion = configure_loss(self.args.criterion)
self.checkname = self.backbone
        self.dir = os.path.join("log_dir", self.args.data_module, self.checkname)
        self.confusion_matrix = np.zeros((self.num_classes,) * 2)
        self.sample = (8, 3, 512, 256)
        self.sampleImg = torch.rand((1, 3, 512, 256)).cuda()
def __build_model(self):
self.name = 'FCN8s'
self.pretrained_net = VGGNet(requires_grad=True)
self.relu = nn.ReLU(inplace=True)
self.deconv1 = nn.ConvTranspose2d(512, 512, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn1 = nn.BatchNorm2d(512)
self.deconv2 = nn.ConvTranspose2d(512, 256, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn2 = nn.BatchNorm2d(256)
self.deconv3 = nn.ConvTranspose2d(256, 128, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn3 = nn.BatchNorm2d(128)
self.deconv4 = nn.ConvTranspose2d(128, 64, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn4 = nn.BatchNorm2d(64)
self.deconv5 = nn.ConvTranspose2d(64, 32, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn5 = nn.BatchNorm2d(32)
self.classifier = nn.Conv2d(32, self.num_classes, kernel_size=1)
def __build_func(self, obj):
"""Define model layers & loss."""
self.backbone = "FCN8s"
setattr(obj, "training_step", training_step)
setattr(obj, "training_epoch_end", training_epoch_end)
setattr(obj, "validation_step", validation_step)
setattr(obj, "validation_epoch_end", validation_epoch_end)
setattr(obj, "test_step", test_step)
setattr(obj, "test_epoch_end", test_epoch_end)
setattr(obj, "configure_optimizers", configure_optimizers)
setattr(obj, "prepare_matrix", prepare_matrix)
setattr(obj, "generate_matrix", generate_matrix)
setattr(obj, "saveDetail", saveDetail)
setattr(obj, "generate_score", generate_score)
setattr(obj, "write_Best_model_path", write_Best_model_path)
setattr(obj, "read_Best_model_path", read_Best_model_path)
def forward(self, x):
# output = self.pretrained_net(x)
# x5 = output['x5'] # size=(N, 512, x.H/32, x.W/32)
# x4 = output['x4'] # size=(N, 512, x.H/16, x.W/16)
# x3 = output['x3'] # size=(N, 256, x.H/8, x.W/8)
x1, x2, x3, x4, x5 = self.pretrained_net(x)
score = self.relu(self.deconv1(x5)) # size=(N, 512, x.H/16, x.W/16)
score = self.bn1(score + x4) # element-wise add, size=(N, 512, x.H/16, x.W/16)
score = self.relu(self.deconv2(score)) # size=(N, 256, x.H/8, x.W/8)
score = self.bn2(score + x3) # element-wise add, size=(N, 256, x.H/8, x.W/8)
score = self.bn3(self.relu(self.deconv3(score))) # size=(N, 128, x.H/4, x.W/4)
score = self.bn4(self.relu(self.deconv4(score))) # size=(N, 64, x.H/2, x.W/2)
score = self.bn5(self.relu(self.deconv5(score))) # size=(N, 32, x.H, x.W)
score = self.classifier(score) # size=(N, num_classes, x.H/1, x.W/1)
return score # size=(N, num_classes, x.H/1, x.W/1)
class FCNs(pl.LightningModule):
def __init__(self, num_classes, args):
super().__init__()
self.num_classes = num_classes
self.__build_model()
self.__build_func(FCNs)
self.args = args
self.criterion = configure_loss(self.args.criterion)
self.checkname = self.backbone
        self.dir = os.path.join("log_dir", self.args.data_module, self.checkname)
        self.confusion_matrix = np.zeros((self.num_classes,) * 2)
        self.sample = (8, 3, 512, 256)
        self.sampleImg = torch.rand((1, 3, 512, 256)).cuda()
def __build_model(self):
self.name = 'FCNs'
self.pretrained_net = VGGNet(requires_grad=True)
self.relu = nn.ReLU(inplace=True)
self.deconv1 = nn.ConvTranspose2d(512, 512, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn1 = nn.BatchNorm2d(512)
self.deconv2 = nn.ConvTranspose2d(512, 256, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn2 = nn.BatchNorm2d(256)
self.deconv3 = nn.ConvTranspose2d(256, 128, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn3 = nn.BatchNorm2d(128)
self.deconv4 = nn.ConvTranspose2d(128, 64, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn4 = nn.BatchNorm2d(64)
self.deconv5 = nn.ConvTranspose2d(64, 32, kernel_size=3, stride=2, padding=1, dilation=1, output_padding=1)
self.bn5 = nn.BatchNorm2d(32)
self.classifier = nn.Conv2d(32, self.num_classes, kernel_size=1)
def __build_func(self, obj):
"""Define model layers & loss."""
self.backbone = "FCNs"
setattr(obj, "training_step", training_step)
setattr(obj, "training_epoch_end", training_epoch_end)
setattr(obj, "validation_step", validation_step)
setattr(obj, "validation_epoch_end", validation_epoch_end)
setattr(obj, "test_step", test_step)
setattr(obj, "test_epoch_end", test_epoch_end)
setattr(obj, "configure_optimizers", configure_optimizers)
setattr(obj, "prepare_matrix", prepare_matrix)
setattr(obj, "generate_matrix", generate_matrix)
setattr(obj, "saveDetail", saveDetail)
setattr(obj, "generate_score", generate_score)
setattr(obj, "write_Best_model_path", write_Best_model_path)
setattr(obj, "read_Best_model_path", read_Best_model_path)
def forward(self, x):
x1, x2, x3, x4, x5 = self.pretrained_net(x)
# x5 = output['x5'] # size=(N, 512, x.H/32, x.W/32)
# x4 = output['x4'] # size=(N, 512, x.H/16, x.W/16)
# x3 = output['x3'] # size=(N, 256, x.H/8, x.W/8)
# x2 = output['x2'] # size=(N, 128, x.H/4, x.W/4)
# x1 = output['x1'] # size=(N, 64, x.H/2, x.W/2)
score = self.bn1(self.relu(self.deconv1(x5))) # size=(N, 512, x.H/16, x.W/16)
score = score + x4 # element-wise add, size=(N, 512, x.H/16, x.W/16)
score = self.bn2(self.relu(self.deconv2(score))) # size=(N, 256, x.H/8, x.W/8)
score = score + x3 # element-wise add, size=(N, 256, x.H/8, x.W/8)
score = self.bn3(self.relu(self.deconv3(score))) # size=(N, 128, x.H/4, x.W/4)
score = score + x2 # element-wise add, size=(N, 128, x.H/4, x.W/4)
score = self.bn4(self.relu(self.deconv4(score))) # size=(N, 64, x.H/2, x.W/2)
score = score + x1 # element-wise add, size=(N, 64, x.H/2, x.W/2)
score = self.bn5(self.relu(self.deconv5(score))) # size=(N, 32, x.H, x.W)
score = self.classifier(score) # size=(N, num_classes, x.H/1, x.W/1)
return score # size=(N, num_classes, x.H/1, x.W/1)
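# Hedged training sketch (editor addition): the four variants above are
# LightningModules, so training follows the usual Lightning pattern. The
# attribute names on `args` (criterion, data_module) come from the code above;
# the concrete values and the data module are assumptions, and a CUDA device is
# required because __init__ calls .cuda() on the sample image.
#
#     from argparse import Namespace
#     args = Namespace(criterion="ce", data_module="my_dataset")  # hypothetical values
#     model = FCNs(num_classes=21, args=args)
#     trainer = pl.Trainer(gpus=1, max_epochs=50)
#     trainer.fit(model, datamodule=my_datamodule)  # my_datamodule is user-provided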
| 49.551429 | 118 | 0.593092 | 2,421 | 17,343 | 4.097893 | 0.084676 | 0.052414 | 0.024191 | 0.034271 | 0.849209 | 0.842052 | 0.841649 | 0.837012 | 0.836206 | 0.829553 | 0 | 0.076791 | 0.261143 | 17,343 | 349 | 119 | 49.69341 | 0.69744 | 0.125065 | 0 | 0.715328 | 0 | 0 | 0.06806 | 0.009839 | 0 | 0 | 0 | 0 | 0 | 1 | 0.069343 | false | 0 | 0.047445 | 0 | 0.156934 | 0.00365 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
c923e8b18bc444ee7f0a62202849f038fc114293 | 22,105 | py | Python | booking/tests.py | ripoul/SC-test | 2102b7821307d81260f183f21ca57bdde0d6f2f9 | ["MIT"] | 1 | 2020-04-16T15:35:46.000Z | 2020-04-16T15:35:46.000Z | booking/tests.py | ripoul/SC-test | 2102b7821307d81260f183f21ca57bdde0d6f2f9 | ["MIT"] | 20 | 2020-02-24T12:07:02.000Z | 2022-02-10T14:15:15.000Z | booking/tests.py | ripoul/SC-test | 2102b7821307d81260f183f21ca57bdde0d6f2f9 | ["MIT"] | 2 | 2020-02-29T13:55:47.000Z | 2020-12-02T10:06:25.000Z |
from django.test import RequestFactory, TestCase, Client
from booking import models
from django.contrib.auth.models import AnonymousUser, User
from django.core.exceptions import ValidationError
from django.urls import reverse
from . import views
from datetime import datetime
import pytz
import json
class dataForTests(TestCase):
def setUp(self):
self.rt1 = models.ResourceType.objects.create(name="écran")
self.loc1 = models.Location.objects.create(
name="salle de réunion 300", capacity=32
)
self.rs1 = models.Resource.objects.create(
resource_type=self.rt1,
word="capteur de présence ref 5478",
location=self.loc1,
)
self.user1 = User.objects.create_user("user", "user@example.com", "user")
self.admin1 = User.objects.create_superuser(
username="admin", email="admin@example.com", password="admin"
)
self.utc = pytz.UTC
models.Reservation.objects.create(
title="reunion",
start_date=self.utc.localize(datetime(2090, 6, 1, 12, 00, 00)),
end_date=self.utc.localize(datetime(2090, 6, 1, 13, 00, 00)),
resource=self.rs1,
owner=self.admin1,
)
self.factory = RequestFactory()
class ReservationModelTests(dataForTests):
def test_same_time_reservation(self):
with self.assertRaisesMessage(ValidationError, "already busy"):
models.Reservation.create(
title="reunion",
start_date=self.utc.localize(datetime(2090, 6, 1, 12, 00, 00)),
end_date=self.utc.localize(datetime(2090, 6, 1, 13, 00, 00)),
resource=self.rs1,
owner=self.user1,
)
def test_same_time_reservation_outer(self):
with self.assertRaisesMessage(ValidationError, "already busy"):
models.Reservation.create(
title="reunion",
start_date=self.utc.localize(datetime(2090, 6, 1, 11, 00, 00)),
end_date=self.utc.localize(datetime(2090, 6, 1, 14, 00, 00)),
resource=self.rs1,
owner=self.user1,
)
def test_start_date_after_end_date(self):
with self.assertRaisesMessage(
ValidationError, "start date must be before end date"
):
models.Reservation.create(
title="reunion",
start_date=self.utc.localize(datetime(2021, 6, 1, 12, 00, 00)),
end_date=self.utc.localize(datetime(2020, 6, 1, 13, 00, 00)),
resource=self.rs1,
owner=self.user1,
)
def test_start_date_passed(self):
with self.assertRaisesMessage(
ValidationError, "the reservation have to be not passed"
):
models.Reservation.create(
title="reunion",
start_date=self.utc.localize(datetime(2018, 6, 1, 12, 00, 00)),
end_date=self.utc.localize(datetime(2018, 6, 1, 13, 00, 00)),
resource=self.rs1,
owner=self.user1,
)
class bookingTests(dataForTests):
def test_logout_no_connected(self):
c = Client()
response = c.get(reverse("logout_view"))
self.assertRedirects(
response,
reverse("login_view")[:-1] + "?next=/booking/logout/",
status_code=302,
target_status_code=301,
)
def test_logout_connected(self):
c = Client()
c.login(username="user", password="user")
response = c.get(reverse("logout_view"))
self.assertRedirects(
response, reverse("login_view"), status_code=302, target_status_code=200
)
def test_login_connected(self):
c = Client()
c.login(username="user", password="user")
response = c.get(reverse("login_view"))
self.assertRedirects(
response, reverse("index"), status_code=302, target_status_code=200
)
def test_admin_connected_user(self):
c = Client()
c.login(username="user", password="user")
response = c.get(reverse("admin_view"))
self.assertRedirects(
response,
reverse("login_view") + "?next=%2Fbooking%2Fadmin%2F",
status_code=302,
target_status_code=302,
)
def test_admin_connected(self):
request = self.factory.get(reverse("admin_view"))
request.user = self.admin1
response = views.admin_view(request)
self.assertEqual(response.status_code, 200)
def test_location_view_user(self):
c = Client()
c.login(username="user", password="user")
response = c.get(reverse("location_view", args=[1,]))
self.assertRedirects(
response,
reverse("login_view") + "?next=/booking/location/view/1",
status_code=302,
target_status_code=302,
)
def test_location_view_admin(self):
c = Client()
c.login(username="admin", password="admin")
response = c.get(reverse("location_view", args=[1,]))
self.assertEqual(response.status_code, 200)
def test_location_view_admin_not_exist(self):
c = Client()
c.login(username="admin", password="admin")
response = c.get(reverse("location_view", args=[2,]))
self.assertEqual(response.status_code, 404)
def test_rt_view_user(self):
c = Client()
c.login(username="user", password="user")
response = c.get(reverse("rt_view", args=[1,]))
self.assertRedirects(
response,
reverse("login_view") + "?next=/booking/rt/view/1",
status_code=302,
target_status_code=302,
)
def test_rt_view_admin(self):
c = Client()
c.login(username="admin", password="admin")
response = c.get(reverse("rt_view", args=[1,]))
self.assertEqual(response.status_code, 200)
def test_rt_view_admin_not_exist(self):
c = Client()
c.login(username="admin", password="admin")
response = c.get(reverse("rt_view", args=[2,]))
self.assertEqual(response.status_code, 404)
def test_resource_view_user(self):
c = Client()
c.login(username="user", password="user")
response = c.get(reverse("resource_view", args=[1,]))
self.assertRedirects(
response,
reverse("login_view") + "?next=/booking/resource/view/1",
status_code=302,
target_status_code=302,
)
def test_resource_view_admin(self):
c = Client()
c.login(username="admin", password="admin")
response = c.get(reverse("resource_view", args=[1,]))
self.assertEqual(response.status_code, 200)
def test_resource_view_admin_not_exist(self):
c = Client()
c.login(username="admin", password="admin")
response = c.get(reverse("resource_view", args=[2,]))
self.assertEqual(response.status_code, 404)
def test_location_edit_user_get(self):
c = Client()
c.login(username="user", password="user")
response = c.get(reverse("location_edit"))
self.assertEqual(response.status_code, 405)
def test_location_edit_user_post(self):
c = Client()
c.login(username="user", password="user")
response = c.post(reverse("location_edit"))
self.assertRedirects(
response,
reverse("login_view") + "?next=/booking/location/edit/",
status_code=302,
target_status_code=302,
)
def test_location_edit_admin_post(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(reverse("location_edit"))
self.assertEqual(response.status_code, 400)
def test_location_edit_admin_post_ok(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(
reverse("location_edit"), {"id": 1, "name": "cuisine", "capacity": 32}
)
self.assertRedirects(
response, reverse("admin_view"), status_code=302, target_status_code=200,
)
def test_location_edit_admin_post_not_exist(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(
reverse("location_edit"), {"id": 2, "name": "cuisine", "capacity": 32}
)
self.assertEqual(response.status_code, 404)
def test_rt_edit_user_get(self):
c = Client()
c.login(username="user", password="user")
response = c.get(reverse("rt_edit"))
self.assertEqual(response.status_code, 405)
def test_rt_edit_user_post(self):
c = Client()
c.login(username="user", password="user")
response = c.post(reverse("rt_edit"))
self.assertRedirects(
response,
reverse("login_view") + "?next=/booking/rt/edit/",
status_code=302,
target_status_code=302,
)
def test_rt_edit_admin_post(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(reverse("rt_edit"))
self.assertEqual(response.status_code, 400)
def test_rt_edit_admin_post_ok(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(reverse("rt_edit"), {"id": 1, "name": "pad"})
self.assertRedirects(
response, reverse("admin_view"), status_code=302, target_status_code=200,
)
def test_rt_edit_admin_post_not_exist(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(reverse("rt_edit"), {"id": 2, "name": "pad"})
self.assertEqual(response.status_code, 404)
def test_resource_edit_user_get(self):
c = Client()
c.login(username="user", password="user")
response = c.get(reverse("resource_edit"))
self.assertEqual(response.status_code, 405)
def test_resource_edit_user_post(self):
c = Client()
c.login(username="user", password="user")
response = c.post(reverse("resource_edit"))
self.assertRedirects(
response,
reverse("login_view") + "?next=/booking/resource/edit/",
status_code=302,
target_status_code=302,
)
def test_resource_edit_admin_post(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(reverse("resource_edit"))
self.assertEqual(response.status_code, 400)
def test_resource_edit_admin_post_ok(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(
reverse("resource_edit"),
{"id": 1, "word": "pad 32", "location": 1, "rt": 1},
)
self.assertRedirects(
response, reverse("admin_view"), status_code=302, target_status_code=200,
)
def test_resource_edit_admin_post_not_exist_resource(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(
reverse("resource_edit"),
{"id": 2, "word": "pad 32", "location": 1, "rt": 1},
)
self.assertEqual(response.status_code, 404)
def test_resource_edit_admin_post_not_exist_location(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(
reverse("resource_edit"),
{"id": 1, "word": "pad 32", "location": 2, "rt": 1},
)
self.assertEqual(response.status_code, 404)
def test_resource_edit_admin_post_not_exist_rt(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(
reverse("resource_edit"),
{"id": 1, "word": "pad 32", "location": 1, "rt": 2},
)
self.assertEqual(response.status_code, 404)
def test_location_add_user_get(self):
c = Client()
c.login(username="user", password="user")
response = c.get(reverse("location_add"))
self.assertEqual(response.status_code, 405)
def test_location_add_user_post(self):
c = Client()
c.login(username="user", password="user")
response = c.post(reverse("location_add"))
self.assertRedirects(
response,
reverse("login_view") + "?next=/booking/location/add/",
status_code=302,
target_status_code=302,
)
def test_location_add_admin_post(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(reverse("location_add"))
self.assertEqual(response.status_code, 400)
def test_location_add_admin_post_ok(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(reverse("location_add"), {"name": "cuisine", "capacity": 2},)
self.assertEqual(response.status_code, 200)
data = json.loads(response.json())[0]
self.assertEqual(data["fields"]["name"], "cuisine")
self.assertEqual(data["fields"]["capacity"], "2")
def test_rt_add_user_get(self):
c = Client()
c.login(username="user", password="user")
response = c.get(reverse("rt_add"))
self.assertEqual(response.status_code, 405)
def test_rt_add_user_post(self):
c = Client()
c.login(username="user", password="user")
response = c.post(reverse("rt_add"))
self.assertRedirects(
response,
reverse("login_view") + "?next=/booking/rt/add/",
status_code=302,
target_status_code=302,
)
def test_rt_add_admin_post(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(reverse("rt_add"))
self.assertEqual(response.status_code, 400)
def test_rt_add_admin_post_ok(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(reverse("rt_add"), {"name": "test"},)
self.assertEqual(response.status_code, 200)
data = json.loads(response.json())[0]
self.assertEqual(data["fields"]["name"], "test")
def test_resource_add_user_get(self):
c = Client()
c.login(username="user", password="user")
response = c.get(reverse("resource_add"))
self.assertEqual(response.status_code, 405)
def test_resource_add_user_post(self):
c = Client()
c.login(username="user", password="user")
response = c.post(reverse("resource_add"))
self.assertRedirects(
response,
reverse("login_view") + "?next=/booking/resource/add/",
status_code=302,
target_status_code=302,
)
def test_resource_add_admin_post(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(reverse("resource_add"))
self.assertEqual(response.status_code, 400)
def test_resource_add_admin_post_ok(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(
reverse("resource_add"), {"word": "resource1", "location": 1, "rt": 1},
)
self.assertEqual(response.status_code, 200)
data = json.loads(response.json())[0]
self.assertEqual(data["fields"]["word"], "resource1")
self.assertEqual(data["fields"]["resource_type"], "écran")
self.assertEqual(data["fields"]["location"][0], "salle de réunion 300")
def test_resource_add_admin_post_not_exist_location(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(
reverse("resource_add"), {"word": "resource1", "location": 2, "rt": 1},
)
self.assertEqual(response.status_code, 404)
def test_resource_add_admin_post_not_exist_rt(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(
reverse("resource_add"), {"word": "resource1", "location": 1, "rt": 2},
)
self.assertEqual(response.status_code, 404)
def test_reservation_add_user_get(self):
c = Client()
c.login(username="user", password="user")
response = c.get(reverse("reservation_add"))
self.assertEqual(response.status_code, 405)
def test_reservation_add_admin_post(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(reverse("reservation_add"))
self.assertEqual(response.status_code, 400)
def test_reservation_add_admin_post_ok(self):
c = Client(HTTP_HOST="localhost:8888")
c.login(username="admin", password="admin")
input_formats = [
"%Y-%m-%dT%H:%M",
]
start_date = datetime(2021, 6, 1, 12, 00, 00).strftime("%Y-%m-%dT%H:%M")
end_date = datetime(2021, 6, 1, 13, 00, 00).strftime("%Y-%m-%dT%H:%M")
response = c.post(
reverse("reservation_add"),
{
"id_resource": 1,
"title": "V2",
"start_date": start_date,
"end_date": end_date,
},
)
self.assertEqual(response.status_code, 200)
data = json.loads(response.json())[0]
self.assertEqual(data["pk"], 2)
self.assertEqual(data["fields"]["title"], "V2")
self.assertEqual(data["fields"]["start_date"], "2021-06-01T12:00:00Z")
self.assertEqual(data["fields"]["end_date"], "2021-06-01T13:00:00Z")
self.assertEqual(data["fields"]["resource"][0], "capteur de présence ref 5478")
self.assertEqual(data["fields"]["resource"][1], "salle de réunion 300")
self.assertEqual(data["fields"]["owner"][0], "admin")
def test_reservation_add_admin_post_ko(self):
c = Client()
c.login(username="admin", password="admin")
start_date = datetime(2025, 6, 1, 12, 00, 00).strftime("%Y-%m-%dT%H:%M")
end_date = datetime(2025, 6, 1, 13, 00, 00).strftime("%Y-%m-%dT%H:%M")
response = c.post(
reverse("reservation_add"),
{
"id_resource": 2,
"title": "V2",
"start_date": start_date,
"end_date": end_date,
},
)
self.assertEqual(response.status_code, 404)
def test_reservation_add_admin_post_dates_reversed(self):
c = Client()
c.login(username="admin", password="admin")
input_formats = [
"%Y-%m-%dT%H:%M",
]
start_date = datetime(2023, 6, 1, 12, 00, 00).strftime("%Y-%m-%dT%H:%M")
end_date = datetime(2022, 6, 1, 13, 00, 00).strftime("%Y-%m-%dT%H:%M")
response = c.post(
reverse("reservation_add"),
{
"id_resource": 1,
"title": "V2",
"start_date": start_date,
"end_date": end_date,
},
)
self.assertEqual(response.status_code, 400)
self.assertEqual(response.content, b"start date must be before end date")
def test_reservation_add_admin_post_dates_busy(self):
c = Client()
c.login(username="admin", password="admin")
input_formats = [
"%Y-%m-%dT%H:%M",
]
start_date = datetime(2090, 6, 1, 12, 00, 00).strftime("%Y-%m-%dT%H:%M")
end_date = datetime(2090, 6, 1, 13, 00, 00).strftime("%Y-%m-%dT%H:%M")
response = c.post(
reverse("reservation_add"),
{
"id_resource": 1,
"title": "V2",
"start_date": start_date,
"end_date": end_date,
},
)
self.assertEqual(response.status_code, 400)
self.assertEqual(response.content, b"already busy")
def test_reservation_del_user_get(self):
c = Client()
c.login(username="user", password="user")
response = c.get(reverse("reservation_delete"))
self.assertEqual(response.status_code, 405)
def test_reservation_del_admin_post(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(reverse("reservation_delete"))
self.assertEqual(response.status_code, 400)
def test_reservation_del_admin_post_ok(self):
c = Client()
c.login(username="admin", password="admin")
response = c.post(reverse("reservation_delete"), {"id": "1"},)
self.assertEqual(response.status_code, 204)
def test_reservation_del_admin_post_forbidden(self):
c = Client()
c.login(username="user", password="user")
response = c.post(reverse("reservation_delete"), {"id": "1"},)
self.assertEqual(response.status_code, 403)
def test_auth_user_get(self):
c = Client()
response = c.get(reverse("auth"))
self.assertEqual(response.status_code, 405)
def test_auth_admin_post(self):
c = Client()
response = c.post(reverse("auth"))
self.assertEqual(response.status_code, 400)
def test_auth_admin_post_ok_admin(self):
c = Client()
response = c.post(reverse("auth"), {"email": "admin", "password": "admin"},)
self.assertRedirects(
response, reverse("admin_view"), status_code=302, target_status_code=200,
)
def test_auth_admin_post_ok_user(self):
c = Client()
response = c.post(reverse("auth"), {"email": "user", "password": "user"},)
self.assertRedirects(
response, reverse("index"), status_code=302, target_status_code=200,
)
def test_auth_admin_post_forbidden(self):
c = Client()
response = c.post(
reverse("auth"), {"email": "admin", "password": "adminalezhgleagkn"},
)
self.assertEqual(response.status_code, 403)
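# Hedged note (editor addition): these are standard Django TestCase classes, so
# they are normally run through the project's manage.py, e.g.:
#
#     python manage.py test booking
#     python manage.py test booking.tests.bookingTests.test_auth_admin_post_ok_admin
#
# The exact invocation depends on the project's settings module; treat the
# commands above as an assumption rather than documented project workflow.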
| 36.53719 | 87 | 0.588555 | 2,592 | 22,105 | 4.82909 | 0.065201 | 0.062315 | 0.051849 | 0.049852 | 0.881281 | 0.851482 | 0.831509 | 0.812175 | 0.796597 | 0.683391 | 0 | 0.03687 | 0.269939 | 22,105 | 604 | 88 | 36.597682 | 0.738753 | 0 | 0 | 0.571698 | 0 | 0 | 0.128432 | 0.01321 | 0 | 0 | 0 | 0 | 0.149057 | 1 | 0.122642 | false | 0.111321 | 0.016981 | 0 | 0.145283 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 7 |
c95facb117878232cd4df7ec388fd3f1ef0d00ff | 20,195 | py | Python | service/tests/functional/test_api.py | tuub/jper | 1a723a36617b2c27b0fc43dd4cb9a0f5fe811f37 | ["Apache-2.0"] | 2 | 2015-05-21T10:38:46.000Z | 2015-06-10T15:49:50.000Z | service/tests/functional/test_api.py | tuub/jper | 1a723a36617b2c27b0fc43dd4cb9a0f5fe811f37 | ["Apache-2.0"] | 83 | 2015-05-05T11:10:13.000Z | 2016-02-22T11:04:34.000Z | service/tests/functional/test_api.py | tuub/jper | 1a723a36617b2c27b0fc43dd4cb9a0f5fe811f37 | ["Apache-2.0"] | 3 | 2015-11-11T16:37:33.000Z | 2016-07-26T10:33:52.000Z |
"""
Functional tests for the API.
This test suite will work fine against a standard JPER install on the local machine with the default accounts.
If you want to run it in a different environment you will need to modify some of the constants used in this test.
"""
import requests, json, os
from octopus.modules.es.testindex import ESTestCase
from octopus.modules.test.helpers import get_first_free_port, TestServer, make_config
from service.tests import fixtures
from octopus.core import app
from service import web
from octopus.lib import paths
from octopus.modules.store import store
# FIXME: at this point these don't do anything. We'll need to create user accounts
# in the remote system and get their api keys.
API_KEY = "admin" # this is the password for the account, not the api-key, so it won't work
"""The API key for the admin account"""
INVALID_API_KEY = "abcdefg"
"""An API key which is not related to any account"""
class TestAPI(ESTestCase):
def setUp(self):
super(TestAPI, self).setUp()
self.config = {
"PORT" : get_first_free_port(),
"ELASTIC_SEARCH_INDEX" : app.config['ELASTIC_SEARCH_INDEX'],
"THREADED" : True,
"FUNCTIONAL_TEST_MODE" : True,
"STORE_IMPL" : "octopus.modules.store.store.TempStore",
"RUN_SCHEDULE": False
}
self.cfg_file = paths.rel2abs(__file__, "..", "resources", "test-server.cfg")
make_config(self.config, self.cfg_file)
self.test_server = TestServer(port=None, index=None, python_app_module_path=os.path.abspath(web.__file__), cfg_file=self.cfg_file)
self.test_server.spawn_with_config()
self.appurl = "http://localhost:{x}".format(x=self.config["PORT"])
self.api_base = self.appurl + "/api/v1/"
self.custom_zip_path = paths.rel2abs(__file__, "..", "resources", "custom.zip")
def tearDown(self):
super(TestAPI, self).tearDown()
self.test_server.terminate()
os.remove(self.cfg_file)
# this is the temp store where we told the server to put the files
s = store.StoreFactory.tmp()
for cid in s.list_container_ids():
s.delete(cid)
if os.path.exists(self.custom_zip_path):
os.remove(self.custom_zip_path)
def test_01_validation_singlepart(self):
notification = fixtures.APIFactory.incoming()
notification["links"][0]["url"] = self.appurl + "/test/download/file.pdf"
url = self.api_base + "validate?api_key=" + API_KEY
resp = requests.post(url, data=json.dumps(notification), headers={"Content-Type" : "application/json"})
assert resp.status_code == 204
def test_02_validation_singlepart_fail(self):
# ways in which the validation http request can fail
# 1. invalid/wrong auth credentials
# FIXME: we can't do this test yet
# 2. incorrect content-type header
notification = fixtures.APIFactory.incoming()
notification["links"][0]["url"] = self.appurl + "/test/download/file.pdf"
url = self.api_base + "validate?api_key=" + API_KEY
resp = requests.post(url, data=json.dumps(notification), headers={"Content-Type" : "text/plain"})
assert resp.status_code == 400
j = resp.json()
assert "error" in j
assert "application/json" in j["error"]
# 3. invalid json
resp = requests.post(url, data="this is not json", headers={"Content-Type" : "application/json"})
assert resp.status_code == 400
j = resp.json()
assert "error" in j
assert "Unable to parse" in j["error"]
def test_03_validation_multipart(self):
notification = fixtures.APIFactory.incoming()
del notification["links"]
example_package = fixtures.APIFactory.example_package_path()
url = self.api_base + "validate?api_key=" + API_KEY
files = [
("metadata", ("metadata.json", json.dumps(notification), "application/json")),
("content", ("content.zip", open(example_package, "rb"), "application/zip"))
]
resp = requests.post(url, files=files)
assert resp.status_code == 204
def test_04_validation_multipart_fail(self):
# ways in which the validation http request can fail
# 1. invalid/wrong auth credentials
# FIXME: we can't do this test yet
# 2. incorrect content-type header on metadata/content parts
notification = fixtures.APIFactory.incoming()
del notification["links"]
example_package = fixtures.APIFactory.example_package_path()
url = self.api_base + "validate?api_key=" + API_KEY
files = [
("metadata", ("metadata.json", json.dumps(notification), "text/plain")),
("content", ("content.zip", open(example_package, "rb"), "application/zip"))
]
resp = requests.post(url, files=files)
assert resp.status_code == 400
j = resp.json()
assert "error" in j
assert "application/json" in j["error"]
files = [
("metadata", ("metadata.json", json.dumps(notification), "application/json")),
("content", ("content.zip", open(example_package, "rb"), "text/plain"))
]
resp = requests.post(url, files=files)
assert resp.status_code == 400
j = resp.json()
assert "error" in j
assert "application/zip" in j["error"]
# 3. invalid json
files = [
("metadata", ("metadata.json", "this string is not json", "application/json")),
("content", ("content.zip", open(example_package, "rb"), "text/plain"))
]
resp = requests.post(url, files=files)
assert resp.status_code == 400
j = resp.json()
assert "error" in j
assert "Unable to parse" in j["error"]
def test_05_notification_singlepart(self):
notification = fixtures.APIFactory.incoming()
url = self.api_base + "notification?api_key=" + API_KEY
resp = requests.post(url, data=json.dumps(notification), headers={"Content-Type" : "application/json"})
assert resp.status_code == 202
j = resp.json()
assert "status" in j
assert j["status"] == "accepted"
assert "id" in j
assert "location" in j
assert "location" in resp.headers
assert resp.headers["location"] == j["location"], (resp.headers["location"], j["location"])
def test_06_notification_singlepart_fail(self):
# ways in which the validation http request can fail
# 1. invalid/wrong auth credentials
# FIXME: we can't do this test yet
# 2. incorrect content-type header
notification = fixtures.APIFactory.incoming()
url = self.api_base + "notification?api_key=" + API_KEY
resp = requests.post(url, data=json.dumps(notification), headers={"Content-Type" : "text/plain"})
assert resp.status_code == 400
j = resp.json()
assert "error" in j
assert "application/json" in j["error"]
# 3. invalid json
resp = requests.post(url, data="this is not json", headers={"Content-Type" : "application/json"})
assert resp.status_code == 400
j = resp.json()
assert "error" in j
assert "Unable to parse" in j["error"]
# 4. incorrectly structured json
obj = {"random" : "content"}
resp = requests.post(url, data=json.dumps(obj), headers={"Content-Type" : "application/json"})
assert resp.status_code == 400
j = resp.json()
assert "error" in j
assert "Field 'random' is not permitted at 'root'" in j["error"]
def test_07_notification_multipart(self):
notification = fixtures.APIFactory.incoming()
example_package = fixtures.APIFactory.example_package_path()
url = self.api_base + "notification?api_key=" + API_KEY
files = [
("metadata", ("metadata.json", json.dumps(notification), "application/json")),
("content", ("content.zip", open(example_package, "rb"), "application/zip"))
]
resp = requests.post(url, files=files)
assert resp.status_code == 202
j = resp.json()
assert "status" in j
assert j["status"] == "accepted"
assert "id" in j
assert "location" in j
assert "location" in resp.headers
assert resp.headers["location"] == j["location"], (resp.headers["location"], j["location"])
def test_08_notification_multipart_fail(self):
# ways in which the notification http request can fail
# 1. invalid/wrong auth credentials
# FIXME: we can't do this test yet
# 2. incorrect content-type header on metadata/content parts
notification = fixtures.APIFactory.incoming()
example_package = fixtures.APIFactory.example_package_path()
url = self.api_base + "notification?api_key=" + API_KEY
files = [
("metadata", ("metadata.json", json.dumps(notification), "text/plain")),
("content", ("content.zip", open(example_package, "rb"), "application/zip"))
]
resp = requests.post(url, files=files)
assert resp.status_code == 400
j = resp.json()
assert "error" in j
assert "application/json" in j["error"]
files = [
("metadata", ("metadata.json", json.dumps(notification), "application/json")),
("content", ("content.zip", open(example_package, "rb"), "text/plain"))
]
resp = requests.post(url, files=files)
assert resp.status_code == 400
j = resp.json()
assert "error" in j
assert "application/zip" in j["error"]
# 3. invalid json
files = [
("metadata", ("metadata.json", "this string is not json", "application/json")),
("content", ("content.zip", open(example_package, "rb"), "text/plain"))
]
resp = requests.post(url, files=files)
assert resp.status_code == 400
j = resp.json()
assert "error" in j
assert "Unable to parse" in j["error"]
# 4. validation exception on the content
fixtures.PackageFactory.make_custom_zip(self.custom_zip_path, corrupt_zip=True)
files = [
("metadata", ("metadata.json", json.dumps(notification), "application/json")),
("content", ("content.zip", open(self.custom_zip_path, "rb"), "application/zip"))
]
resp = requests.post(url, files=files)
assert resp.status_code == 400
j = resp.json()
assert "error" in j
assert "Zip file is corrupt" in j["error"]
def test_09_get_notification(self):
notification = fixtures.APIFactory.incoming()
url = self.api_base + "notification?api_key=" + API_KEY
resp = requests.post(url, data=json.dumps(notification), headers={"Content-Type" : "application/json"})
url = resp.headers["location"]
j = resp.json()
resp2 = requests.get(url + '?api_key=' + API_KEY)
assert resp2.status_code == 200
assert resp2.headers["content-type"] == "application/json"
j2 = resp2.json()
assert j2["id"] == j["id"]
assert j2["provider"]["id"] == "admin" # The default admin account owns this one
# FIXME: should do additional tests for retrieving routed notifications, but this is
# difficult to do at this stage
def test_10_get_notification_fail(self):
# ways in which the notification http request can fail
# 1. invalid/wrong auth credentials
# FIXME: we can't test for this yet
# 2. invalid/not found notification id
url = self.api_base + "notification/2394120938412098348901275812u?api_key=" + API_KEY
resp = requests.get(url)
assert resp.status_code == 404
def test_11_get_store_content(self):
notification = fixtures.APIFactory.incoming()
example_package = fixtures.APIFactory.example_package_path()
url = self.api_base + "notification?api_key=" + API_KEY
files = [
("metadata", ("metadata.json", json.dumps(notification), "application/json")),
("content", ("content.zip", open(example_package, "rb"), "application/zip"))
]
resp = requests.post(url, files=files)
loc = resp.headers["location"]
resp2 = requests.get(loc + "/content?api_key=" + API_KEY, allow_redirects=False)
assert resp2.status_code == 200
def test_12_get_store_content_fail(self):
# ways in which the content http request can fail
# 1. invalid/wrong auth credentials
notification = fixtures.APIFactory.incoming()
example_package = fixtures.APIFactory.example_package_path()
url = self.api_base + "notification?api_key=" + API_KEY
files = [
("metadata", ("metadata.json", json.dumps(notification), "application/json")),
("content", ("content.zip", open(example_package, "rb"), "application/zip"))
]
resp = requests.post(url, files=files)
loc = resp.headers["location"]
resp2 = requests.get(loc + "/content")
assert resp2.status_code == 401
# 2. invalid/not found notification id
url = self.api_base + "notification/2394120938412098348901275812u/content?api_key=" + API_KEY
resp = requests.get(url)
assert resp.status_code == 404
'''
def test_13_get_public_content(self):
"""
FIXME: this test is no longer accurate. Needs updating.
:return:
"""
notification = fixtures.APIFactory.incoming()
url = self.api_base + "notification?api_key=" + API_KEY
resp = requests.post(url, data=json.dumps(notification), headers={"Content-Type" : "application/json"})
loc = resp.headers["location"]
resp2 = requests.get(loc + "/content/1?api_key=" + API_KEY, allow_redirects=False)
assert resp2.status_code == 303
'''
def test_14_get_public_content_fail(self):
# ways in which the content http request can fail
# 1. invalid/wrong auth credentials
# FIXME: we can't test for this yet
# 2. invalid/not found notification id
url = self.api_base + "notification/2394120938412098348901275812u/content/1?api_key=" + API_KEY
resp = requests.get(url)
assert resp.status_code == 404
def test_15_list_all(self):
url = self.api_base + "routed?api_key=" + API_KEY + "&since=2001-01-01T00:00:00Z"
resp = requests.get(url)
assert resp.status_code == 200
assert resp.headers["content-type"] == "application/json"
j = resp.json()
assert j["since"] == "2001-01-01T00:00:00Z"
assert j["page"] == 1
assert j["pageSize"] == 25
assert "timestamp" in j
assert "total" in j
assert "notifications" in j
def test_16_list_all_fail(self):
# ways in which the list all http request can fail
# 1. invalid/wrong auth credentials (if supplied)
# FIXME: we can't test for this yet
# 2. since parameter not supplied
url = self.api_base + "routed?api_key=" + API_KEY
resp = requests.get(url)
assert resp.status_code == 400
assert resp.headers["content-type"] == "application/json"
j = resp.json()
assert "error" in j
assert "since" in j["error"]
# 3. since parameter wrongly formatted
url = self.api_base + "routed?api_key=" + API_KEY + "&since=wednesday"
resp = requests.get(url)
assert resp.status_code == 400
assert resp.headers["content-type"] == "application/json"
j = resp.json()
assert "error" in j
assert "since" in j["error"]
# 4. page/pageSize parameters wrongly formatted
url = self.api_base + "routed?api_key=" + API_KEY + "&since=2001-01-01T00:00:00Z&page=0&pageSize=25"
resp = requests.get(url)
assert resp.status_code == 400
assert resp.headers["content-type"] == "application/json"
j = resp.json()
assert "error" in j
assert "page" in j["error"]
url = self.api_base + "routed?api_key=" + API_KEY + "&since=2001-01-01T00:00:00Z&page=first&pageSize=25"
resp = requests.get(url)
assert resp.status_code == 400
assert resp.headers["content-type"] == "application/json"
j = resp.json()
assert "error" in j
assert "page" in j["error"]
url = self.api_base + "routed?api_key=" + API_KEY + "&since=2001-01-01T00:00:00Z&page=1&pageSize=10000000"
resp = requests.get(url)
assert resp.status_code == 400
assert resp.headers["content-type"] == "application/json"
j = resp.json()
assert "error" in j
assert "page size" in j["error"]
url = self.api_base + "routed?api_key=" + API_KEY + "&since=2001-01-01T00:00:00Z&page=1&pageSize=loads"
resp = requests.get(url)
assert resp.status_code == 400
assert resp.headers["content-type"] == "application/json"
j = resp.json()
assert "error" in j
assert "pageSize" in j["error"]
def test_17_list_repository(self):
url = self.api_base + "routed/repo1?api_key=" + API_KEY + "&since=2001-01-01T00:00:00Z&page=2&pageSize=67"
resp = requests.get(url)
assert resp.status_code == 200
assert resp.headers["content-type"] == "application/json"
j = resp.json()
assert j["since"] == "2001-01-01T00:00:00Z"
assert j["page"] == 2
assert j["pageSize"] == 67
assert "timestamp" in j
assert "total" in j
assert "notifications" in j
def test_18_list_repository_fail(self):
# ways in which the list repository http request can fail
# 1. invalid/wrong auth credentials (if supplied)
# FIXME: we can't test for this yet
# 2. since parameter not supplied
url = self.api_base + "routed/repo1?api_key=" + API_KEY
resp = requests.get(url)
assert resp.status_code == 400, resp.status_code
assert resp.headers["content-type"] == "application/json"
j = resp.json()
assert "error" in j
assert "since" in j["error"]
# 3. since parameter wrongly formatted
url = self.api_base + "routed/repo1?api_key=" + API_KEY + "&since=wednesday"
resp = requests.get(url)
assert resp.status_code == 400
assert resp.headers["content-type"] == "application/json"
j = resp.json()
assert "error" in j
assert "since" in j["error"]
# 4. page/pageSize parameters wrongly formatted
url = self.api_base + "routed/repo1?api_key=" + API_KEY + "&since=2001-01-01T00:00:00Z&page=0&pageSize=25"
resp = requests.get(url)
assert resp.status_code == 400
assert resp.headers["content-type"] == "application/json"
j = resp.json()
assert "error" in j
assert "page" in j["error"]
url = self.api_base + "routed/repo1?api_key=" + API_KEY + "&since=2001-01-01T00:00:00Z&page=first&pageSize=25"
resp = requests.get(url)
assert resp.status_code == 400
assert resp.headers["content-type"] == "application/json"
j = resp.json()
assert "error" in j
assert "page" in j["error"]
url = self.api_base + "routed/repo1?api_key=" + API_KEY + "&since=2001-01-01T00:00:00Z&page=1&pageSize=10000000"
resp = requests.get(url)
assert resp.status_code == 400
assert resp.headers["content-type"] == "application/json"
j = resp.json()
assert "error" in j
assert "page size" in j["error"]
url = self.api_base + "routed/repo1?api_key=" + API_KEY + "&since=2001-01-01T00:00:00Z&page=1&pageSize=loads"
resp = requests.get(url)
assert resp.status_code == 400
assert resp.headers["content-type"] == "application/json"
j = resp.json()
assert "error" in j
assert "pageSize" in j["error"]
| 42.248954 | 138 | 0.617331 | 2,554 | 20,195 | 4.768598 | 0.111198 | 0.033993 | 0.039084 | 0.054192 | 0.807538 | 0.799573 | 0.79087 | 0.780852 | 0.780852 | 0.776911 | 0 | 0.032803 | 0.255806 | 20,195 | 478 | 139 | 42.248954 | 0.777563 | 0.118594 | 0 | 0.705202 | 0 | 0 | 0.221332 | 0.059385 | 0 | 0 | 0 | 0.006276 | 0.361272 | 1 | 0.054913 | false | 0 | 0.023121 | 0 | 0.080925 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
| 6eab1dd67337c71809a43df441209a98ea217cc9 | 10,592 | py | Python | tests/test_certificate_list_builder.py | wbond/crlbuilder | 87b52684fc99451dd3c6547807f191936441f982 | ["MIT"] | 15 | 2015-08-11T02:15:10.000Z | 2021-09-19T20:00:57.000Z | tests/test_certificate_list_builder.py | wbond/crlbuilder | 87b52684fc99451dd3c6547807f191936441f982 | ["MIT"] | 6 | 2017-10-30T00:53:32.000Z | 2021-06-15T10:51:35.000Z | tests/test_certificate_list_builder.py | wbond/crlbuilder | 87b52684fc99451dd3c6547807f191936441f982 | ["MIT"] | 4 | 2020-06-16T20:07:45.000Z | 2020-11-07T02:34:39.000Z |
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import unittest
import os
from datetime import datetime
from asn1crypto import crl
from asn1crypto.util import timezone
from oscrypto import asymmetric
from crlbuilder import CertificateListBuilder
from ._unittest_compat import patch
patch()
tests_root = os.path.dirname(__file__)
fixtures_dir = os.path.join(tests_root, 'fixtures')
class CertificateListBuilderTests(unittest.TestCase):
def test_build_basic_crl(self):
root_private_key = asymmetric.load_private_key(os.path.join(fixtures_dir, 'root.key'), 'password123')
root_certificate = asymmetric.load_certificate(os.path.join(fixtures_dir, 'root.crt'))
builder = CertificateListBuilder(
'http://crl.example.com',
root_certificate,
50000
)
revoked_at = datetime(2015, 8, 1, 12, 0, 0, tzinfo=timezone.utc)
builder.add_certificate(29232181, revoked_at, 'key_compromise')
certificate_list = builder.build(root_private_key)
der_bytes = certificate_list.dump()
new_cert_list = crl.CertificateList.load(der_bytes)
tbs_cert_list = new_cert_list['tbs_cert_list']
revoked_certificates = tbs_cert_list['revoked_certificates']
now = datetime.now(timezone.utc)
self.assertEqual('v3', tbs_cert_list['version'].native)
self.assertEqual('rsassa_pkcs1v15', tbs_cert_list['signature'].signature_algo)
self.assertEqual('sha256', tbs_cert_list['signature'].hash_algo)
self.assertEqual(root_certificate.asn1.subject, tbs_cert_list['issuer'])
self.assertEqual(root_certificate.asn1.subject.sha256, tbs_cert_list['issuer'].sha256)
self.assertGreaterEqual(now, tbs_cert_list['this_update'].native)
self.assertLess(now, tbs_cert_list['next_update'].native)
self.assertEqual(set(['issuing_distribution_point']), new_cert_list.critical_extensions)
self.assertEqual(1, len(revoked_certificates))
revoked_cert = revoked_certificates[0]
self.assertEqual(29232181, revoked_cert['user_certificate'].native)
self.assertEqual(revoked_at, revoked_cert['revocation_date'].native)
self.assertEqual(set(), revoked_cert.critical_extensions)
self.assertEqual('key_compromise', revoked_cert.crl_reason_value.native)
self.assertEqual(None, revoked_cert.invalidity_date_value)
self.assertEqual(None, revoked_cert.certificate_issuer_value)
self.assertEqual(None, revoked_cert.issuer_name)
self.assertEqual(None, new_cert_list.issuer_alt_name_value)
self.assertEqual(50000, new_cert_list.crl_number_value.native)
self.assertEqual(None, new_cert_list.delta_crl_indicator_value)
self.assertEqual('full_name', new_cert_list.issuing_distribution_point_value['distribution_point'].name)
self.assertEqual(
'uniform_resource_identifier',
new_cert_list.issuing_distribution_point_value['distribution_point'].chosen[0].name
)
self.assertEqual(
'http://crl.example.com',
new_cert_list.issuing_distribution_point_value['distribution_point'].chosen[0].native
)
self.assertEqual(root_certificate.asn1.key_identifier, new_cert_list.authority_key_identifier)
self.assertEqual(None, new_cert_list.freshest_crl_value)
self.assertEqual(None, new_cert_list.authority_information_access_value)
def test_build_indirect_crl(self):
root_certificate = asymmetric.load_certificate(os.path.join(fixtures_dir, 'root.crt'))
crl_issuer_private_key = asymmetric.load_private_key(
os.path.join(fixtures_dir, 'crl_issuer.key'),
'password123'
)
crl_issuer_certificate = asymmetric.load_certificate(os.path.join(fixtures_dir, 'crl_issuer.crt'))
builder = CertificateListBuilder(
'http://crl.example.com',
crl_issuer_certificate,
50000
)
builder.certificate_issuer = root_certificate
builder.issuer_certificate_url = 'http://download.example.com/crl_issuer'
builder.delta_crl_url = 'http://crl.example.com/delta'
revoked_at = datetime(2015, 8, 1, 12, 0, 0, tzinfo=timezone.utc)
builder.add_certificate(29232181, revoked_at, 'key_compromise')
revoked_at_2 = datetime(2014, 12, 29, 8, 0, 0, tzinfo=timezone.utc)
builder.add_certificate(12345678, revoked_at_2, 'key_compromise')
certificate_list = builder.build(crl_issuer_private_key)
der_bytes = certificate_list.dump()
new_cert_list = crl.CertificateList.load(der_bytes)
tbs_cert_list = new_cert_list['tbs_cert_list']
revoked_certificates = tbs_cert_list['revoked_certificates']
now = datetime.now(timezone.utc)
self.assertEqual('v3', tbs_cert_list['version'].native)
self.assertEqual('rsassa_pkcs1v15', tbs_cert_list['signature'].signature_algo)
self.assertEqual('sha256', tbs_cert_list['signature'].hash_algo)
self.assertEqual(crl_issuer_certificate.asn1.subject, tbs_cert_list['issuer'])
self.assertEqual(crl_issuer_certificate.asn1.subject.sha256, tbs_cert_list['issuer'].sha256)
self.assertGreaterEqual(now, tbs_cert_list['this_update'].native)
self.assertLess(now, tbs_cert_list['next_update'].native)
self.assertEqual(set(['issuing_distribution_point']), new_cert_list.critical_extensions)
self.assertEqual(2, len(revoked_certificates))
revoked_cert_1 = revoked_certificates[0]
self.assertEqual(29232181, revoked_cert_1['user_certificate'].native)
self.assertEqual(revoked_at, revoked_cert_1['revocation_date'].native)
self.assertEqual(set(['certificate_issuer']), revoked_cert_1.critical_extensions)
self.assertEqual('key_compromise', revoked_cert_1.crl_reason_value.native)
self.assertEqual(None, revoked_cert_1.invalidity_date_value)
self.assertEqual('directory_name', revoked_cert_1.certificate_issuer_value[0].name)
self.assertNotEqual(None, revoked_cert_1.certificate_issuer_value)
self.assertEqual(root_certificate.asn1.subject, revoked_cert_1.issuer_name)
revoked_cert_2 = revoked_certificates[1]
self.assertEqual(12345678, revoked_cert_2['user_certificate'].native)
self.assertEqual(revoked_at_2, revoked_cert_2['revocation_date'].native)
self.assertEqual(set(), revoked_cert_2.critical_extensions)
self.assertEqual('key_compromise', revoked_cert_2.crl_reason_value.native)
self.assertEqual(None, revoked_cert_2.invalidity_date_value)
self.assertEqual(None, revoked_cert_2.certificate_issuer_value)
self.assertEqual(None, revoked_cert_2.issuer_name)
self.assertEqual(None, new_cert_list.issuer_alt_name_value)
self.assertEqual(50000, new_cert_list.crl_number_value.native)
self.assertEqual(None, new_cert_list.delta_crl_indicator_value)
self.assertEqual('full_name', new_cert_list.issuing_distribution_point_value['distribution_point'].name)
self.assertEqual(
'uniform_resource_identifier',
new_cert_list.issuing_distribution_point_value['distribution_point'].chosen[0].name
)
self.assertEqual(
'http://crl.example.com',
new_cert_list.issuing_distribution_point_value['distribution_point'].chosen[0].native
)
self.assertEqual(crl_issuer_certificate.asn1.key_identifier, new_cert_list.authority_key_identifier)
self.assertEqual('http://crl.example.com/delta', new_cert_list.delta_crl_distribution_points[0].url)
self.assertEqual(['http://download.example.com/crl_issuer'], new_cert_list.issuer_cert_urls)
def test_build_delta_crl(self):
root_certificate = asymmetric.load_certificate(os.path.join(fixtures_dir, 'root.crt'))
crl_issuer_private_key = asymmetric.load_private_key(
os.path.join(fixtures_dir, 'crl_issuer.key'),
'password123'
)
crl_issuer_certificate = asymmetric.load_certificate(os.path.join(fixtures_dir, 'crl_issuer.crt'))
builder = CertificateListBuilder(
'http://crl.example.com/delta',
crl_issuer_certificate,
50001
)
builder.certificate_issuer = root_certificate
builder.issuer_certificate_url = 'http://download.example.com/crl_issuer'
builder.delta_of = 50000
certificate_list = builder.build(crl_issuer_private_key)
der_bytes = certificate_list.dump()
new_cert_list = crl.CertificateList.load(der_bytes)
tbs_cert_list = new_cert_list['tbs_cert_list']
revoked_certificates = tbs_cert_list['revoked_certificates']
now = datetime.now(timezone.utc)
self.assertEqual('v3', tbs_cert_list['version'].native)
self.assertEqual('rsassa_pkcs1v15', tbs_cert_list['signature'].signature_algo)
self.assertEqual('sha256', tbs_cert_list['signature'].hash_algo)
self.assertEqual(crl_issuer_certificate.asn1.subject, tbs_cert_list['issuer'])
self.assertEqual(crl_issuer_certificate.asn1.subject.sha256, tbs_cert_list['issuer'].sha256)
self.assertGreaterEqual(now, tbs_cert_list['this_update'].native)
self.assertLess(now, tbs_cert_list['next_update'].native)
self.assertEqual(set(['issuing_distribution_point', 'delta_crl_indicator']), new_cert_list.critical_extensions)
self.assertEqual(0, len(revoked_certificates))
self.assertEqual(None, new_cert_list.issuer_alt_name_value)
self.assertEqual(50001, new_cert_list.crl_number_value.native)
self.assertEqual(50000, new_cert_list.delta_crl_indicator_value.native)
self.assertEqual('full_name', new_cert_list.issuing_distribution_point_value['distribution_point'].name)
self.assertEqual(
'uniform_resource_identifier',
new_cert_list.issuing_distribution_point_value['distribution_point'].chosen[0].name
)
self.assertEqual(
'http://crl.example.com/delta',
new_cert_list.issuing_distribution_point_value['distribution_point'].chosen[0].native
)
self.assertEqual(crl_issuer_certificate.asn1.key_identifier, new_cert_list.authority_key_identifier)
self.assertEqual([], new_cert_list.delta_crl_distribution_points)
self.assertEqual(['http://download.example.com/crl_issuer'], new_cert_list.issuer_cert_urls)
| 50.199052 | 119 | 0.729796 | 1,284 | 10,592 | 5.647196 | 0.102804 | 0.142739 | 0.054613 | 0.022342 | 0.877534 | 0.859468 | 0.82499 | 0.792856 | 0.707489 | 0.657564 | 0 | 0.024321 | 0.169279 | 10,592 | 210 | 120 | 50.438095 | 0.79975 | 0.001227 | 0 | 0.575581 | 0 | 0 | 0.132552 | 0.015033 | 0 | 0 | 0 | 0 | 0.44186 | 1 | 0.017442 | false | 0.017442 | 0.052326 | 0 | 0.075581 | 0.005814 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
| 6ed20138123ff9ff43e2aec7ce62325a1643eea9 | 254 | py | Python | plugins/cherwell/komand_cherwell/actions/__init__.py | lukaszlaszuk/insightconnect-plugins | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | ["MIT"] | 46 | 2019-06-05T20:47:58.000Z | 2022-03-29T10:18:01.000Z | plugins/cherwell/komand_cherwell/actions/__init__.py | lukaszlaszuk/insightconnect-plugins | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | ["MIT"] | 386 | 2019-06-07T20:20:39.000Z | 2022-03-30T17:35:01.000Z | plugins/cherwell/komand_cherwell/actions/__init__.py | lukaszlaszuk/insightconnect-plugins | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | ["MIT"] | 43 | 2019-07-09T14:13:58.000Z | 2022-03-28T12:04:46.000Z |
# GENERATED BY KOMAND SDK - DO NOT EDIT
from .create_incident.action import CreateIncident
from .lookup_incident.action import LookupIncident
from .perform_ad_hoc_search.action import PerformAdHocSearch
from .update_incident.action import UpdateIncident
| 42.333333 | 60 | 0.862205 | 33 | 254 | 6.454545 | 0.666667 | 0.225352 | 0.28169 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.098425 | 254 | 5 | 61 | 50.8 | 0.930131 | 0.145669 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
| 6e517a0d2ceb3582ba7cf2bb24bdf5ae9548fe6f | 45,580 | py | Python | data/saveMaya/mayaScript2.py | n1ckfg/KinectToPin | dcb2597d6f34769b7ffebb3a24024bd971e8d53f | ["MIT"] | 26 | 2015-04-10T06:27:26.000Z | 2022-02-06T04:24:17.000Z | data/saveMaya/mayaScript2.py | n1ckfg/KinectToPin | dcb2597d6f34769b7ffebb3a24024bd971e8d53f | ["MIT"] | null | null | null | data/saveMaya/mayaScript2.py | n1ckfg/KinectToPin | dcb2597d6f34769b7ffebb3a24024bd971e8d53f | ["MIT"] | 11 | 2016-08-13T14:13:26.000Z | 2020-02-19T16:23:50.000Z |
from maya.cmds import *
from random import uniform as rnd
#select(all=True)
#delete()
playbackOptions(minTime="0", maxTime="44")
#grav = gravity()
spaceLocator(name="head")
currentTime(0)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(1)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(2)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(3)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(4)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(5)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(6)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(7)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(8)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(9)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(10)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(11)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(12)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(13)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(14)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(15)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(16)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(17)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(18)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(19)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(20)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(21)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(22)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(23)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(24)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(25)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(26)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(27)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(28)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(29)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(30)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(31)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(32)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(33)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(34)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(35)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(36)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(37)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(38)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(39)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(40)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(41)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(42)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(43)
move(7.006687, 8.273956,11.346117)
setKeyframe()
currentTime(44)
move(7.006687, 8.273956,11.346117)
setKeyframe()
spaceLocator(name="neck")
currentTime(0)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(1)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(2)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(3)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(4)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(5)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(6)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(7)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(8)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(9)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(10)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(11)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(12)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(13)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(14)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(15)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(16)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(17)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(18)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(19)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(20)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(21)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(22)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(23)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(24)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(25)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(26)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(27)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(28)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(29)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(30)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(31)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(32)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(33)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(34)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(35)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(36)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(37)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(38)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(39)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(40)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(41)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(42)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(43)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
currentTime(44)
move(6.918183, 7.1631947,11.377573)
setKeyframe()
spaceLocator(name="torso")
currentTime(0)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(1)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(2)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(3)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(4)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(5)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(6)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(7)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(8)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(9)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(10)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(11)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(12)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(13)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(14)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(15)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(16)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(17)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(18)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(19)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(20)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(21)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(22)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(23)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(24)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(25)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(26)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(27)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(28)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(29)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(30)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(31)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(32)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(33)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(34)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(35)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(36)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(37)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(38)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(39)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(40)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(41)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(42)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(43)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
currentTime(44)
move(6.7991977, 6.061902,11.431746)
setKeyframe()
spaceLocator(name="l_shoulder")
currentTime(0)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(1)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(2)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(3)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(4)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(5)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(6)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(7)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(8)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(9)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(10)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(11)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(12)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(13)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(14)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(15)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(16)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(17)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(18)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(19)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(20)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(21)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(22)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(23)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(24)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(25)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(26)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(27)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(28)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(29)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(30)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(31)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(32)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(33)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(34)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(35)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(36)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(37)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(38)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(39)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(40)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(41)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(42)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(43)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
currentTime(44)
move(6.570758, 7.3247175,10.807184)
setKeyframe()
spaceLocator(name="l_elbow")
currentTime(0)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(1)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(2)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(3)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(4)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(5)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(6)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(7)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(8)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(9)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(10)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(11)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(12)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(13)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(14)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(15)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(16)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(17)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(18)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(19)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(20)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(21)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(22)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(23)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(24)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(25)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(26)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(27)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(28)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(29)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(30)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(31)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(32)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(33)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(34)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(35)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(36)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(37)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(38)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(39)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(40)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(41)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(42)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(43)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
currentTime(44)
move(6.1841893, 6.6290708,9.524011)
setKeyframe()
spaceLocator(name="l_hand")
currentTime(0)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(1)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(2)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(3)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(4)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(5)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(6)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(7)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(8)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(9)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(10)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(11)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(12)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(13)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(14)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(15)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(16)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(17)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(18)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(19)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(20)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(21)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(22)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(23)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(24)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(25)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(26)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(27)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(28)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(29)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(30)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(31)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(32)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(33)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(34)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(35)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(36)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(37)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(38)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(39)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(40)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(41)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(42)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(43)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
currentTime(44)
move(5.8309507, 5.294569,8.7813015)
setKeyframe()
spaceLocator(name="r_shoulder")
currentTime(0)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(1)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(2)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(3)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(4)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(5)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(6)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(7)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(8)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(9)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(10)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(11)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(12)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(13)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(14)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(15)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(16)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(17)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(18)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(19)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(20)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(21)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(22)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(23)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(24)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(25)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(26)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(27)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(28)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(29)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(30)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(31)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(32)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(33)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(34)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(35)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(36)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(37)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(38)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(39)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(40)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(41)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(42)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(43)
move(7.232436, 7.017094,11.947964)
setKeyframe()
currentTime(44)
move(7.232436, 7.017094,11.947964)
setKeyframe()
spaceLocator(name="r_elbow")
currentTime(0)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(1)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(2)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(3)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(4)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(5)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(6)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(7)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(8)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(9)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(10)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(11)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(12)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(13)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(14)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(15)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(16)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(17)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(18)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(19)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(20)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(21)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(22)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(23)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(24)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(25)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(26)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(27)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(28)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(29)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(30)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(31)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(32)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(33)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(34)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(35)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(36)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(37)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(38)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(39)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(40)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(41)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(42)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(43)
move(8.221722, 7.254464,12.231777)
setKeyframe()
currentTime(44)
move(8.221722, 7.254464,12.231777)
setKeyframe()
spaceLocator(name="r_hand")
currentTime(0)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(1)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(2)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(3)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(4)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(5)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(6)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(7)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(8)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(9)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(10)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(11)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(12)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(13)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(14)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(15)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(16)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(17)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(18)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(19)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(20)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(21)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(22)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(23)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(24)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(25)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(26)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(27)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(28)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(29)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(30)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(31)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(32)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(33)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(34)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(35)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(36)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(37)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(38)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(39)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(40)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(41)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(42)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(43)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
currentTime(44)
move(9.194487, 8.2138815,11.937321)
setKeyframe()
spaceLocator(name="l_hip")
currentTime(0)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(1)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(2)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(3)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(4)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(5)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(6)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(7)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(8)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(9)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(10)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(11)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(12)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(13)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(14)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(15)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(16)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(17)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(18)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(19)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(20)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(21)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(22)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(23)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(24)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(25)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(26)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(27)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(28)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(29)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(30)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(31)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(32)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(33)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(34)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(35)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(36)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(37)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(38)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(39)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(40)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(41)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(42)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(43)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
currentTime(44)
move(6.4632087, 4.9987674,11.129921)
setKeyframe()
spaceLocator(name="l_knee")
currentTime(0)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(1)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(2)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(3)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(4)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(5)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(6)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(7)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(8)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(9)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(10)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(11)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(12)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(13)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(14)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(15)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(16)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(17)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(18)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(19)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(20)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(21)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(22)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(23)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(24)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(25)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(26)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(27)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(28)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(29)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(30)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(31)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(32)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(33)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(34)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(35)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(36)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(37)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(38)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(39)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(40)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(41)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(42)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(43)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
currentTime(44)
move(7.2333236, 3.015377,11.085175)
setKeyframe()
spaceLocator(name="l_foot")
currentTime(0)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(1)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(2)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(3)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(4)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(5)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(6)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(7)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(8)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(9)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(10)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(11)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(12)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(13)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(14)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(15)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(16)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(17)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(18)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(19)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(20)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(21)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(22)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(23)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(24)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(25)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(26)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(27)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(28)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(29)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(30)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(31)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(32)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(33)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(34)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(35)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(36)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(37)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(38)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(39)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(40)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(41)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(42)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(43)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
currentTime(44)
move(7.1088, 1.1246662,11.631865)
setKeyframe()
spaceLocator(name="r_hip")
currentTime(0)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(1)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(2)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(3)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(4)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(5)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(6)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(7)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(8)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(9)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(10)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(11)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(12)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(13)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(14)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(15)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(16)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(17)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(18)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(19)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(20)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(21)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(22)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(23)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(24)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(25)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(26)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(27)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(28)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(29)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(30)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(31)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(32)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(33)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(34)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(35)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(36)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(37)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(38)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(39)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(40)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(41)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(42)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(43)
move(6.886347, 4.944899,11.841916)
setKeyframe()
currentTime(44)
move(6.886347, 4.944899,11.841916)
setKeyframe()
spaceLocator(name="r_knee")
currentTime(0)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(1)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(2)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(3)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(4)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(5)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(6)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(7)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(8)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(9)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(10)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(11)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(12)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(13)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(14)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(15)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(16)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(17)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(18)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(19)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(20)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(21)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(22)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(23)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(24)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(25)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(26)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(27)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(28)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(29)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(30)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(31)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(32)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(33)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(34)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(35)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(36)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(37)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(38)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(39)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(40)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(41)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(42)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(43)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
currentTime(44)
move(5.7724905, 3.7196121,10.935054)
setKeyframe()
spaceLocator(name="r_foot")
currentTime(0)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(1)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(2)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(3)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(4)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(5)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(6)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(7)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(8)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(9)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(10)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(11)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(12)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(13)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(14)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(15)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(16)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(17)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(18)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(19)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(20)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(21)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(22)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(23)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(24)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(25)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(26)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(27)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(28)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(29)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(30)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(31)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(32)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(33)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(34)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(35)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(36)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(37)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(38)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(39)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(40)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(41)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(42)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(43)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
currentTime(44)
move(4.8343678, 1.8145638,10.789901)
setKeyframe()
#floor = polyPlane(w=30,h=30)
#rigidBody(passive=True)
#move(0,0,0)
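# A minimal sketch (not from the original script) of how the commented-out floor above
# could be enabled, assuming the same star-import of maya.cmds used by the calls above;
# polyPlane, select, rigidBody and move are standard maya.cmds commands:
#   floor = polyPlane(w=30, h=30)      # returns [transformName, polyPlaneNodeName]
#   select(floor[0])                   # make the plane the active selection
#   rigidBody(passive=True)            # turn it into a passive (non-moving) rigid body
#   move(0, 0, 0)                      # keep it at the origin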
| 22.223306
| 42
| 0.759719
| 6,841
| 45,580
| 5.060079
| 0.018857
| 0.419459
| 0.0143
| 0.0156
| 0.975069
| 0.946817
| 0.946817
| 0.946817
| 0.930437
| 0
| 0
| 0.393682
| 0.074901
| 45,580
| 2,050
| 43
| 22.234146
| 0.427264
| 0.002238
| 0
| 0.991189
| 0
| 0
| 0.002143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.000979
| 0
| 0.000979
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 954757781e7f5b34d3696e32bfd0f2869b4d005d
| 212,108
| py
| Python
| sdk/python/pulumi_alicloud/cs/kubernetes.py
| pulumi/pulumi-alicloud
| 9c34d84b4588a7c885c6bec1f03b5016e5a41683
| ["ECL-2.0", "Apache-2.0"]
| 42
| 2019-03-18T06:34:37.000Z
| 2022-03-24T07:08:57.000Z
| sdk/python/pulumi_alicloud/cs/kubernetes.py
| pulumi/pulumi-alicloud
| 9c34d84b4588a7c885c6bec1f03b5016e5a41683
| ["ECL-2.0", "Apache-2.0"]
| 152
| 2019-04-15T21:03:44.000Z
| 2022-03-29T18:00:57.000Z
| sdk/python/pulumi_alicloud/cs/kubernetes.py
| pulumi/pulumi-alicloud
| 9c34d84b4588a7c885c6bec1f03b5016e5a41683
| ["ECL-2.0", "Apache-2.0"]
| 3
| 2020-08-26T17:30:07.000Z
| 2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['KubernetesArgs', 'Kubernetes']
@pulumi.input_type
class KubernetesArgs:
def __init__(__self__, *,
master_instance_types: pulumi.Input[Sequence[pulumi.Input[str]]],
master_vswitch_ids: pulumi.Input[Sequence[pulumi.Input[str]]],
worker_instance_types: pulumi.Input[Sequence[pulumi.Input[str]]],
worker_number: pulumi.Input[int],
worker_vswitch_ids: pulumi.Input[Sequence[pulumi.Input[str]]],
addons: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesAddonArgs']]]] = None,
api_audiences: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
availability_zone: Optional[pulumi.Input[str]] = None,
client_cert: Optional[pulumi.Input[str]] = None,
client_key: Optional[pulumi.Input[str]] = None,
cluster_ca_cert: Optional[pulumi.Input[str]] = None,
cluster_domain: Optional[pulumi.Input[str]] = None,
cpu_policy: Optional[pulumi.Input[str]] = None,
custom_san: Optional[pulumi.Input[str]] = None,
deletion_protection: Optional[pulumi.Input[bool]] = None,
enable_ssh: Optional[pulumi.Input[bool]] = None,
exclude_autoscaler_nodes: Optional[pulumi.Input[bool]] = None,
image_id: Optional[pulumi.Input[str]] = None,
install_cloud_monitor: Optional[pulumi.Input[bool]] = None,
is_enterprise_security_group: Optional[pulumi.Input[bool]] = None,
key_name: Optional[pulumi.Input[str]] = None,
kms_encrypted_password: Optional[pulumi.Input[str]] = None,
kms_encryption_context: Optional[pulumi.Input[Mapping[str, Any]]] = None,
kube_config: Optional[pulumi.Input[str]] = None,
load_balancer_spec: Optional[pulumi.Input[str]] = None,
master_auto_renew: Optional[pulumi.Input[bool]] = None,
master_auto_renew_period: Optional[pulumi.Input[int]] = None,
master_disk_category: Optional[pulumi.Input[str]] = None,
master_disk_performance_level: Optional[pulumi.Input[str]] = None,
master_disk_size: Optional[pulumi.Input[int]] = None,
master_disk_snapshot_policy_id: Optional[pulumi.Input[str]] = None,
master_instance_charge_type: Optional[pulumi.Input[str]] = None,
master_period: Optional[pulumi.Input[int]] = None,
master_period_unit: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
name_prefix: Optional[pulumi.Input[str]] = None,
new_nat_gateway: Optional[pulumi.Input[bool]] = None,
node_cidr_mask: Optional[pulumi.Input[int]] = None,
node_name_mode: Optional[pulumi.Input[str]] = None,
node_port_range: Optional[pulumi.Input[str]] = None,
os_type: Optional[pulumi.Input[str]] = None,
password: Optional[pulumi.Input[str]] = None,
platform: Optional[pulumi.Input[str]] = None,
pod_cidr: Optional[pulumi.Input[str]] = None,
pod_vswitch_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
proxy_mode: Optional[pulumi.Input[str]] = None,
rds_instances: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
resource_group_id: Optional[pulumi.Input[str]] = None,
runtime: Optional[pulumi.Input['KubernetesRuntimeArgs']] = None,
security_group_id: Optional[pulumi.Input[str]] = None,
service_account_issuer: Optional[pulumi.Input[str]] = None,
service_cidr: Optional[pulumi.Input[str]] = None,
slb_internet_enabled: Optional[pulumi.Input[bool]] = None,
tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
taints: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesTaintArgs']]]] = None,
timezone: Optional[pulumi.Input[str]] = None,
user_ca: Optional[pulumi.Input[str]] = None,
user_data: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[str]] = None,
worker_auto_renew: Optional[pulumi.Input[bool]] = None,
worker_auto_renew_period: Optional[pulumi.Input[int]] = None,
worker_data_disk_category: Optional[pulumi.Input[str]] = None,
worker_data_disk_size: Optional[pulumi.Input[int]] = None,
worker_data_disks: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesWorkerDataDiskArgs']]]] = None,
worker_disk_category: Optional[pulumi.Input[str]] = None,
worker_disk_performance_level: Optional[pulumi.Input[str]] = None,
worker_disk_size: Optional[pulumi.Input[int]] = None,
worker_disk_snapshot_policy_id: Optional[pulumi.Input[str]] = None,
worker_instance_charge_type: Optional[pulumi.Input[str]] = None,
worker_period: Optional[pulumi.Input[int]] = None,
worker_period_unit: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Kubernetes resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] master_instance_types: The instance type of master node. Specify one type for single AZ Cluster, three types for MultiAZ Cluster.
:param pulumi.Input[Sequence[pulumi.Input[str]]] worker_instance_types: The instance type of worker node. Specify one type for single AZ Cluster, three types for MultiAZ Cluster.
:param pulumi.Input[int] worker_number: The number of worker nodes in the kubernetes cluster. Default to 3. It is limited to 50; if you want to enlarge it, please apply for a whitelist or contact us.
:param pulumi.Input[Sequence[pulumi.Input[str]]] api_audiences: A list of API audiences for [Service Account Token Volume Projection](https://www.alibabacloud.com/help/doc-detail/160384.htm). Set this to `["kubernetes.default.svc"]` if you want to enable the Token Volume Projection feature (requires specifying `service_account_issuer` as well).
:param pulumi.Input[str] availability_zone: The zone where the new kubernetes cluster will be located. If it is not specified, `vswitch_ids` should be set, and its value will be the vswitch's zone.
:param pulumi.Input[str] client_cert: The path of client certificate, like `~/.kube/client-cert.pem`.
:param pulumi.Input[str] client_key: The path of client key, like `~/.kube/client-key.pem`.
:param pulumi.Input[str] cluster_ca_cert: The path of cluster ca certificate, like `~/.kube/cluster-ca-cert.pem`
:param pulumi.Input[str] cluster_domain: Cluster local domain name. Defaults to `cluster.local`. A domain name consists of one or more sections separated by a dot (.); each section can be up to 63 characters long, may contain lowercase letters, numerals, and hyphens (-), and must begin and end with a lowercase letter or numeral.
:param pulumi.Input[str] cpu_policy: Kubelet cpu policy. For Kubernetes 1.12.6 and later, its valid value is either `static` or `none`. Default to `none`.
:param pulumi.Input[str] custom_san: Customize the certificate SAN, multiple IP or domain names are separated by English commas (,).
:param pulumi.Input[bool] deletion_protection: Whether to enable cluster deletion protection.
:param pulumi.Input[bool] enable_ssh: Enable login to the node through SSH. Default to `false`.
:param pulumi.Input[bool] exclude_autoscaler_nodes: Exclude autoscaler nodes from `worker_nodes`. Default to `false`.
:param pulumi.Input[str] image_id: Custom image support. Must be based on CentOS7 or AliyunLinux2.
:param pulumi.Input[bool] install_cloud_monitor: Install cloud monitor agent on ECS. Default to `true`.
:param pulumi.Input[bool] is_enterprise_security_group: Enable to create advanced security group. default: false. See [Advanced security group](https://www.alibabacloud.com/help/doc-detail/120621.htm).
:param pulumi.Input[str] key_name: The keypair of ssh login cluster node, you have to create it first. You have to specify one of `password` `key_name` `kms_encrypted_password` fields.
:param pulumi.Input[str] kms_encrypted_password: A KMS-encrypted password used for the cs kubernetes cluster. You have to specify one of the `password`, `key_name`, or `kms_encrypted_password` fields.
:param pulumi.Input[Mapping[str, Any]] kms_encryption_context: A KMS encryption context used to decrypt `kms_encrypted_password` before creating or updating a cs kubernetes with `kms_encrypted_password`. See [Encryption Context](https://www.alibabacloud.com/help/doc-detail/42975.htm). It is valid when `kms_encrypted_password` is set.
:param pulumi.Input[str] kube_config: The path of kube config, like `~/.kube/config`.
:param pulumi.Input[str] load_balancer_spec: The cluster api server load balance instance specification, default `slb.s1.small`. For more information on how to select a LB instance specification, see [SLB instance overview](https://help.aliyun.com/document_detail/85931.html).
:param pulumi.Input[bool] master_auto_renew: Enable master payment auto-renew, defaults to false.
:param pulumi.Input[int] master_auto_renew_period: Master payment auto-renew period, it can be one of {1, 2, 3, 6, 12}.
:param pulumi.Input[str] master_disk_category: The system disk category of master node. Its valid value are `cloud_ssd`, `cloud_essd` and `cloud_efficiency`. Default to `cloud_efficiency`.
:param pulumi.Input[str] master_disk_performance_level: Master node system disk performance level. When `master_disk_category` values `cloud_essd`, the optional values are `PL0`, `PL1`, `PL2` or `PL3`, but the specific performance level is related to the disk capacity. For more information, see [Enhanced SSDs](https://www.alibabacloud.com/help/doc-detail/122389.htm). Default is `PL1`.
:param pulumi.Input[int] master_disk_size: The system disk size of master node. Its valid value range [20~500] in GB. Default to 20.
:param pulumi.Input[str] master_disk_snapshot_policy_id: Master node system disk auto snapshot policy.
:param pulumi.Input[str] master_instance_charge_type: Master payment type, either `PostPaid` or `PrePaid`; defaults to `PostPaid`. If the value is `PrePaid`, the fields `master_period`, `master_period_unit`, `master_auto_renew` and `master_auto_renew_period` are required.
:param pulumi.Input[int] master_period: Master payment period.Its valid value is one of {1, 2, 3, 6, 12, 24, 36, 48, 60}.
:param pulumi.Input[str] master_period_unit: Master payment period unit, the valid value is `Month`.
:param pulumi.Input[str] name: The kubernetes cluster's name. It is unique in one Alicloud account.
:param pulumi.Input[bool] new_nat_gateway: Whether to create a new NAT gateway while creating the kubernetes cluster. Default to true. Since the OpenAPI endpoints in Alibaba Cloud are not all reachable from the intranet, turning this option on is a good choice.
:param pulumi.Input[int] node_cidr_mask: The node CIDR mask that specifies how many pods can run on a single node. Values of 24-28 are allowed. 24 means 2^(32-24)-1=255, so the node can run at most 255 pods. default: 24
:param pulumi.Input[str] node_name_mode: Each node name consists of a prefix, an IP substring, and a suffix. For example, if the node IP address is 192.168.0.55, the prefix is aliyun.com, IP substring length is 5, and the suffix is test, the node name will be `aliyun.com00055test`.
:param pulumi.Input[str] node_port_range: The service port range of nodes, valid values: `30000` to `65535`. Default to `30000-32767`.
:param pulumi.Input[str] os_type: The operating system of the nodes that run pods, its valid value is either `Linux` or `Windows`. Default to `Linux`.
:param pulumi.Input[str] password: The password of ssh login cluster node. You have to specify one of `password` `key_name` `kms_encrypted_password` fields.
:param pulumi.Input[str] platform: The architecture of the nodes that run pods, its valid value is either `CentOS` or `AliyunLinux`. Default to `CentOS`.
:param pulumi.Input[str] pod_cidr: - [Flannel Specific] The CIDR block for the pod network when using Flannel.
:param pulumi.Input[Sequence[pulumi.Input[str]]] pod_vswitch_ids: - [Terway Specific] The vswitches for the pod network when using Terway. Note that `pod_vswitch_ids` cannot be equal to `worker_vswitch_ids` or `master_vswitch_ids`, but must be in the same availability zones.
:param pulumi.Input[str] proxy_mode: Proxy mode is option of kube-proxy. options: iptables | ipvs. default: ipvs.
:param pulumi.Input[Sequence[pulumi.Input[str]]] rds_instances: RDS instance list, You can choose which RDS instances whitelist to add instances to.
:param pulumi.Input[str] resource_group_id: The ID of the resource group; by default these cloud resources are automatically assigned to the default resource group.
:param pulumi.Input['KubernetesRuntimeArgs'] runtime: The runtime of containers. Default to `docker`. If you select another container runtime, see [How do I select between Docker and Sandboxed-Container](https://www.alibabacloud.com/help/doc-detail/160313.htm?spm=a2c63.p38356.b99.440.22563866AJkBgI). Detailed below.
:param pulumi.Input[str] security_group_id: The ID of the security group to which the ECS instances in the cluster belong. If it is not specified, a new Security group will be built.
:param pulumi.Input[str] service_account_issuer: The issuer of the Service Account token for [Service Account Token Volume Projection](https://www.alibabacloud.com/help/doc-detail/160384.htm), corresponds to the `iss` field in the token payload. Set this to `"kubernetes.default.svc"` to enable the Token Volume Projection feature (requires specifying `api_audiences` as well).
:param pulumi.Input[str] service_cidr: The CIDR block for the service network. It cannot be duplicated with the VPC CIDR and CIDR used by Kubernetes cluster in VPC, cannot be modified after creation.
:param pulumi.Input[bool] slb_internet_enabled: Whether to create internet load balancer for API Server. Default to true.
:param pulumi.Input[Mapping[str, Any]] tags: Default nil, A map of tags assigned to the kubernetes cluster and work nodes. Detailed below.
:param pulumi.Input[Sequence[pulumi.Input['KubernetesTaintArgs']]] taints: Taints ensure pods are not scheduled onto inappropriate nodes. One or more taints are applied to a node; this marks that the node should not accept any pods that do not tolerate the taints. For more information, see [Taints and Tolerations](https://kubernetes.io/docs/concepts/scheduling-eviction/taint-and-toleration/). Detailed below.
:param pulumi.Input[str] timezone: When you create a cluster, set the time zones for the Master and Worker nodes. The managed node time zone can only be set when the cluster is created; once the cluster is created, you can only change the time zone of the Worker nodes.
:param pulumi.Input[str] user_ca: The path of customized CA cert, you can use this CA to sign client certs to connect your cluster.
:param pulumi.Input[str] user_data: Custom data that can execute on nodes. For more information, see [Prepare user data](https://www.alibabacloud.com/help/doc-detail/49121.htm).
:param pulumi.Input[str] version: Desired Kubernetes version. If you do not specify a value, the latest available version at resource creation is used and no upgrades will occur except you set a higher version number. The value must be configured and increased to upgrade the version when desired. Downgrades are not supported by ACK.
:param pulumi.Input[bool] worker_auto_renew: Enable worker payment auto-renew, defaults to false.
:param pulumi.Input[int] worker_auto_renew_period: Worker payment auto-renew period, it can be one of {1, 2, 3, 6, 12}.
:param pulumi.Input[Sequence[pulumi.Input['KubernetesWorkerDataDiskArgs']]] worker_data_disks: The data disk configurations of worker nodes, such as the disk type and disk size.
:param pulumi.Input[str] worker_disk_category: The system disk category of worker node. Its valid value are `cloud`, `cloud_ssd`, `cloud_essd` and `cloud_efficiency`. Default to `cloud_efficiency`.
:param pulumi.Input[str] worker_disk_performance_level: Worker node system disk performance level, when `worker_disk_category` values `cloud_essd`, the optional values are `PL0`, `PL1`, `PL2` or `PL3`, but the specific performance level is related to the disk capacity. For more information, see [Enhanced SSDs](https://www.alibabacloud.com/help/doc-detail/122389.htm). Default is `PL1`.
:param pulumi.Input[int] worker_disk_size: The system disk size of worker node. Its valid value range [40~500] in GB. Default to 40.
:param pulumi.Input[str] worker_disk_snapshot_policy_id: Worker node system disk auto snapshot policy.
:param pulumi.Input[str] worker_instance_charge_type: Worker payment type, its valid value is either `PostPaid` or `PrePaid`. Defaults to `PostPaid`. If the value is `PrePaid`, the fields `worker_period`, `worker_period_unit`, `worker_auto_renew` and `worker_auto_renew_period` are required.
:param pulumi.Input[int] worker_period: Worker payment period. The unit is `Month`. Its valid value is one of {1, 2, 3, 6, 12, 24, 36, 48, 60}.
:param pulumi.Input[str] worker_period_unit: Worker payment period unit, the valid value is `Month`.
"""
pulumi.set(__self__, "master_instance_types", master_instance_types)
pulumi.set(__self__, "master_vswitch_ids", master_vswitch_ids)
pulumi.set(__self__, "worker_instance_types", worker_instance_types)
pulumi.set(__self__, "worker_number", worker_number)
pulumi.set(__self__, "worker_vswitch_ids", worker_vswitch_ids)
if addons is not None:
pulumi.set(__self__, "addons", addons)
if api_audiences is not None:
pulumi.set(__self__, "api_audiences", api_audiences)
if availability_zone is not None:
pulumi.set(__self__, "availability_zone", availability_zone)
if client_cert is not None:
pulumi.set(__self__, "client_cert", client_cert)
if client_key is not None:
pulumi.set(__self__, "client_key", client_key)
if cluster_ca_cert is not None:
pulumi.set(__self__, "cluster_ca_cert", cluster_ca_cert)
if cluster_domain is not None:
pulumi.set(__self__, "cluster_domain", cluster_domain)
if cpu_policy is not None:
pulumi.set(__self__, "cpu_policy", cpu_policy)
if custom_san is not None:
pulumi.set(__self__, "custom_san", custom_san)
if deletion_protection is not None:
pulumi.set(__self__, "deletion_protection", deletion_protection)
if enable_ssh is not None:
pulumi.set(__self__, "enable_ssh", enable_ssh)
if exclude_autoscaler_nodes is not None:
pulumi.set(__self__, "exclude_autoscaler_nodes", exclude_autoscaler_nodes)
if image_id is not None:
pulumi.set(__self__, "image_id", image_id)
if install_cloud_monitor is not None:
pulumi.set(__self__, "install_cloud_monitor", install_cloud_monitor)
if is_enterprise_security_group is not None:
pulumi.set(__self__, "is_enterprise_security_group", is_enterprise_security_group)
if key_name is not None:
pulumi.set(__self__, "key_name", key_name)
if kms_encrypted_password is not None:
pulumi.set(__self__, "kms_encrypted_password", kms_encrypted_password)
if kms_encryption_context is not None:
pulumi.set(__self__, "kms_encryption_context", kms_encryption_context)
if kube_config is not None:
pulumi.set(__self__, "kube_config", kube_config)
if load_balancer_spec is not None:
pulumi.set(__self__, "load_balancer_spec", load_balancer_spec)
if master_auto_renew is not None:
pulumi.set(__self__, "master_auto_renew", master_auto_renew)
if master_auto_renew_period is not None:
pulumi.set(__self__, "master_auto_renew_period", master_auto_renew_period)
if master_disk_category is not None:
pulumi.set(__self__, "master_disk_category", master_disk_category)
if master_disk_performance_level is not None:
pulumi.set(__self__, "master_disk_performance_level", master_disk_performance_level)
if master_disk_size is not None:
pulumi.set(__self__, "master_disk_size", master_disk_size)
if master_disk_snapshot_policy_id is not None:
pulumi.set(__self__, "master_disk_snapshot_policy_id", master_disk_snapshot_policy_id)
if master_instance_charge_type is not None:
pulumi.set(__self__, "master_instance_charge_type", master_instance_charge_type)
if master_period is not None:
pulumi.set(__self__, "master_period", master_period)
if master_period_unit is not None:
pulumi.set(__self__, "master_period_unit", master_period_unit)
if name is not None:
pulumi.set(__self__, "name", name)
if name_prefix is not None:
warnings.warn("""Field 'name_prefix' has been deprecated from provider version 1.75.0.""", DeprecationWarning)
pulumi.log.warn("""name_prefix is deprecated: Field 'name_prefix' has been deprecated from provider version 1.75.0.""")
if name_prefix is not None:
pulumi.set(__self__, "name_prefix", name_prefix)
if new_nat_gateway is not None:
pulumi.set(__self__, "new_nat_gateway", new_nat_gateway)
if node_cidr_mask is not None:
pulumi.set(__self__, "node_cidr_mask", node_cidr_mask)
if node_name_mode is not None:
pulumi.set(__self__, "node_name_mode", node_name_mode)
if node_port_range is not None:
pulumi.set(__self__, "node_port_range", node_port_range)
if os_type is not None:
pulumi.set(__self__, "os_type", os_type)
if password is not None:
pulumi.set(__self__, "password", password)
if platform is not None:
pulumi.set(__self__, "platform", platform)
if pod_cidr is not None:
pulumi.set(__self__, "pod_cidr", pod_cidr)
if pod_vswitch_ids is not None:
pulumi.set(__self__, "pod_vswitch_ids", pod_vswitch_ids)
if proxy_mode is not None:
pulumi.set(__self__, "proxy_mode", proxy_mode)
if rds_instances is not None:
pulumi.set(__self__, "rds_instances", rds_instances)
if resource_group_id is not None:
pulumi.set(__self__, "resource_group_id", resource_group_id)
if runtime is not None:
pulumi.set(__self__, "runtime", runtime)
if security_group_id is not None:
pulumi.set(__self__, "security_group_id", security_group_id)
if service_account_issuer is not None:
pulumi.set(__self__, "service_account_issuer", service_account_issuer)
if service_cidr is not None:
pulumi.set(__self__, "service_cidr", service_cidr)
if slb_internet_enabled is not None:
pulumi.set(__self__, "slb_internet_enabled", slb_internet_enabled)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if taints is not None:
pulumi.set(__self__, "taints", taints)
if timezone is not None:
pulumi.set(__self__, "timezone", timezone)
if user_ca is not None:
pulumi.set(__self__, "user_ca", user_ca)
if user_data is not None:
pulumi.set(__self__, "user_data", user_data)
if version is not None:
pulumi.set(__self__, "version", version)
if worker_auto_renew is not None:
pulumi.set(__self__, "worker_auto_renew", worker_auto_renew)
if worker_auto_renew_period is not None:
pulumi.set(__self__, "worker_auto_renew_period", worker_auto_renew_period)
if worker_data_disk_category is not None:
pulumi.set(__self__, "worker_data_disk_category", worker_data_disk_category)
if worker_data_disk_size is not None:
pulumi.set(__self__, "worker_data_disk_size", worker_data_disk_size)
if worker_data_disks is not None:
pulumi.set(__self__, "worker_data_disks", worker_data_disks)
if worker_disk_category is not None:
pulumi.set(__self__, "worker_disk_category", worker_disk_category)
if worker_disk_performance_level is not None:
pulumi.set(__self__, "worker_disk_performance_level", worker_disk_performance_level)
if worker_disk_size is not None:
pulumi.set(__self__, "worker_disk_size", worker_disk_size)
if worker_disk_snapshot_policy_id is not None:
pulumi.set(__self__, "worker_disk_snapshot_policy_id", worker_disk_snapshot_policy_id)
if worker_instance_charge_type is not None:
pulumi.set(__self__, "worker_instance_charge_type", worker_instance_charge_type)
if worker_period is not None:
pulumi.set(__self__, "worker_period", worker_period)
if worker_period_unit is not None:
pulumi.set(__self__, "worker_period_unit", worker_period_unit)
@property
@pulumi.getter(name="masterInstanceTypes")
def master_instance_types(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
The instance type of master node. Specify one type for single AZ Cluster, three types for MultiAZ Cluster.
"""
return pulumi.get(self, "master_instance_types")
@master_instance_types.setter
def master_instance_types(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "master_instance_types", value)
@property
@pulumi.getter(name="masterVswitchIds")
def master_vswitch_ids(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
return pulumi.get(self, "master_vswitch_ids")
@master_vswitch_ids.setter
def master_vswitch_ids(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "master_vswitch_ids", value)
@property
@pulumi.getter(name="workerInstanceTypes")
def worker_instance_types(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
The instance type of worker node. Specify one type for single AZ Cluster, three types for MultiAZ Cluster.
"""
return pulumi.get(self, "worker_instance_types")
@worker_instance_types.setter
def worker_instance_types(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "worker_instance_types", value)
@property
@pulumi.getter(name="workerNumber")
def worker_number(self) -> pulumi.Input[int]:
"""
The number of worker nodes in the kubernetes cluster. Default to 3. It is limited to 50; if you want to enlarge it, please apply for a whitelist or contact us.
"""
return pulumi.get(self, "worker_number")
@worker_number.setter
def worker_number(self, value: pulumi.Input[int]):
pulumi.set(self, "worker_number", value)
@property
@pulumi.getter(name="workerVswitchIds")
def worker_vswitch_ids(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
return pulumi.get(self, "worker_vswitch_ids")
@worker_vswitch_ids.setter
def worker_vswitch_ids(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "worker_vswitch_ids", value)
@property
@pulumi.getter
def addons(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesAddonArgs']]]]:
return pulumi.get(self, "addons")
@addons.setter
def addons(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesAddonArgs']]]]):
pulumi.set(self, "addons", value)
@property
@pulumi.getter(name="apiAudiences")
def api_audiences(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of API audiences for [Service Account Token Volume Projection](https://www.alibabacloud.com/help/doc-detail/160384.htm). Set this to `["kubernetes.default.svc"]` if you want to enable the Token Volume Projection feature (requires specifying `service_account_issuer` as well).
"""
return pulumi.get(self, "api_audiences")
@api_audiences.setter
def api_audiences(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "api_audiences", value)
@property
@pulumi.getter(name="availabilityZone")
def availability_zone(self) -> Optional[pulumi.Input[str]]:
"""
The zone where the new kubernetes cluster will be located. If it is not specified, `vswitch_ids` should be set, and its value will be the vswitch's zone.
"""
return pulumi.get(self, "availability_zone")
@availability_zone.setter
def availability_zone(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "availability_zone", value)
@property
@pulumi.getter(name="clientCert")
def client_cert(self) -> Optional[pulumi.Input[str]]:
"""
The path of client certificate, like `~/.kube/client-cert.pem`.
"""
return pulumi.get(self, "client_cert")
@client_cert.setter
def client_cert(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_cert", value)
@property
@pulumi.getter(name="clientKey")
def client_key(self) -> Optional[pulumi.Input[str]]:
"""
The path of client key, like `~/.kube/client-key.pem`.
"""
return pulumi.get(self, "client_key")
@client_key.setter
def client_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_key", value)
@property
@pulumi.getter(name="clusterCaCert")
def cluster_ca_cert(self) -> Optional[pulumi.Input[str]]:
"""
The path of cluster ca certificate, like `~/.kube/cluster-ca-cert.pem`
"""
return pulumi.get(self, "cluster_ca_cert")
@cluster_ca_cert.setter
def cluster_ca_cert(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_ca_cert", value)
@property
@pulumi.getter(name="clusterDomain")
def cluster_domain(self) -> Optional[pulumi.Input[str]]:
"""
Cluster local domain name. Defaults to `cluster.local`. A domain name consists of one or more sections separated by a dot (.); each section can be up to 63 characters long, may contain lowercase letters, numerals, and hyphens (-), and must begin and end with a lowercase letter or numeral.
"""
return pulumi.get(self, "cluster_domain")
@cluster_domain.setter
def cluster_domain(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_domain", value)
@property
@pulumi.getter(name="cpuPolicy")
def cpu_policy(self) -> Optional[pulumi.Input[str]]:
"""
Kubelet cpu policy. For Kubernetes 1.12.6 and later, its valid value is either `static` or `none`. Default to `none`.
"""
return pulumi.get(self, "cpu_policy")
@cpu_policy.setter
def cpu_policy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cpu_policy", value)
@property
@pulumi.getter(name="customSan")
def custom_san(self) -> Optional[pulumi.Input[str]]:
"""
Customize the certificate SAN, multiple IP or domain names are separated by English commas (,).
"""
return pulumi.get(self, "custom_san")
@custom_san.setter
def custom_san(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "custom_san", value)
@property
@pulumi.getter(name="deletionProtection")
def deletion_protection(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to enable cluster deletion protection.
"""
return pulumi.get(self, "deletion_protection")
@deletion_protection.setter
def deletion_protection(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "deletion_protection", value)
@property
@pulumi.getter(name="enableSsh")
def enable_ssh(self) -> Optional[pulumi.Input[bool]]:
"""
Enable login to the node through SSH. Default to `false`.
"""
return pulumi.get(self, "enable_ssh")
@enable_ssh.setter
def enable_ssh(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_ssh", value)
@property
@pulumi.getter(name="excludeAutoscalerNodes")
def exclude_autoscaler_nodes(self) -> Optional[pulumi.Input[bool]]:
"""
Exclude autoscaler nodes from `worker_nodes`. Default to `false`.
"""
return pulumi.get(self, "exclude_autoscaler_nodes")
@exclude_autoscaler_nodes.setter
def exclude_autoscaler_nodes(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "exclude_autoscaler_nodes", value)
@property
@pulumi.getter(name="imageId")
def image_id(self) -> Optional[pulumi.Input[str]]:
"""
Custom image support. Must be based on CentOS7 or AliyunLinux2.
"""
return pulumi.get(self, "image_id")
@image_id.setter
def image_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "image_id", value)
@property
@pulumi.getter(name="installCloudMonitor")
def install_cloud_monitor(self) -> Optional[pulumi.Input[bool]]:
"""
Install cloud monitor agent on ECS. Default to `true`.
"""
return pulumi.get(self, "install_cloud_monitor")
@install_cloud_monitor.setter
def install_cloud_monitor(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "install_cloud_monitor", value)
@property
@pulumi.getter(name="isEnterpriseSecurityGroup")
def is_enterprise_security_group(self) -> Optional[pulumi.Input[bool]]:
"""
Enable to create advanced security group. default: false. See [Advanced security group](https://www.alibabacloud.com/help/doc-detail/120621.htm).
"""
return pulumi.get(self, "is_enterprise_security_group")
@is_enterprise_security_group.setter
def is_enterprise_security_group(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_enterprise_security_group", value)
@property
@pulumi.getter(name="keyName")
def key_name(self) -> Optional[pulumi.Input[str]]:
"""
The keypair of ssh login cluster node, you have to create it first. You have to specify one of `password` `key_name` `kms_encrypted_password` fields.
"""
return pulumi.get(self, "key_name")
@key_name.setter
def key_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_name", value)
@property
@pulumi.getter(name="kmsEncryptedPassword")
def kms_encrypted_password(self) -> Optional[pulumi.Input[str]]:
"""
A KMS-encrypted password used for the cs kubernetes cluster. You have to specify one of the `password`, `key_name`, or `kms_encrypted_password` fields.
"""
return pulumi.get(self, "kms_encrypted_password")
@kms_encrypted_password.setter
def kms_encrypted_password(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kms_encrypted_password", value)
@property
@pulumi.getter(name="kmsEncryptionContext")
def kms_encryption_context(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
A KMS encryption context used to decrypt `kms_encrypted_password` before creating or updating a cs kubernetes with `kms_encrypted_password`. See [Encryption Context](https://www.alibabacloud.com/help/doc-detail/42975.htm). It is valid when `kms_encrypted_password` is set.
"""
return pulumi.get(self, "kms_encryption_context")
@kms_encryption_context.setter
def kms_encryption_context(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "kms_encryption_context", value)
@property
@pulumi.getter(name="kubeConfig")
def kube_config(self) -> Optional[pulumi.Input[str]]:
"""
The path of kube config, like `~/.kube/config`.
"""
return pulumi.get(self, "kube_config")
@kube_config.setter
def kube_config(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kube_config", value)
@property
@pulumi.getter(name="loadBalancerSpec")
def load_balancer_spec(self) -> Optional[pulumi.Input[str]]:
"""
The cluster api server load balance instance specification, default `slb.s1.small`. For more information on how to select a LB instance specification, see [SLB instance overview](https://help.aliyun.com/document_detail/85931.html).
"""
return pulumi.get(self, "load_balancer_spec")
@load_balancer_spec.setter
def load_balancer_spec(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "load_balancer_spec", value)
@property
@pulumi.getter(name="masterAutoRenew")
def master_auto_renew(self) -> Optional[pulumi.Input[bool]]:
"""
Enable master payment auto-renew, defaults to false.
"""
return pulumi.get(self, "master_auto_renew")
@master_auto_renew.setter
def master_auto_renew(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "master_auto_renew", value)
@property
@pulumi.getter(name="masterAutoRenewPeriod")
def master_auto_renew_period(self) -> Optional[pulumi.Input[int]]:
"""
Master payment auto-renew period, it can be one of {1, 2, 3, 6, 12}.
"""
return pulumi.get(self, "master_auto_renew_period")
@master_auto_renew_period.setter
def master_auto_renew_period(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "master_auto_renew_period", value)
@property
@pulumi.getter(name="masterDiskCategory")
def master_disk_category(self) -> Optional[pulumi.Input[str]]:
"""
The system disk category of master node. Its valid value are `cloud_ssd`, `cloud_essd` and `cloud_efficiency`. Default to `cloud_efficiency`.
"""
return pulumi.get(self, "master_disk_category")
@master_disk_category.setter
def master_disk_category(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "master_disk_category", value)
@property
@pulumi.getter(name="masterDiskPerformanceLevel")
def master_disk_performance_level(self) -> Optional[pulumi.Input[str]]:
"""
Master node system disk performance level. When `master_disk_category` values `cloud_essd`, the optional values are `PL0`, `PL1`, `PL2` or `PL3`, but the specific performance level is related to the disk capacity. For more information, see [Enhanced SSDs](https://www.alibabacloud.com/help/doc-detail/122389.htm). Default is `PL1`.
"""
return pulumi.get(self, "master_disk_performance_level")
@master_disk_performance_level.setter
def master_disk_performance_level(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "master_disk_performance_level", value)
@property
@pulumi.getter(name="masterDiskSize")
def master_disk_size(self) -> Optional[pulumi.Input[int]]:
"""
The system disk size of master node. Its valid value range [20~500] in GB. Default to 20.
"""
return pulumi.get(self, "master_disk_size")
@master_disk_size.setter
def master_disk_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "master_disk_size", value)
@property
@pulumi.getter(name="masterDiskSnapshotPolicyId")
def master_disk_snapshot_policy_id(self) -> Optional[pulumi.Input[str]]:
"""
Master node system disk auto snapshot policy.
"""
return pulumi.get(self, "master_disk_snapshot_policy_id")
@master_disk_snapshot_policy_id.setter
def master_disk_snapshot_policy_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "master_disk_snapshot_policy_id", value)
@property
@pulumi.getter(name="masterInstanceChargeType")
def master_instance_charge_type(self) -> Optional[pulumi.Input[str]]:
"""
Master payment type, either `PostPaid` or `PrePaid`; defaults to `PostPaid`. If the value is `PrePaid`, the fields `master_period`, `master_period_unit`, `master_auto_renew` and `master_auto_renew_period` are required.
"""
return pulumi.get(self, "master_instance_charge_type")
@master_instance_charge_type.setter
def master_instance_charge_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "master_instance_charge_type", value)
@property
@pulumi.getter(name="masterPeriod")
def master_period(self) -> Optional[pulumi.Input[int]]:
"""
Master payment period.Its valid value is one of {1, 2, 3, 6, 12, 24, 36, 48, 60}.
"""
return pulumi.get(self, "master_period")
@master_period.setter
def master_period(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "master_period", value)
@property
@pulumi.getter(name="masterPeriodUnit")
def master_period_unit(self) -> Optional[pulumi.Input[str]]:
"""
Master payment period unit, the valid value is `Month`.
"""
return pulumi.get(self, "master_period_unit")
@master_period_unit.setter
def master_period_unit(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "master_period_unit", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The kubernetes cluster's name. It is unique in one Alicloud account.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="namePrefix")
def name_prefix(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "name_prefix")
@name_prefix.setter
def name_prefix(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name_prefix", value)
@property
@pulumi.getter(name="newNatGateway")
def new_nat_gateway(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to create a new NAT gateway while creating the kubernetes cluster. Default to true. Since the OpenAPI endpoints in Alibaba Cloud are not all reachable from the intranet, turning this option on is a good choice.
"""
return pulumi.get(self, "new_nat_gateway")
@new_nat_gateway.setter
def new_nat_gateway(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "new_nat_gateway", value)
@property
@pulumi.getter(name="nodeCidrMask")
def node_cidr_mask(self) -> Optional[pulumi.Input[int]]:
"""
The node CIDR mask that specifies how many pods can run on a single node. Values of 24-28 are allowed. 24 means 2^(32-24)-1=255, so the node can run at most 255 pods. default: 24
"""
return pulumi.get(self, "node_cidr_mask")
@node_cidr_mask.setter
def node_cidr_mask(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "node_cidr_mask", value)
@property
@pulumi.getter(name="nodeNameMode")
def node_name_mode(self) -> Optional[pulumi.Input[str]]:
"""
Each node name consists of a prefix, an IP substring, and a suffix. For example, if the node IP address is 192.168.0.55, the prefix is aliyun.com, IP substring length is 5, and the suffix is test, the node name will be `aliyun.com00055test`.
"""
return pulumi.get(self, "node_name_mode")
@node_name_mode.setter
def node_name_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "node_name_mode", value)
@property
@pulumi.getter(name="nodePortRange")
def node_port_range(self) -> Optional[pulumi.Input[str]]:
"""
The service port range of nodes, valid values: `30000` to `65535`. Default to `30000-32767`.
"""
return pulumi.get(self, "node_port_range")
@node_port_range.setter
def node_port_range(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "node_port_range", value)
@property
@pulumi.getter(name="osType")
def os_type(self) -> Optional[pulumi.Input[str]]:
"""
The operating system of the nodes that run pods, its valid value is either `Linux` or `Windows`. Default to `Linux`.
"""
return pulumi.get(self, "os_type")
@os_type.setter
def os_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "os_type", value)
@property
@pulumi.getter
def password(self) -> Optional[pulumi.Input[str]]:
"""
The password of ssh login cluster node. You have to specify one of `password` `key_name` `kms_encrypted_password` fields.
"""
return pulumi.get(self, "password")
@password.setter
def password(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "password", value)
@property
@pulumi.getter
def platform(self) -> Optional[pulumi.Input[str]]:
"""
The architecture of the nodes that run pods, its valid value is either `CentOS` or `AliyunLinux`. Default to `CentOS`.
"""
return pulumi.get(self, "platform")
@platform.setter
def platform(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "platform", value)
@property
@pulumi.getter(name="podCidr")
def pod_cidr(self) -> Optional[pulumi.Input[str]]:
"""
- [Flannel Specific] The CIDR block for the pod network when using Flannel.
"""
return pulumi.get(self, "pod_cidr")
@pod_cidr.setter
def pod_cidr(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "pod_cidr", value)
@property
@pulumi.getter(name="podVswitchIds")
def pod_vswitch_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
- [Terway Specific] The vswitches for the pod network when using Terway. Note that `pod_vswitch_ids` cannot be equal to `worker_vswitch_ids` or `master_vswitch_ids`, but must be in the same availability zones.
"""
return pulumi.get(self, "pod_vswitch_ids")
@pod_vswitch_ids.setter
def pod_vswitch_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "pod_vswitch_ids", value)
@property
@pulumi.getter(name="proxyMode")
def proxy_mode(self) -> Optional[pulumi.Input[str]]:
"""
Proxy mode is option of kube-proxy. options: iptables | ipvs. default: ipvs.
"""
return pulumi.get(self, "proxy_mode")
@proxy_mode.setter
def proxy_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "proxy_mode", value)
@property
@pulumi.getter(name="rdsInstances")
def rds_instances(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
RDS instance list, You can choose which RDS instances whitelist to add instances to.
"""
return pulumi.get(self, "rds_instances")
@rds_instances.setter
def rds_instances(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "rds_instances", value)
@property
@pulumi.getter(name="resourceGroupId")
def resource_group_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the resource group; by default these cloud resources are automatically assigned to the default resource group.
"""
return pulumi.get(self, "resource_group_id")
@resource_group_id.setter
def resource_group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_id", value)
@property
@pulumi.getter
def runtime(self) -> Optional[pulumi.Input['KubernetesRuntimeArgs']]:
"""
The runtime of containers. Default to `docker`. If you select another container runtime, see [How do I select between Docker and Sandboxed-Container](https://www.alibabacloud.com/help/doc-detail/160313.htm?spm=a2c63.p38356.b99.440.22563866AJkBgI). Detailed below.
"""
return pulumi.get(self, "runtime")
@runtime.setter
def runtime(self, value: Optional[pulumi.Input['KubernetesRuntimeArgs']]):
pulumi.set(self, "runtime", value)
@property
@pulumi.getter(name="securityGroupId")
def security_group_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the security group to which the ECS instances in the cluster belong. If it is not specified, a new Security group will be built.
"""
return pulumi.get(self, "security_group_id")
@security_group_id.setter
def security_group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "security_group_id", value)
@property
@pulumi.getter(name="serviceAccountIssuer")
def service_account_issuer(self) -> Optional[pulumi.Input[str]]:
"""
The issuer of the Service Account token for [Service Account Token Volume Projection](https://www.alibabacloud.com/help/doc-detail/160384.htm), corresponds to the `iss` field in the token payload. Set this to `"kubernetes.default.svc"` to enable the Token Volume Projection feature (requires specifying `api_audiences` as well).
"""
return pulumi.get(self, "service_account_issuer")
@service_account_issuer.setter
def service_account_issuer(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service_account_issuer", value)
@property
@pulumi.getter(name="serviceCidr")
def service_cidr(self) -> Optional[pulumi.Input[str]]:
"""
The CIDR block for the service network. It cannot be duplicated with the VPC CIDR and CIDR used by Kubernetes cluster in VPC, cannot be modified after creation.
"""
return pulumi.get(self, "service_cidr")
@service_cidr.setter
def service_cidr(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service_cidr", value)
@property
@pulumi.getter(name="slbInternetEnabled")
def slb_internet_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to create internet load balancer for API Server. Default to true.
"""
return pulumi.get(self, "slb_internet_enabled")
@slb_internet_enabled.setter
def slb_internet_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "slb_internet_enabled", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
Default nil, A map of tags assigned to the kubernetes cluster and work nodes. Detailed below.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter
def taints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesTaintArgs']]]]:
"""
Taints ensure pods are not scheduled onto inappropriate nodes. One or more taints are applied to a node; this marks that the node should not accept any pods that do not tolerate the taints. For more information, see [Taints and Tolerations](https://kubernetes.io/docs/concepts/scheduling-eviction/taint-and-toleration/). Detailed below.
"""
return pulumi.get(self, "taints")
@taints.setter
def taints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesTaintArgs']]]]):
pulumi.set(self, "taints", value)
@property
@pulumi.getter
def timezone(self) -> Optional[pulumi.Input[str]]:
"""
When you create a cluster, set the time zones for the Master and Worker nodes. The managed node time zone can only be set when the cluster is created; once the cluster is created, you can only change the time zone of the Worker nodes.
"""
return pulumi.get(self, "timezone")
@timezone.setter
def timezone(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "timezone", value)
@property
@pulumi.getter(name="userCa")
def user_ca(self) -> Optional[pulumi.Input[str]]:
"""
The path of customized CA cert, you can use this CA to sign client certs to connect your cluster.
"""
return pulumi.get(self, "user_ca")
@user_ca.setter
def user_ca(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "user_ca", value)
@property
@pulumi.getter(name="userData")
def user_data(self) -> Optional[pulumi.Input[str]]:
"""
Custom data that can execute on nodes. For more information, see [Prepare user data](https://www.alibabacloud.com/help/doc-detail/49121.htm).
"""
return pulumi.get(self, "user_data")
@user_data.setter
def user_data(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "user_data", value)
@property
@pulumi.getter
def version(self) -> Optional[pulumi.Input[str]]:
"""
Desired Kubernetes version. If you do not specify a value, the latest available version at resource creation is used and no upgrades will occur except you set a higher version number. The value must be configured and increased to upgrade the version when desired. Downgrades are not supported by ACK.
"""
return pulumi.get(self, "version")
@version.setter
def version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "version", value)
@property
@pulumi.getter(name="workerAutoRenew")
def worker_auto_renew(self) -> Optional[pulumi.Input[bool]]:
"""
Enable worker payment auto-renew, defaults to false.
"""
return pulumi.get(self, "worker_auto_renew")
@worker_auto_renew.setter
def worker_auto_renew(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "worker_auto_renew", value)
@property
@pulumi.getter(name="workerAutoRenewPeriod")
def worker_auto_renew_period(self) -> Optional[pulumi.Input[int]]:
"""
Worker payment auto-renew period, it can be one of {1, 2, 3, 6, 12}.
"""
return pulumi.get(self, "worker_auto_renew_period")
@worker_auto_renew_period.setter
def worker_auto_renew_period(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "worker_auto_renew_period", value)
@property
@pulumi.getter(name="workerDataDiskCategory")
def worker_data_disk_category(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "worker_data_disk_category")
@worker_data_disk_category.setter
def worker_data_disk_category(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "worker_data_disk_category", value)
@property
@pulumi.getter(name="workerDataDiskSize")
def worker_data_disk_size(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "worker_data_disk_size")
@worker_data_disk_size.setter
def worker_data_disk_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "worker_data_disk_size", value)
@property
@pulumi.getter(name="workerDataDisks")
def worker_data_disks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesWorkerDataDiskArgs']]]]:
"""
The data disk configurations of worker nodes, such as the disk type and disk size.
"""
return pulumi.get(self, "worker_data_disks")
@worker_data_disks.setter
def worker_data_disks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesWorkerDataDiskArgs']]]]):
pulumi.set(self, "worker_data_disks", value)
@property
@pulumi.getter(name="workerDiskCategory")
def worker_disk_category(self) -> Optional[pulumi.Input[str]]:
"""
The system disk category of worker node. Its valid value are `cloud`, `cloud_ssd`, `cloud_essd` and `cloud_efficiency`. Default to `cloud_efficiency`.
"""
return pulumi.get(self, "worker_disk_category")
@worker_disk_category.setter
def worker_disk_category(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "worker_disk_category", value)
@property
@pulumi.getter(name="workerDiskPerformanceLevel")
def worker_disk_performance_level(self) -> Optional[pulumi.Input[str]]:
"""
Worker node system disk performance level. When `worker_disk_category` is `cloud_essd`, the valid values are `PL0`, `PL1`, `PL2` or `PL3`, but the available level depends on the disk capacity. For more information, see [Enhanced SSDs](https://www.alibabacloud.com/help/doc-detail/122389.htm). Defaults to `PL1`.
"""
return pulumi.get(self, "worker_disk_performance_level")
@worker_disk_performance_level.setter
def worker_disk_performance_level(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "worker_disk_performance_level", value)
@property
@pulumi.getter(name="workerDiskSize")
def worker_disk_size(self) -> Optional[pulumi.Input[int]]:
"""
The system disk size of worker nodes. Valid values range from 40 to 500, in GB. Defaults to 40.
"""
return pulumi.get(self, "worker_disk_size")
@worker_disk_size.setter
def worker_disk_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "worker_disk_size", value)
@property
@pulumi.getter(name="workerDiskSnapshotPolicyId")
def worker_disk_snapshot_policy_id(self) -> Optional[pulumi.Input[str]]:
"""
Worker node system disk auto snapshot policy.
"""
return pulumi.get(self, "worker_disk_snapshot_policy_id")
@worker_disk_snapshot_policy_id.setter
def worker_disk_snapshot_policy_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "worker_disk_snapshot_policy_id", value)
@property
@pulumi.getter(name="workerInstanceChargeType")
def worker_instance_charge_type(self) -> Optional[pulumi.Input[str]]:
"""
Worker payment type. Its valid value is either `PostPaid` or `PrePaid`. Defaults to `PostPaid`. If the value is `PrePaid`, the fields `worker_period`, `worker_period_unit`, `worker_auto_renew` and `worker_auto_renew_period` are required.
"""
return pulumi.get(self, "worker_instance_charge_type")
@worker_instance_charge_type.setter
def worker_instance_charge_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "worker_instance_charge_type", value)
@property
@pulumi.getter(name="workerPeriod")
def worker_period(self) -> Optional[pulumi.Input[int]]:
"""
Worker payment period. The unit is `Month`. Its valid value is one of {1, 2, 3, 6, 12, 24, 36, 48, 60}.
"""
return pulumi.get(self, "worker_period")
@worker_period.setter
def worker_period(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "worker_period", value)
@property
@pulumi.getter(name="workerPeriodUnit")
def worker_period_unit(self) -> Optional[pulumi.Input[str]]:
"""
Worker payment period unit, the valid value is `Month`.
"""
return pulumi.get(self, "worker_period_unit")
@worker_period_unit.setter
def worker_period_unit(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "worker_period_unit", value)
@pulumi.input_type
class _KubernetesState:
def __init__(__self__, *,
addons: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesAddonArgs']]]] = None,
api_audiences: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
availability_zone: Optional[pulumi.Input[str]] = None,
certificate_authority: Optional[pulumi.Input['KubernetesCertificateAuthorityArgs']] = None,
client_cert: Optional[pulumi.Input[str]] = None,
client_key: Optional[pulumi.Input[str]] = None,
cluster_ca_cert: Optional[pulumi.Input[str]] = None,
cluster_domain: Optional[pulumi.Input[str]] = None,
connections: Optional[pulumi.Input['KubernetesConnectionsArgs']] = None,
cpu_policy: Optional[pulumi.Input[str]] = None,
custom_san: Optional[pulumi.Input[str]] = None,
deletion_protection: Optional[pulumi.Input[bool]] = None,
enable_ssh: Optional[pulumi.Input[bool]] = None,
exclude_autoscaler_nodes: Optional[pulumi.Input[bool]] = None,
image_id: Optional[pulumi.Input[str]] = None,
install_cloud_monitor: Optional[pulumi.Input[bool]] = None,
is_enterprise_security_group: Optional[pulumi.Input[bool]] = None,
key_name: Optional[pulumi.Input[str]] = None,
kms_encrypted_password: Optional[pulumi.Input[str]] = None,
kms_encryption_context: Optional[pulumi.Input[Mapping[str, Any]]] = None,
kube_config: Optional[pulumi.Input[str]] = None,
load_balancer_spec: Optional[pulumi.Input[str]] = None,
master_auto_renew: Optional[pulumi.Input[bool]] = None,
master_auto_renew_period: Optional[pulumi.Input[int]] = None,
master_disk_category: Optional[pulumi.Input[str]] = None,
master_disk_performance_level: Optional[pulumi.Input[str]] = None,
master_disk_size: Optional[pulumi.Input[int]] = None,
master_disk_snapshot_policy_id: Optional[pulumi.Input[str]] = None,
master_instance_charge_type: Optional[pulumi.Input[str]] = None,
master_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
master_nodes: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesMasterNodeArgs']]]] = None,
master_period: Optional[pulumi.Input[int]] = None,
master_period_unit: Optional[pulumi.Input[str]] = None,
master_vswitch_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
name_prefix: Optional[pulumi.Input[str]] = None,
nat_gateway_id: Optional[pulumi.Input[str]] = None,
new_nat_gateway: Optional[pulumi.Input[bool]] = None,
node_cidr_mask: Optional[pulumi.Input[int]] = None,
node_name_mode: Optional[pulumi.Input[str]] = None,
node_port_range: Optional[pulumi.Input[str]] = None,
os_type: Optional[pulumi.Input[str]] = None,
password: Optional[pulumi.Input[str]] = None,
platform: Optional[pulumi.Input[str]] = None,
pod_cidr: Optional[pulumi.Input[str]] = None,
pod_vswitch_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
proxy_mode: Optional[pulumi.Input[str]] = None,
rds_instances: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
resource_group_id: Optional[pulumi.Input[str]] = None,
runtime: Optional[pulumi.Input['KubernetesRuntimeArgs']] = None,
security_group_id: Optional[pulumi.Input[str]] = None,
service_account_issuer: Optional[pulumi.Input[str]] = None,
service_cidr: Optional[pulumi.Input[str]] = None,
slb_id: Optional[pulumi.Input[str]] = None,
slb_internet: Optional[pulumi.Input[str]] = None,
slb_internet_enabled: Optional[pulumi.Input[bool]] = None,
slb_intranet: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
taints: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesTaintArgs']]]] = None,
timezone: Optional[pulumi.Input[str]] = None,
user_ca: Optional[pulumi.Input[str]] = None,
user_data: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[str]] = None,
vpc_id: Optional[pulumi.Input[str]] = None,
worker_auto_renew: Optional[pulumi.Input[bool]] = None,
worker_auto_renew_period: Optional[pulumi.Input[int]] = None,
worker_data_disk_category: Optional[pulumi.Input[str]] = None,
worker_data_disk_size: Optional[pulumi.Input[int]] = None,
worker_data_disks: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesWorkerDataDiskArgs']]]] = None,
worker_disk_category: Optional[pulumi.Input[str]] = None,
worker_disk_performance_level: Optional[pulumi.Input[str]] = None,
worker_disk_size: Optional[pulumi.Input[int]] = None,
worker_disk_snapshot_policy_id: Optional[pulumi.Input[str]] = None,
worker_instance_charge_type: Optional[pulumi.Input[str]] = None,
worker_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
worker_nodes: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesWorkerNodeArgs']]]] = None,
worker_number: Optional[pulumi.Input[int]] = None,
worker_period: Optional[pulumi.Input[int]] = None,
worker_period_unit: Optional[pulumi.Input[str]] = None,
worker_ram_role_name: Optional[pulumi.Input[str]] = None,
worker_vswitch_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):
"""
Input properties used for looking up and filtering Kubernetes resources.
:param pulumi.Input[Sequence[pulumi.Input[str]]] api_audiences: A list of API audiences for [Service Account Token Volume Projection](https://www.alibabacloud.com/help/doc-detail/160384.htm). Set this to `["kubernetes.default.svc"]` if you want to enable the Token Volume Projection feature (requires specifying `service_account_issuer` as well).
:param pulumi.Input[str] availability_zone: The zone where the new Kubernetes cluster will be located. If it is not specified, the `vswitch_ids` should be set, and the vswitch's zone will be used.
:param pulumi.Input['KubernetesCertificateAuthorityArgs'] certificate_authority: (Available in 1.105.0+) Nested attribute containing certificate authority data for your cluster.
:param pulumi.Input[str] client_cert: The path of client certificate, like `~/.kube/client-cert.pem`.
:param pulumi.Input[str] client_key: The path of client key, like `~/.kube/client-key.pem`.
:param pulumi.Input[str] cluster_ca_cert: The path of cluster ca certificate, like `~/.kube/cluster-ca-cert.pem`
:param pulumi.Input[str] cluster_domain: Cluster local domain name. Defaults to `cluster.local`. A domain name consists of one or more sections separated by a period (.); each section may be up to 63 characters long and may contain lowercase letters, digits, and hyphens (-), and must begin and end with a lowercase letter or digit.
:param pulumi.Input['KubernetesConnectionsArgs'] connections: Map of kubernetes cluster connection information.
:param pulumi.Input[str] cpu_policy: Kubelet cpu policy. For Kubernetes 1.12.6 and later, its valid value is either `static` or `none`. Default to `none`.
:param pulumi.Input[str] custom_san: Customize the certificate SAN; multiple IPs or domain names are separated by commas (,).
:param pulumi.Input[bool] deletion_protection: Whether to enable cluster deletion protection.
:param pulumi.Input[bool] enable_ssh: Enable login to the node through SSH. Default to `false`.
:param pulumi.Input[bool] exclude_autoscaler_nodes: Exclude autoscaler nodes from `worker_nodes`. Default to `false`.
:param pulumi.Input[str] image_id: Custom image support. The image must be based on CentOS 7 or AliyunLinux 2.
:param pulumi.Input[bool] install_cloud_monitor: Install cloud monitor agent on ECS. Default to `true`.
:param pulumi.Input[bool] is_enterprise_security_group: Whether to create an advanced security group. Defaults to `false`. See [Advanced security group](https://www.alibabacloud.com/help/doc-detail/120621.htm).
:param pulumi.Input[str] key_name: The key pair used for SSH login to cluster nodes; you have to create it first. You have to specify one of the `password`, `key_name` and `kms_encrypted_password` fields.
:param pulumi.Input[str] kms_encrypted_password: A KMS-encrypted password used for the CS Kubernetes cluster. You have to specify one of the `password`, `key_name` and `kms_encrypted_password` fields.
:param pulumi.Input[Mapping[str, Any]] kms_encryption_context: A KMS encryption context used to decrypt `kms_encrypted_password` before creating or updating a CS Kubernetes cluster with `kms_encrypted_password`. See [Encryption Context](https://www.alibabacloud.com/help/doc-detail/42975.htm). It is valid when `kms_encrypted_password` is set.
:param pulumi.Input[str] kube_config: The path of kube config, like `~/.kube/config`.
:param pulumi.Input[str] load_balancer_spec: The load balancer instance specification of the cluster API server, default `slb.s1.small`. For more information on how to select an SLB instance specification, see [SLB instance overview](https://help.aliyun.com/document_detail/85931.html).
:param pulumi.Input[bool] master_auto_renew: Enable master payment auto-renew, defaults to false.
:param pulumi.Input[int] master_auto_renew_period: Master payment auto-renew period, it can be one of {1, 2, 3, 6, 12}.
:param pulumi.Input[str] master_disk_category: The system disk category of master nodes. Its valid values are `cloud_ssd`, `cloud_essd` and `cloud_efficiency`. Defaults to `cloud_efficiency`.
:param pulumi.Input[str] master_disk_performance_level: Master node system disk performance level. When `master_disk_category` is `cloud_essd`, the valid values are `PL0`, `PL1`, `PL2` or `PL3`, but the available level depends on the disk capacity. For more information, see [Enhanced SSDs](https://www.alibabacloud.com/help/doc-detail/122389.htm). Defaults to `PL1`.
:param pulumi.Input[int] master_disk_size: The system disk size of master nodes. Valid values range from 20 to 500, in GB. Defaults to 20.
:param pulumi.Input[str] master_disk_snapshot_policy_id: Master node system disk auto snapshot policy.
:param pulumi.Input[str] master_instance_charge_type: Master payment type. Its valid value is either `PostPaid` or `PrePaid`. Defaults to `PostPaid`. If the value is `PrePaid`, the fields `master_period`, `master_period_unit`, `master_auto_renew` and `master_auto_renew_period` are required.
:param pulumi.Input[Sequence[pulumi.Input[str]]] master_instance_types: The instance type of master node. Specify one type for single AZ Cluster, three types for MultiAZ Cluster.
:param pulumi.Input[Sequence[pulumi.Input['KubernetesMasterNodeArgs']]] master_nodes: List of cluster master nodes.
:param pulumi.Input[int] master_period: Master payment period. Its valid value is one of {1, 2, 3, 6, 12, 24, 36, 48, 60}.
:param pulumi.Input[str] master_period_unit: Master payment period unit, the valid value is `Month`.
:param pulumi.Input[str] name: The Kubernetes cluster's name. It must be unique within one Alicloud account.
:param pulumi.Input[str] nat_gateway_id: The ID of nat gateway used to launch kubernetes cluster.
:param pulumi.Input[bool] new_nat_gateway: Whether to create a new NAT gateway while creating the Kubernetes cluster. Defaults to `true`. Because not all Alibaba Cloud OpenAPI endpoints are reachable from the intranet, turning this option on is a good choice.
:param pulumi.Input[int] node_cidr_mask: The node CIDR mask, which determines how many pods can run on a single node. Values from 24 to 28 are allowed. For example, 24 means 2^(32-24)-1=255, so the node can run at most 255 pods. Defaults to 24.
:param pulumi.Input[str] node_name_mode: Each node name consists of a prefix, an IP substring, and a suffix. For example, if the node IP address is 192.168.0.55, the prefix is aliyun.com, IP substring length is 5, and the suffix is test, the node name will be `aliyun.com00055test`.
:param pulumi.Input[str] node_port_range: The service port range of nodes, valid values: `30000` to `65535`. Default to `30000-32767`.
:param pulumi.Input[str] os_type: The operating system of the nodes that run pods, its valid value is either `Linux` or `Windows`. Default to `Linux`.
:param pulumi.Input[str] password: The password for SSH login to cluster nodes. You have to specify one of the `password`, `key_name` and `kms_encrypted_password` fields.
:param pulumi.Input[str] platform: The architecture of the nodes that run pods, its valid value is either `CentOS` or `AliyunLinux`. Default to `CentOS`.
:param pulumi.Input[str] pod_cidr: - [Flannel Specific] The CIDR block for the pod network when using Flannel.
:param pulumi.Input[Sequence[pulumi.Input[str]]] pod_vswitch_ids: - [Terway Specific] The vswitches for the pod network when using Terway. Note that `pod_vswitch_ids` cannot be the same as `worker_vswitch_ids` or `master_vswitch_ids`, but they must be in the same availability zones.
:param pulumi.Input[str] proxy_mode: The proxy mode of kube-proxy. Options: `iptables` | `ipvs`. Defaults to `ipvs`.
:param pulumi.Input[Sequence[pulumi.Input[str]]] rds_instances: The list of RDS instances; the cluster nodes are added to the whitelists of the RDS instances you choose.
:param pulumi.Input[str] resource_group_id: The ID of the resource group. By default, these cloud resources are automatically assigned to the default resource group.
:param pulumi.Input['KubernetesRuntimeArgs'] runtime: The runtime of containers. Default to `docker`. If you select another container runtime, see [How do I select between Docker and Sandboxed-Container](https://www.alibabacloud.com/help/doc-detail/160313.htm?spm=a2c63.p38356.b99.440.22563866AJkBgI). Detailed below.
:param pulumi.Input[str] security_group_id: The ID of the security group to which the ECS instances in the cluster belong. If it is not specified, a new security group will be created.
:param pulumi.Input[str] service_account_issuer: The issuer of the Service Account token for [Service Account Token Volume Projection](https://www.alibabacloud.com/help/doc-detail/160384.htm), corresponds to the `iss` field in the token payload. Set this to `"kubernetes.default.svc"` to enable the Token Volume Projection feature (requires specifying `api_audiences` as well).
:param pulumi.Input[str] service_cidr: The CIDR block for the service network. It cannot overlap with the VPC CIDR or with CIDR blocks used by other Kubernetes clusters in the VPC, and it cannot be modified after creation.
:param pulumi.Input[bool] slb_internet_enabled: Whether to create internet load balancer for API Server. Default to true.
:param pulumi.Input[str] slb_intranet: The ID of private load balancer where the current cluster master node is located.
:param pulumi.Input[Mapping[str, Any]] tags: Default nil. A map of tags assigned to the Kubernetes cluster and worker nodes. Detailed below.
:param pulumi.Input[Sequence[pulumi.Input['KubernetesTaintArgs']]] taints: Taints ensure pods are not scheduled onto inappropriate nodes. One or more taints are applied to a node; this marks that the node should not accept any pods that do not tolerate the taints. For more information, see [Taints and Tolerations](https://kubernetes.io/docs/concepts/scheduling-eviction/taint-and-toleration/). Detailed below.
:param pulumi.Input[str] timezone: When you create a cluster, set the time zone for the master and worker nodes. You can only set the managed node time zone when you create the cluster; once the cluster is created, you can only change the time zone of the worker nodes.
:param pulumi.Input[str] user_ca: The path of the customized CA certificate. You can use this CA to sign client certificates for connecting to your cluster.
:param pulumi.Input[str] user_data: Custom data that can execute on nodes. For more information, see [Prepare user data](https://www.alibabacloud.com/help/doc-detail/49121.htm).
:param pulumi.Input[str] version: Desired Kubernetes version. If you do not specify a value, the latest available version at resource creation is used, and no upgrades occur unless you set a higher version number. To upgrade, this value must be set and increased as desired. Downgrades are not supported by ACK.
:param pulumi.Input[str] vpc_id: The ID of VPC where the current cluster is located.
:param pulumi.Input[bool] worker_auto_renew: Enable worker payment auto-renew, defaults to false.
:param pulumi.Input[int] worker_auto_renew_period: Worker payment auto-renew period. Valid values: {1, 2, 3, 6, 12}.
:param pulumi.Input[Sequence[pulumi.Input['KubernetesWorkerDataDiskArgs']]] worker_data_disks: The data disk configurations of worker nodes, such as the disk type and disk size.
:param pulumi.Input[str] worker_disk_category: The system disk category of worker nodes. Its valid values are `cloud`, `cloud_ssd`, `cloud_essd` and `cloud_efficiency`. Defaults to `cloud_efficiency`.
:param pulumi.Input[str] worker_disk_performance_level: Worker node system disk performance level. When `worker_disk_category` is `cloud_essd`, the valid values are `PL0`, `PL1`, `PL2` or `PL3`, but the available level depends on the disk capacity. For more information, see [Enhanced SSDs](https://www.alibabacloud.com/help/doc-detail/122389.htm). Defaults to `PL1`.
:param pulumi.Input[int] worker_disk_size: The system disk size of worker nodes. Valid values range from 40 to 500, in GB. Defaults to 40.
:param pulumi.Input[str] worker_disk_snapshot_policy_id: Worker node system disk auto snapshot policy.
:param pulumi.Input[str] worker_instance_charge_type: Worker payment type. Its valid value is either `PostPaid` or `PrePaid`. Defaults to `PostPaid`. If the value is `PrePaid`, the fields `worker_period`, `worker_period_unit`, `worker_auto_renew` and `worker_auto_renew_period` are required.
:param pulumi.Input[Sequence[pulumi.Input[str]]] worker_instance_types: The instance type of worker node. Specify one type for single AZ Cluster, three types for MultiAZ Cluster.
:param pulumi.Input[Sequence[pulumi.Input['KubernetesWorkerNodeArgs']]] worker_nodes: List of cluster worker nodes.
:param pulumi.Input[int] worker_number: The number of worker nodes in the Kubernetes cluster. Defaults to 3. It is limited to 50; if you need more, apply for a whitelist or contact us.
:param pulumi.Input[int] worker_period: Worker payment period. The unit is `Month`. Its valid value is one of {1, 2, 3, 6, 12, 24, 36, 48, 60}.
:param pulumi.Input[str] worker_period_unit: Worker payment period unit, the valid value is `Month`.
:param pulumi.Input[str] worker_ram_role_name: The RamRole Name attached to worker node.
"""
if addons is not None:
pulumi.set(__self__, "addons", addons)
if api_audiences is not None:
pulumi.set(__self__, "api_audiences", api_audiences)
if availability_zone is not None:
pulumi.set(__self__, "availability_zone", availability_zone)
if certificate_authority is not None:
pulumi.set(__self__, "certificate_authority", certificate_authority)
if client_cert is not None:
pulumi.set(__self__, "client_cert", client_cert)
if client_key is not None:
pulumi.set(__self__, "client_key", client_key)
if cluster_ca_cert is not None:
pulumi.set(__self__, "cluster_ca_cert", cluster_ca_cert)
if cluster_domain is not None:
pulumi.set(__self__, "cluster_domain", cluster_domain)
if connections is not None:
pulumi.set(__self__, "connections", connections)
if cpu_policy is not None:
pulumi.set(__self__, "cpu_policy", cpu_policy)
if custom_san is not None:
pulumi.set(__self__, "custom_san", custom_san)
if deletion_protection is not None:
pulumi.set(__self__, "deletion_protection", deletion_protection)
if enable_ssh is not None:
pulumi.set(__self__, "enable_ssh", enable_ssh)
if exclude_autoscaler_nodes is not None:
pulumi.set(__self__, "exclude_autoscaler_nodes", exclude_autoscaler_nodes)
if image_id is not None:
pulumi.set(__self__, "image_id", image_id)
if install_cloud_monitor is not None:
pulumi.set(__self__, "install_cloud_monitor", install_cloud_monitor)
if is_enterprise_security_group is not None:
pulumi.set(__self__, "is_enterprise_security_group", is_enterprise_security_group)
if key_name is not None:
pulumi.set(__self__, "key_name", key_name)
if kms_encrypted_password is not None:
pulumi.set(__self__, "kms_encrypted_password", kms_encrypted_password)
if kms_encryption_context is not None:
pulumi.set(__self__, "kms_encryption_context", kms_encryption_context)
if kube_config is not None:
pulumi.set(__self__, "kube_config", kube_config)
if load_balancer_spec is not None:
pulumi.set(__self__, "load_balancer_spec", load_balancer_spec)
if master_auto_renew is not None:
pulumi.set(__self__, "master_auto_renew", master_auto_renew)
if master_auto_renew_period is not None:
pulumi.set(__self__, "master_auto_renew_period", master_auto_renew_period)
if master_disk_category is not None:
pulumi.set(__self__, "master_disk_category", master_disk_category)
if master_disk_performance_level is not None:
pulumi.set(__self__, "master_disk_performance_level", master_disk_performance_level)
if master_disk_size is not None:
pulumi.set(__self__, "master_disk_size", master_disk_size)
if master_disk_snapshot_policy_id is not None:
pulumi.set(__self__, "master_disk_snapshot_policy_id", master_disk_snapshot_policy_id)
if master_instance_charge_type is not None:
pulumi.set(__self__, "master_instance_charge_type", master_instance_charge_type)
if master_instance_types is not None:
pulumi.set(__self__, "master_instance_types", master_instance_types)
if master_nodes is not None:
pulumi.set(__self__, "master_nodes", master_nodes)
if master_period is not None:
pulumi.set(__self__, "master_period", master_period)
if master_period_unit is not None:
pulumi.set(__self__, "master_period_unit", master_period_unit)
if master_vswitch_ids is not None:
pulumi.set(__self__, "master_vswitch_ids", master_vswitch_ids)
if name is not None:
pulumi.set(__self__, "name", name)
if name_prefix is not None:
warnings.warn("""Field 'name_prefix' has been deprecated from provider version 1.75.0.""", DeprecationWarning)
pulumi.log.warn("""name_prefix is deprecated: Field 'name_prefix' has been deprecated from provider version 1.75.0.""")
if name_prefix is not None:
pulumi.set(__self__, "name_prefix", name_prefix)
if nat_gateway_id is not None:
pulumi.set(__self__, "nat_gateway_id", nat_gateway_id)
if new_nat_gateway is not None:
pulumi.set(__self__, "new_nat_gateway", new_nat_gateway)
if node_cidr_mask is not None:
pulumi.set(__self__, "node_cidr_mask", node_cidr_mask)
if node_name_mode is not None:
pulumi.set(__self__, "node_name_mode", node_name_mode)
if node_port_range is not None:
pulumi.set(__self__, "node_port_range", node_port_range)
if os_type is not None:
pulumi.set(__self__, "os_type", os_type)
if password is not None:
pulumi.set(__self__, "password", password)
if platform is not None:
pulumi.set(__self__, "platform", platform)
if pod_cidr is not None:
pulumi.set(__self__, "pod_cidr", pod_cidr)
if pod_vswitch_ids is not None:
pulumi.set(__self__, "pod_vswitch_ids", pod_vswitch_ids)
if proxy_mode is not None:
pulumi.set(__self__, "proxy_mode", proxy_mode)
if rds_instances is not None:
pulumi.set(__self__, "rds_instances", rds_instances)
if resource_group_id is not None:
pulumi.set(__self__, "resource_group_id", resource_group_id)
if runtime is not None:
pulumi.set(__self__, "runtime", runtime)
if security_group_id is not None:
pulumi.set(__self__, "security_group_id", security_group_id)
if service_account_issuer is not None:
pulumi.set(__self__, "service_account_issuer", service_account_issuer)
if service_cidr is not None:
pulumi.set(__self__, "service_cidr", service_cidr)
if slb_id is not None:
warnings.warn("""Field 'slb_id' has been deprecated from provider version 1.9.2. New field 'slb_internet' replaces it.""", DeprecationWarning)
pulumi.log.warn("""slb_id is deprecated: Field 'slb_id' has been deprecated from provider version 1.9.2. New field 'slb_internet' replaces it.""")
if slb_id is not None:
pulumi.set(__self__, "slb_id", slb_id)
if slb_internet is not None:
pulumi.set(__self__, "slb_internet", slb_internet)
if slb_internet_enabled is not None:
pulumi.set(__self__, "slb_internet_enabled", slb_internet_enabled)
if slb_intranet is not None:
pulumi.set(__self__, "slb_intranet", slb_intranet)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if taints is not None:
pulumi.set(__self__, "taints", taints)
if timezone is not None:
pulumi.set(__self__, "timezone", timezone)
if user_ca is not None:
pulumi.set(__self__, "user_ca", user_ca)
if user_data is not None:
pulumi.set(__self__, "user_data", user_data)
if version is not None:
pulumi.set(__self__, "version", version)
if vpc_id is not None:
pulumi.set(__self__, "vpc_id", vpc_id)
if worker_auto_renew is not None:
pulumi.set(__self__, "worker_auto_renew", worker_auto_renew)
if worker_auto_renew_period is not None:
pulumi.set(__self__, "worker_auto_renew_period", worker_auto_renew_period)
if worker_data_disk_category is not None:
pulumi.set(__self__, "worker_data_disk_category", worker_data_disk_category)
if worker_data_disk_size is not None:
pulumi.set(__self__, "worker_data_disk_size", worker_data_disk_size)
if worker_data_disks is not None:
pulumi.set(__self__, "worker_data_disks", worker_data_disks)
if worker_disk_category is not None:
pulumi.set(__self__, "worker_disk_category", worker_disk_category)
if worker_disk_performance_level is not None:
pulumi.set(__self__, "worker_disk_performance_level", worker_disk_performance_level)
if worker_disk_size is not None:
pulumi.set(__self__, "worker_disk_size", worker_disk_size)
if worker_disk_snapshot_policy_id is not None:
pulumi.set(__self__, "worker_disk_snapshot_policy_id", worker_disk_snapshot_policy_id)
if worker_instance_charge_type is not None:
pulumi.set(__self__, "worker_instance_charge_type", worker_instance_charge_type)
if worker_instance_types is not None:
pulumi.set(__self__, "worker_instance_types", worker_instance_types)
if worker_nodes is not None:
pulumi.set(__self__, "worker_nodes", worker_nodes)
if worker_number is not None:
pulumi.set(__self__, "worker_number", worker_number)
if worker_period is not None:
pulumi.set(__self__, "worker_period", worker_period)
if worker_period_unit is not None:
pulumi.set(__self__, "worker_period_unit", worker_period_unit)
if worker_ram_role_name is not None:
pulumi.set(__self__, "worker_ram_role_name", worker_ram_role_name)
if worker_vswitch_ids is not None:
pulumi.set(__self__, "worker_vswitch_ids", worker_vswitch_ids)
@property
@pulumi.getter
def addons(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesAddonArgs']]]]:
return pulumi.get(self, "addons")
@addons.setter
def addons(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesAddonArgs']]]]):
pulumi.set(self, "addons", value)
@property
@pulumi.getter(name="apiAudiences")
def api_audiences(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of API audiences for [Service Account Token Volume Projection](https://www.alibabacloud.com/help/doc-detail/160384.htm). Set this to `["kubernetes.default.svc"]` if you want to enable the Token Volume Projection feature (requires specifying `service_account_issuer` as well).
"""
return pulumi.get(self, "api_audiences")
@api_audiences.setter
def api_audiences(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "api_audiences", value)
@property
@pulumi.getter(name="availabilityZone")
def availability_zone(self) -> Optional[pulumi.Input[str]]:
"""
The zone where the new Kubernetes cluster will be located. If it is not specified, the `vswitch_ids` should be set, and the vswitch's zone will be used.
"""
return pulumi.get(self, "availability_zone")
@availability_zone.setter
def availability_zone(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "availability_zone", value)
@property
@pulumi.getter(name="certificateAuthority")
def certificate_authority(self) -> Optional[pulumi.Input['KubernetesCertificateAuthorityArgs']]:
"""
(Available in 1.105.0+) Nested attribute containing certificate authority data for your cluster.
"""
return pulumi.get(self, "certificate_authority")
@certificate_authority.setter
def certificate_authority(self, value: Optional[pulumi.Input['KubernetesCertificateAuthorityArgs']]):
pulumi.set(self, "certificate_authority", value)
@property
@pulumi.getter(name="clientCert")
def client_cert(self) -> Optional[pulumi.Input[str]]:
"""
The path of client certificate, like `~/.kube/client-cert.pem`.
"""
return pulumi.get(self, "client_cert")
@client_cert.setter
def client_cert(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_cert", value)
@property
@pulumi.getter(name="clientKey")
def client_key(self) -> Optional[pulumi.Input[str]]:
"""
The path of client key, like `~/.kube/client-key.pem`.
"""
return pulumi.get(self, "client_key")
@client_key.setter
def client_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_key", value)
@property
@pulumi.getter(name="clusterCaCert")
def cluster_ca_cert(self) -> Optional[pulumi.Input[str]]:
"""
The path of cluster ca certificate, like `~/.kube/cluster-ca-cert.pem`
"""
return pulumi.get(self, "cluster_ca_cert")
@cluster_ca_cert.setter
def cluster_ca_cert(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_ca_cert", value)
@property
@pulumi.getter(name="clusterDomain")
def cluster_domain(self) -> Optional[pulumi.Input[str]]:
"""
Cluster local domain name. Defaults to `cluster.local`. A domain name consists of one or more sections separated by a period (.); each section may be up to 63 characters long and may contain lowercase letters, digits, and hyphens (-), and must begin and end with a lowercase letter or digit.
"""
return pulumi.get(self, "cluster_domain")
@cluster_domain.setter
def cluster_domain(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_domain", value)
@property
@pulumi.getter
def connections(self) -> Optional[pulumi.Input['KubernetesConnectionsArgs']]:
"""
Map of kubernetes cluster connection information.
"""
return pulumi.get(self, "connections")
@connections.setter
def connections(self, value: Optional[pulumi.Input['KubernetesConnectionsArgs']]):
pulumi.set(self, "connections", value)
@property
@pulumi.getter(name="cpuPolicy")
def cpu_policy(self) -> Optional[pulumi.Input[str]]:
"""
Kubelet cpu policy. For Kubernetes 1.12.6 and later, its valid value is either `static` or `none`. Default to `none`.
"""
return pulumi.get(self, "cpu_policy")
@cpu_policy.setter
def cpu_policy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cpu_policy", value)
@property
@pulumi.getter(name="customSan")
def custom_san(self) -> Optional[pulumi.Input[str]]:
"""
Customize the certificate SAN; multiple IPs or domain names are separated by commas (,).
"""
return pulumi.get(self, "custom_san")
@custom_san.setter
def custom_san(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "custom_san", value)
@property
@pulumi.getter(name="deletionProtection")
def deletion_protection(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to enable cluster deletion protection.
"""
return pulumi.get(self, "deletion_protection")
@deletion_protection.setter
def deletion_protection(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "deletion_protection", value)
@property
@pulumi.getter(name="enableSsh")
def enable_ssh(self) -> Optional[pulumi.Input[bool]]:
"""
Enable login to the node through SSH. Default to `false`.
"""
return pulumi.get(self, "enable_ssh")
@enable_ssh.setter
def enable_ssh(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enable_ssh", value)
@property
@pulumi.getter(name="excludeAutoscalerNodes")
def exclude_autoscaler_nodes(self) -> Optional[pulumi.Input[bool]]:
"""
Exclude autoscaler nodes from `worker_nodes`. Default to `false`.
"""
return pulumi.get(self, "exclude_autoscaler_nodes")
@exclude_autoscaler_nodes.setter
def exclude_autoscaler_nodes(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "exclude_autoscaler_nodes", value)
@property
@pulumi.getter(name="imageId")
def image_id(self) -> Optional[pulumi.Input[str]]:
"""
Custom image support. The image must be based on CentOS 7 or AliyunLinux 2.
"""
return pulumi.get(self, "image_id")
@image_id.setter
def image_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "image_id", value)
@property
@pulumi.getter(name="installCloudMonitor")
def install_cloud_monitor(self) -> Optional[pulumi.Input[bool]]:
"""
Install cloud monitor agent on ECS. Default to `true`.
"""
return pulumi.get(self, "install_cloud_monitor")
@install_cloud_monitor.setter
def install_cloud_monitor(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "install_cloud_monitor", value)
@property
@pulumi.getter(name="isEnterpriseSecurityGroup")
def is_enterprise_security_group(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to create an advanced security group. Defaults to `false`. See [Advanced security group](https://www.alibabacloud.com/help/doc-detail/120621.htm).
"""
return pulumi.get(self, "is_enterprise_security_group")
@is_enterprise_security_group.setter
def is_enterprise_security_group(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "is_enterprise_security_group", value)
@property
@pulumi.getter(name="keyName")
def key_name(self) -> Optional[pulumi.Input[str]]:
"""
The key pair used for SSH login to cluster nodes; you have to create it first. You have to specify one of the `password`, `key_name` and `kms_encrypted_password` fields.
"""
return pulumi.get(self, "key_name")
@key_name.setter
def key_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_name", value)
@property
@pulumi.getter(name="kmsEncryptedPassword")
def kms_encrypted_password(self) -> Optional[pulumi.Input[str]]:
"""
A KMS-encrypted password used for the CS Kubernetes cluster. You have to specify one of the `password`, `key_name` and `kms_encrypted_password` fields.
"""
return pulumi.get(self, "kms_encrypted_password")
@kms_encrypted_password.setter
def kms_encrypted_password(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kms_encrypted_password", value)
@property
@pulumi.getter(name="kmsEncryptionContext")
def kms_encryption_context(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
A KMS encryption context used to decrypt `kms_encrypted_password` before creating or updating a CS Kubernetes cluster with `kms_encrypted_password`. See [Encryption Context](https://www.alibabacloud.com/help/doc-detail/42975.htm). It is valid when `kms_encrypted_password` is set.
"""
return pulumi.get(self, "kms_encryption_context")
@kms_encryption_context.setter
def kms_encryption_context(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "kms_encryption_context", value)
@property
@pulumi.getter(name="kubeConfig")
def kube_config(self) -> Optional[pulumi.Input[str]]:
"""
The path of kube config, like `~/.kube/config`.
"""
return pulumi.get(self, "kube_config")
@kube_config.setter
def kube_config(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kube_config", value)
@property
@pulumi.getter(name="loadBalancerSpec")
def load_balancer_spec(self) -> Optional[pulumi.Input[str]]:
"""
The load balancer instance specification of the cluster API server, default `slb.s1.small`. For more information on how to select an SLB instance specification, see [SLB instance overview](https://help.aliyun.com/document_detail/85931.html).
"""
return pulumi.get(self, "load_balancer_spec")
@load_balancer_spec.setter
def load_balancer_spec(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "load_balancer_spec", value)
@property
@pulumi.getter(name="masterAutoRenew")
def master_auto_renew(self) -> Optional[pulumi.Input[bool]]:
"""
Enable master payment auto-renew, defaults to false.
"""
return pulumi.get(self, "master_auto_renew")
@master_auto_renew.setter
def master_auto_renew(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "master_auto_renew", value)
@property
@pulumi.getter(name="masterAutoRenewPeriod")
def master_auto_renew_period(self) -> Optional[pulumi.Input[int]]:
"""
Master payment auto-renew period, it can be one of {1, 2, 3, 6, 12}.
"""
return pulumi.get(self, "master_auto_renew_period")
@master_auto_renew_period.setter
def master_auto_renew_period(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "master_auto_renew_period", value)
@property
@pulumi.getter(name="masterDiskCategory")
def master_disk_category(self) -> Optional[pulumi.Input[str]]:
"""
The system disk category of master nodes. Its valid values are `cloud_ssd`, `cloud_essd` and `cloud_efficiency`. Defaults to `cloud_efficiency`.
"""
return pulumi.get(self, "master_disk_category")
@master_disk_category.setter
def master_disk_category(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "master_disk_category", value)
@property
@pulumi.getter(name="masterDiskPerformanceLevel")
def master_disk_performance_level(self) -> Optional[pulumi.Input[str]]:
"""
Master node system disk performance level. When `master_disk_category` is `cloud_essd`, the valid values are `PL0`, `PL1`, `PL2` or `PL3`, but the available level depends on the disk capacity. For more information, see [Enhanced SSDs](https://www.alibabacloud.com/help/doc-detail/122389.htm). Defaults to `PL1`.
"""
return pulumi.get(self, "master_disk_performance_level")
@master_disk_performance_level.setter
def master_disk_performance_level(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "master_disk_performance_level", value)
@property
@pulumi.getter(name="masterDiskSize")
def master_disk_size(self) -> Optional[pulumi.Input[int]]:
"""
The system disk size of master nodes. Valid values range from 20 to 500, in GB. Defaults to 20.
"""
return pulumi.get(self, "master_disk_size")
@master_disk_size.setter
def master_disk_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "master_disk_size", value)
@property
@pulumi.getter(name="masterDiskSnapshotPolicyId")
def master_disk_snapshot_policy_id(self) -> Optional[pulumi.Input[str]]:
"""
Master node system disk auto snapshot policy.
"""
return pulumi.get(self, "master_disk_snapshot_policy_id")
@master_disk_snapshot_policy_id.setter
def master_disk_snapshot_policy_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "master_disk_snapshot_policy_id", value)
@property
@pulumi.getter(name="masterInstanceChargeType")
def master_instance_charge_type(self) -> Optional[pulumi.Input[str]]:
"""
Master payment type. Its valid value is either `PostPaid` or `PrePaid`. Defaults to `PostPaid`. If the value is `PrePaid`, the fields `master_period`, `master_period_unit`, `master_auto_renew` and `master_auto_renew_period` are required.
"""
return pulumi.get(self, "master_instance_charge_type")
@master_instance_charge_type.setter
def master_instance_charge_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "master_instance_charge_type", value)
@property
@pulumi.getter(name="masterInstanceTypes")
def master_instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The instance type of master node. Specify one type for single AZ Cluster, three types for MultiAZ Cluster.
"""
return pulumi.get(self, "master_instance_types")
@master_instance_types.setter
def master_instance_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "master_instance_types", value)
@property
@pulumi.getter(name="masterNodes")
def master_nodes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesMasterNodeArgs']]]]:
"""
List of cluster master nodes.
"""
return pulumi.get(self, "master_nodes")
@master_nodes.setter
def master_nodes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesMasterNodeArgs']]]]):
pulumi.set(self, "master_nodes", value)
@property
@pulumi.getter(name="masterPeriod")
def master_period(self) -> Optional[pulumi.Input[int]]:
"""
Master payment period. Its valid value is one of {1, 2, 3, 6, 12, 24, 36, 48, 60}.
"""
return pulumi.get(self, "master_period")
@master_period.setter
def master_period(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "master_period", value)
@property
@pulumi.getter(name="masterPeriodUnit")
def master_period_unit(self) -> Optional[pulumi.Input[str]]:
"""
Master payment period unit, the valid value is `Month`.
"""
return pulumi.get(self, "master_period_unit")
@master_period_unit.setter
def master_period_unit(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "master_period_unit", value)
@property
@pulumi.getter(name="masterVswitchIds")
def master_vswitch_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "master_vswitch_ids")
@master_vswitch_ids.setter
def master_vswitch_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "master_vswitch_ids", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The Kubernetes cluster's name. It must be unique within one Alicloud account.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="namePrefix")
def name_prefix(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "name_prefix")
@name_prefix.setter
def name_prefix(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name_prefix", value)
@property
@pulumi.getter(name="natGatewayId")
def nat_gateway_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of nat gateway used to launch kubernetes cluster.
"""
return pulumi.get(self, "nat_gateway_id")
@nat_gateway_id.setter
def nat_gateway_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "nat_gateway_id", value)
@property
@pulumi.getter(name="newNatGateway")
def new_nat_gateway(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to create a new NAT gateway while creating the Kubernetes cluster. Defaults to `true`. Because not all Alibaba Cloud OpenAPI endpoints are reachable from the intranet, turning this option on is a good choice.
"""
return pulumi.get(self, "new_nat_gateway")
@new_nat_gateway.setter
def new_nat_gateway(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "new_nat_gateway", value)
@property
@pulumi.getter(name="nodeCidrMask")
def node_cidr_mask(self) -> Optional[pulumi.Input[int]]:
"""
The node CIDR mask, which determines how many pods can run on a single node. Values from 24 to 28 are allowed. For example, 24 means 2^(32-24)-1=255, so the node can run at most 255 pods. Defaults to 24.
"""
return pulumi.get(self, "node_cidr_mask")
@node_cidr_mask.setter
def node_cidr_mask(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "node_cidr_mask", value)
@property
@pulumi.getter(name="nodeNameMode")
def node_name_mode(self) -> Optional[pulumi.Input[str]]:
"""
Each node name consists of a prefix, an IP substring, and a suffix. For example, if the node IP address is 192.168.0.55, the prefix is aliyun.com, IP substring length is 5, and the suffix is test, the node name will be `aliyun.com00055test`.
"""
return pulumi.get(self, "node_name_mode")
@node_name_mode.setter
def node_name_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "node_name_mode", value)
@property
@pulumi.getter(name="nodePortRange")
def node_port_range(self) -> Optional[pulumi.Input[str]]:
"""
The service port range of nodes, valid values: `30000` to `65535`. Default to `30000-32767`.
"""
return pulumi.get(self, "node_port_range")
@node_port_range.setter
def node_port_range(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "node_port_range", value)
@property
@pulumi.getter(name="osType")
def os_type(self) -> Optional[pulumi.Input[str]]:
"""
The operating system of the nodes that run pods, its valid value is either `Linux` or `Windows`. Default to `Linux`.
"""
return pulumi.get(self, "os_type")
@os_type.setter
def os_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "os_type", value)
@property
@pulumi.getter
def password(self) -> Optional[pulumi.Input[str]]:
"""
The password for SSH login to cluster nodes. You have to specify one of the `password`, `key_name` and `kms_encrypted_password` fields.
"""
return pulumi.get(self, "password")
@password.setter
def password(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "password", value)
@property
@pulumi.getter
def platform(self) -> Optional[pulumi.Input[str]]:
"""
The architecture of the nodes that run pods, its valid value is either `CentOS` or `AliyunLinux`. Default to `CentOS`.
"""
return pulumi.get(self, "platform")
@platform.setter
def platform(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "platform", value)
@property
@pulumi.getter(name="podCidr")
def pod_cidr(self) -> Optional[pulumi.Input[str]]:
"""
- [Flannel Specific] The CIDR block for the pod network when using Flannel.
"""
return pulumi.get(self, "pod_cidr")
@pod_cidr.setter
def pod_cidr(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "pod_cidr", value)
@property
@pulumi.getter(name="podVswitchIds")
def pod_vswitch_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
- [Terway Specific] The vswitches for the pod network when using Terway. Note that `pod_vswitch_ids` cannot be the same as `worker_vswitch_ids` or `master_vswitch_ids`, but they must be in the same availability zones.
"""
return pulumi.get(self, "pod_vswitch_ids")
@pod_vswitch_ids.setter
def pod_vswitch_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "pod_vswitch_ids", value)
@property
@pulumi.getter(name="proxyMode")
def proxy_mode(self) -> Optional[pulumi.Input[str]]:
"""
The proxy mode of kube-proxy. Options: `iptables` | `ipvs`. Defaults to `ipvs`.
"""
return pulumi.get(self, "proxy_mode")
@proxy_mode.setter
def proxy_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "proxy_mode", value)
@property
@pulumi.getter(name="rdsInstances")
def rds_instances(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The list of RDS instances; the cluster nodes are added to the whitelists of the RDS instances you choose.
"""
return pulumi.get(self, "rds_instances")
@rds_instances.setter
def rds_instances(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "rds_instances", value)
@property
@pulumi.getter(name="resourceGroupId")
def resource_group_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the resource group. By default, these cloud resources are automatically assigned to the default resource group.
"""
return pulumi.get(self, "resource_group_id")
@resource_group_id.setter
def resource_group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_id", value)
@property
@pulumi.getter
def runtime(self) -> Optional[pulumi.Input['KubernetesRuntimeArgs']]:
"""
The runtime of containers. Default to `docker`. If you select another container runtime, see [How do I select between Docker and Sandboxed-Container](https://www.alibabacloud.com/help/doc-detail/160313.htm?spm=a2c63.p38356.b99.440.22563866AJkBgI). Detailed below.
"""
return pulumi.get(self, "runtime")
@runtime.setter
def runtime(self, value: Optional[pulumi.Input['KubernetesRuntimeArgs']]):
pulumi.set(self, "runtime", value)
@property
@pulumi.getter(name="securityGroupId")
def security_group_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the security group to which the ECS instances in the cluster belong. If it is not specified, a new security group will be created.
"""
return pulumi.get(self, "security_group_id")
@security_group_id.setter
def security_group_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "security_group_id", value)
@property
@pulumi.getter(name="serviceAccountIssuer")
def service_account_issuer(self) -> Optional[pulumi.Input[str]]:
"""
The issuer of the Service Account token for [Service Account Token Volume Projection](https://www.alibabacloud.com/help/doc-detail/160384.htm), corresponds to the `iss` field in the token payload. Set this to `"kubernetes.default.svc"` to enable the Token Volume Projection feature (requires specifying `api_audiences` as well).
"""
return pulumi.get(self, "service_account_issuer")
@service_account_issuer.setter
def service_account_issuer(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service_account_issuer", value)
@property
@pulumi.getter(name="serviceCidr")
def service_cidr(self) -> Optional[pulumi.Input[str]]:
"""
The CIDR block for the service network. It cannot overlap with the VPC CIDR or with CIDR blocks used by other Kubernetes clusters in the VPC, and it cannot be modified after creation.
"""
return pulumi.get(self, "service_cidr")
@service_cidr.setter
def service_cidr(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service_cidr", value)
@property
@pulumi.getter(name="slbId")
def slb_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "slb_id")
@slb_id.setter
def slb_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "slb_id", value)
@property
@pulumi.getter(name="slbInternet")
def slb_internet(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "slb_internet")
@slb_internet.setter
def slb_internet(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "slb_internet", value)
@property
@pulumi.getter(name="slbInternetEnabled")
def slb_internet_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
Whether to create internet load balancer for API Server. Default to true.
"""
return pulumi.get(self, "slb_internet_enabled")
@slb_internet_enabled.setter
def slb_internet_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "slb_internet_enabled", value)
@property
@pulumi.getter(name="slbIntranet")
def slb_intranet(self) -> Optional[pulumi.Input[str]]:
"""
The ID of private load balancer where the current cluster master node is located.
"""
return pulumi.get(self, "slb_intranet")
@slb_intranet.setter
def slb_intranet(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "slb_intranet", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
Default nil. A map of tags assigned to the Kubernetes cluster and worker nodes. Detailed below.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter
def taints(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesTaintArgs']]]]:
"""
Taints ensure pods are not scheduled onto inappropriate nodes. One or more taints are applied to a node; this marks that the node should not accept any pods that do not tolerate the taints. For more information, see [Taints and Tolerations](https://kubernetes.io/docs/concepts/scheduling-eviction/taint-and-toleration/). Detailed below.
"""
return pulumi.get(self, "taints")
@taints.setter
def taints(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesTaintArgs']]]]):
pulumi.set(self, "taints", value)
@property
@pulumi.getter
def timezone(self) -> Optional[pulumi.Input[str]]:
"""
When you create a cluster, set the time zone for the master and worker nodes. You can only set the managed node time zone when you create the cluster; once the cluster is created, you can only change the time zone of the worker nodes.
"""
return pulumi.get(self, "timezone")
@timezone.setter
def timezone(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "timezone", value)
@property
@pulumi.getter(name="userCa")
def user_ca(self) -> Optional[pulumi.Input[str]]:
"""
The path of the customized CA certificate. You can use this CA to sign client certificates for connecting to your cluster.
"""
return pulumi.get(self, "user_ca")
@user_ca.setter
def user_ca(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "user_ca", value)
@property
@pulumi.getter(name="userData")
def user_data(self) -> Optional[pulumi.Input[str]]:
"""
Custom data that can execute on nodes. For more information, see [Prepare user data](https://www.alibabacloud.com/help/doc-detail/49121.htm).
"""
return pulumi.get(self, "user_data")
@user_data.setter
def user_data(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "user_data", value)
@property
@pulumi.getter
def version(self) -> Optional[pulumi.Input[str]]:
"""
Desired Kubernetes version. If you do not specify a value, the latest available version at resource creation is used, and no upgrades occur unless you set a higher version number. To upgrade, this value must be set and increased as desired. Downgrades are not supported by ACK.
"""
return pulumi.get(self, "version")
@version.setter
def version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "version", value)
@property
@pulumi.getter(name="vpcId")
def vpc_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of VPC where the current cluster is located.
"""
return pulumi.get(self, "vpc_id")
@vpc_id.setter
def vpc_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "vpc_id", value)
@property
@pulumi.getter(name="workerAutoRenew")
def worker_auto_renew(self) -> Optional[pulumi.Input[bool]]:
"""
Enable worker payment auto-renew, defaults to false.
"""
return pulumi.get(self, "worker_auto_renew")
@worker_auto_renew.setter
def worker_auto_renew(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "worker_auto_renew", value)
@property
@pulumi.getter(name="workerAutoRenewPeriod")
def worker_auto_renew_period(self) -> Optional[pulumi.Input[int]]:
"""
Worker payment auto-renew period. Valid values: {1, 2, 3, 6, 12}.
"""
return pulumi.get(self, "worker_auto_renew_period")
@worker_auto_renew_period.setter
def worker_auto_renew_period(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "worker_auto_renew_period", value)
@property
@pulumi.getter(name="workerDataDiskCategory")
def worker_data_disk_category(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "worker_data_disk_category")
@worker_data_disk_category.setter
def worker_data_disk_category(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "worker_data_disk_category", value)
@property
@pulumi.getter(name="workerDataDiskSize")
def worker_data_disk_size(self) -> Optional[pulumi.Input[int]]:
return pulumi.get(self, "worker_data_disk_size")
@worker_data_disk_size.setter
def worker_data_disk_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "worker_data_disk_size", value)
@property
@pulumi.getter(name="workerDataDisks")
def worker_data_disks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesWorkerDataDiskArgs']]]]:
"""
The data disk configurations of worker nodes, such as the disk type and disk size.
"""
return pulumi.get(self, "worker_data_disks")
@worker_data_disks.setter
def worker_data_disks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesWorkerDataDiskArgs']]]]):
pulumi.set(self, "worker_data_disks", value)
@property
@pulumi.getter(name="workerDiskCategory")
def worker_disk_category(self) -> Optional[pulumi.Input[str]]:
"""
The system disk category of the worker nodes. Its valid values are `cloud`, `cloud_ssd`, `cloud_essd` and `cloud_efficiency`. Default to `cloud_efficiency`.
"""
return pulumi.get(self, "worker_disk_category")
@worker_disk_category.setter
def worker_disk_category(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "worker_disk_category", value)
@property
@pulumi.getter(name="workerDiskPerformanceLevel")
def worker_disk_performance_level(self) -> Optional[pulumi.Input[str]]:
"""
Worker node system disk performance level. When `worker_disk_category` is `cloud_essd`, the optional values are `PL0`, `PL1`, `PL2` or `PL3`, but the available performance levels depend on the disk capacity. For more information, see [Enhanced SSDs](https://www.alibabacloud.com/help/doc-detail/122389.htm). Default is `PL1`.
"""
return pulumi.get(self, "worker_disk_performance_level")
@worker_disk_performance_level.setter
def worker_disk_performance_level(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "worker_disk_performance_level", value)
@property
@pulumi.getter(name="workerDiskSize")
def worker_disk_size(self) -> Optional[pulumi.Input[int]]:
"""
The system disk size of the worker nodes. Its valid value range is [40, 500] in GB. Default to 40.
"""
return pulumi.get(self, "worker_disk_size")
@worker_disk_size.setter
def worker_disk_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "worker_disk_size", value)
@property
@pulumi.getter(name="workerDiskSnapshotPolicyId")
def worker_disk_snapshot_policy_id(self) -> Optional[pulumi.Input[str]]:
"""
Worker node system disk auto snapshot policy.
"""
return pulumi.get(self, "worker_disk_snapshot_policy_id")
@worker_disk_snapshot_policy_id.setter
def worker_disk_snapshot_policy_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "worker_disk_snapshot_policy_id", value)
@property
@pulumi.getter(name="workerInstanceChargeType")
def worker_instance_charge_type(self) -> Optional[pulumi.Input[str]]:
"""
Worker payment type, its valid value is either `PostPaid` or `PrePaid`. Defaults to `PostPaid`. If the value is `PrePaid`, the fields `worker_period`, `worker_period_unit`, `worker_auto_renew` and `worker_auto_renew_period` are required.
"""
return pulumi.get(self, "worker_instance_charge_type")
@worker_instance_charge_type.setter
def worker_instance_charge_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "worker_instance_charge_type", value)
@property
@pulumi.getter(name="workerInstanceTypes")
def worker_instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The instance type of worker node. Specify one type for single AZ Cluster, three types for MultiAZ Cluster.
"""
return pulumi.get(self, "worker_instance_types")
@worker_instance_types.setter
def worker_instance_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "worker_instance_types", value)
@property
@pulumi.getter(name="workerNodes")
def worker_nodes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesWorkerNodeArgs']]]]:
"""
List of cluster worker nodes.
"""
return pulumi.get(self, "worker_nodes")
@worker_nodes.setter
def worker_nodes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['KubernetesWorkerNodeArgs']]]]):
pulumi.set(self, "worker_nodes", value)
@property
@pulumi.getter(name="workerNumber")
def worker_number(self) -> Optional[pulumi.Input[int]]:
"""
The worker node number of the kubernetes cluster. Default to 3. It is limited to 50; if you want to increase it, please apply for a whitelist or contact us.
"""
return pulumi.get(self, "worker_number")
@worker_number.setter
def worker_number(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "worker_number", value)
@property
@pulumi.getter(name="workerPeriod")
def worker_period(self) -> Optional[pulumi.Input[int]]:
"""
Worker payment period. The unit is `Month`. Its valid value is one of {1, 2, 3, 6, 12, 24, 36, 48, 60}.
"""
return pulumi.get(self, "worker_period")
@worker_period.setter
def worker_period(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "worker_period", value)
@property
@pulumi.getter(name="workerPeriodUnit")
def worker_period_unit(self) -> Optional[pulumi.Input[str]]:
"""
Worker payment period unit, the valid value is `Month`.
"""
return pulumi.get(self, "worker_period_unit")
@worker_period_unit.setter
def worker_period_unit(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "worker_period_unit", value)
@property
@pulumi.getter(name="workerRamRoleName")
def worker_ram_role_name(self) -> Optional[pulumi.Input[str]]:
"""
The RamRole Name attached to worker node.
"""
return pulumi.get(self, "worker_ram_role_name")
@worker_ram_role_name.setter
def worker_ram_role_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "worker_ram_role_name", value)
@property
@pulumi.getter(name="workerVswitchIds")
def worker_vswitch_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
return pulumi.get(self, "worker_vswitch_ids")
@worker_vswitch_ids.setter
def worker_vswitch_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "worker_vswitch_ids", value)
class Kubernetes(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
addons: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesAddonArgs']]]]] = None,
api_audiences: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
availability_zone: Optional[pulumi.Input[str]] = None,
client_cert: Optional[pulumi.Input[str]] = None,
client_key: Optional[pulumi.Input[str]] = None,
cluster_ca_cert: Optional[pulumi.Input[str]] = None,
cluster_domain: Optional[pulumi.Input[str]] = None,
cpu_policy: Optional[pulumi.Input[str]] = None,
custom_san: Optional[pulumi.Input[str]] = None,
deletion_protection: Optional[pulumi.Input[bool]] = None,
enable_ssh: Optional[pulumi.Input[bool]] = None,
exclude_autoscaler_nodes: Optional[pulumi.Input[bool]] = None,
image_id: Optional[pulumi.Input[str]] = None,
install_cloud_monitor: Optional[pulumi.Input[bool]] = None,
is_enterprise_security_group: Optional[pulumi.Input[bool]] = None,
key_name: Optional[pulumi.Input[str]] = None,
kms_encrypted_password: Optional[pulumi.Input[str]] = None,
kms_encryption_context: Optional[pulumi.Input[Mapping[str, Any]]] = None,
kube_config: Optional[pulumi.Input[str]] = None,
load_balancer_spec: Optional[pulumi.Input[str]] = None,
master_auto_renew: Optional[pulumi.Input[bool]] = None,
master_auto_renew_period: Optional[pulumi.Input[int]] = None,
master_disk_category: Optional[pulumi.Input[str]] = None,
master_disk_performance_level: Optional[pulumi.Input[str]] = None,
master_disk_size: Optional[pulumi.Input[int]] = None,
master_disk_snapshot_policy_id: Optional[pulumi.Input[str]] = None,
master_instance_charge_type: Optional[pulumi.Input[str]] = None,
master_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
master_period: Optional[pulumi.Input[int]] = None,
master_period_unit: Optional[pulumi.Input[str]] = None,
master_vswitch_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
name_prefix: Optional[pulumi.Input[str]] = None,
new_nat_gateway: Optional[pulumi.Input[bool]] = None,
node_cidr_mask: Optional[pulumi.Input[int]] = None,
node_name_mode: Optional[pulumi.Input[str]] = None,
node_port_range: Optional[pulumi.Input[str]] = None,
os_type: Optional[pulumi.Input[str]] = None,
password: Optional[pulumi.Input[str]] = None,
platform: Optional[pulumi.Input[str]] = None,
pod_cidr: Optional[pulumi.Input[str]] = None,
pod_vswitch_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
proxy_mode: Optional[pulumi.Input[str]] = None,
rds_instances: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
resource_group_id: Optional[pulumi.Input[str]] = None,
runtime: Optional[pulumi.Input[pulumi.InputType['KubernetesRuntimeArgs']]] = None,
security_group_id: Optional[pulumi.Input[str]] = None,
service_account_issuer: Optional[pulumi.Input[str]] = None,
service_cidr: Optional[pulumi.Input[str]] = None,
slb_internet_enabled: Optional[pulumi.Input[bool]] = None,
tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
taints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesTaintArgs']]]]] = None,
timezone: Optional[pulumi.Input[str]] = None,
user_ca: Optional[pulumi.Input[str]] = None,
user_data: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[str]] = None,
worker_auto_renew: Optional[pulumi.Input[bool]] = None,
worker_auto_renew_period: Optional[pulumi.Input[int]] = None,
worker_data_disk_category: Optional[pulumi.Input[str]] = None,
worker_data_disk_size: Optional[pulumi.Input[int]] = None,
worker_data_disks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesWorkerDataDiskArgs']]]]] = None,
worker_disk_category: Optional[pulumi.Input[str]] = None,
worker_disk_performance_level: Optional[pulumi.Input[str]] = None,
worker_disk_size: Optional[pulumi.Input[int]] = None,
worker_disk_snapshot_policy_id: Optional[pulumi.Input[str]] = None,
worker_instance_charge_type: Optional[pulumi.Input[str]] = None,
worker_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
worker_number: Optional[pulumi.Input[int]] = None,
worker_period: Optional[pulumi.Input[int]] = None,
worker_period_unit: Optional[pulumi.Input[str]] = None,
worker_vswitch_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
"""
## Import
Kubernetes cluster can be imported using the id, e.g. Then complete the main.tf according to the result of `terraform plan`
```sh
$ pulumi import alicloud:cs/kubernetes:Kubernetes alicloud_cs_kubernetes.main cluster-id
```
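## Example Usage
A minimal sketch of creating a cluster with this resource, based on the required properties enforced by this provider version (`master_instance_types`, `master_vswitch_ids`, `worker_instance_types`, `worker_number` and `worker_vswitch_ids`); the vswitch IDs, instance types and CIDR blocks below are hypothetical placeholders, not values from a real account.
```python
import pulumi
import pulumi_alicloud as alicloud

# Hypothetical vswitch IDs, instance types and CIDRs; replace with values from your own VPC.
cluster = alicloud.cs.Kubernetes("example-cluster",
    master_instance_types=["ecs.n4.xlarge", "ecs.n4.xlarge", "ecs.n4.xlarge"],
    master_vswitch_ids=["vsw-aaa", "vsw-bbb", "vsw-ccc"],
    worker_instance_types=["ecs.n4.xlarge"],
    worker_number=3,
    worker_vswitch_ids=["vsw-aaa"],
    pod_cidr="172.20.0.0/16",
    service_cidr="172.21.0.0/20",
    new_nat_gateway=True)

pulumi.export("cluster_id", cluster.id)
```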
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] api_audiences: A list of API audiences for [Service Account Token Volume Projection](https://www.alibabacloud.com/help/doc-detail/160384.htm). Set this to `["kubernetes.default.svc"]` if you want to enable the Token Volume Projection feature (requires specifying `service_account_issuer` as well).
:param pulumi.Input[str] availability_zone: The zone where the new kubernetes cluster will be located. If it is not specified, `vswitch_ids` should be set, and its value will be the vswitch's zone.
:param pulumi.Input[str] client_cert: The path of client certificate, like `~/.kube/client-cert.pem`.
:param pulumi.Input[str] client_key: The path of client key, like `~/.kube/client-key.pem`.
:param pulumi.Input[str] cluster_ca_cert: The path of cluster ca certificate, like `~/.kube/cluster-ca-cert.pem`
:param pulumi.Input[str] cluster_domain: Cluster local domain name. Default to `cluster.local`. A domain name consists of one or more sections separated by a decimal point (.); each section is up to 63 characters long, can contain lowercase letters, digits, and hyphens (-), and must begin and end with a lowercase letter or digit.
:param pulumi.Input[str] cpu_policy: Kubelet cpu policy. For Kubernetes 1.12.6 and later, its valid value is either `static` or `none`. Default to `none`.
:param pulumi.Input[str] custom_san: Customize the certificate SAN, multiple IP or domain names are separated by English commas (,).
:param pulumi.Input[bool] deletion_protection: Whether to enable cluster deletion protection.
:param pulumi.Input[bool] enable_ssh: Enable login to the node through SSH. Default to `false`.
:param pulumi.Input[bool] exclude_autoscaler_nodes: Exclude autoscaler nodes from `worker_nodes`. Default to `false`.
:param pulumi.Input[str] image_id: Custom image support. Must be based on CentOS7 or AliyunLinux2.
:param pulumi.Input[bool] install_cloud_monitor: Install cloud monitor agent on ECS. Default to `true`.
:param pulumi.Input[bool] is_enterprise_security_group: Whether to create an advanced security group. Default: false. See [Advanced security group](https://www.alibabacloud.com/help/doc-detail/120621.htm).
:param pulumi.Input[str] key_name: The key pair used for SSH login to the cluster nodes; you have to create it first. You have to specify one of the `password`, `key_name`, `kms_encrypted_password` fields.
:param pulumi.Input[str] kms_encrypted_password: A KMS-encrypted password used for the cs kubernetes cluster. You have to specify one of the `password`, `key_name`, `kms_encrypted_password` fields.
:param pulumi.Input[Mapping[str, Any]] kms_encryption_context: A KMS encryption context used to decrypt `kms_encrypted_password` before creating or updating a cs kubernetes with `kms_encrypted_password`. See [Encryption Context](https://www.alibabacloud.com/help/doc-detail/42975.htm). It is valid when `kms_encrypted_password` is set.
:param pulumi.Input[str] kube_config: The path of kube config, like `~/.kube/config`.
:param pulumi.Input[str] load_balancer_spec: The cluster API server load balancer instance specification, default `slb.s1.small`. For more information on how to select an LB instance specification, see [SLB instance overview](https://help.aliyun.com/document_detail/85931.html).
:param pulumi.Input[bool] master_auto_renew: Enable master payment auto-renew, defaults to false.
:param pulumi.Input[int] master_auto_renew_period: Master payment auto-renew period, it can be one of {1, 2, 3, 6, 12}.
:param pulumi.Input[str] master_disk_category: The system disk category of the master nodes. Its valid values are `cloud_ssd`, `cloud_essd` and `cloud_efficiency`. Default to `cloud_efficiency`.
:param pulumi.Input[str] master_disk_performance_level: Master node system disk performance level. When `master_disk_category` is `cloud_essd`, the optional values are `PL0`, `PL1`, `PL2` or `PL3`, but the available performance levels depend on the disk capacity. For more information, see [Enhanced SSDs](https://www.alibabacloud.com/help/doc-detail/122389.htm). Default is `PL1`.
:param pulumi.Input[int] master_disk_size: The system disk size of the master nodes. Its valid value range is [20, 500] in GB. Default to 20.
:param pulumi.Input[str] master_disk_snapshot_policy_id: Master node system disk auto snapshot policy.
:param pulumi.Input[str] master_instance_charge_type: Master payment type, its valid value is either `PostPaid` or `PrePaid`. Defaults to `PostPaid`. If the value is `PrePaid`, the fields `master_period`, `master_period_unit`, `master_auto_renew` and `master_auto_renew_period` are required.
:param pulumi.Input[Sequence[pulumi.Input[str]]] master_instance_types: The instance type of master node. Specify one type for single AZ Cluster, three types for MultiAZ Cluster.
:param pulumi.Input[int] master_period: Master payment period. Its valid value is one of {1, 2, 3, 6, 12, 24, 36, 48, 60}.
:param pulumi.Input[str] master_period_unit: Master payment period unit, the valid value is `Month`.
:param pulumi.Input[str] name: The kubernetes cluster's name. It is unique in one Alicloud account.
:param pulumi.Input[bool] new_nat_gateway: Whether to create a new NAT gateway while creating the kubernetes cluster. Default to true. The OpenAPI endpoints in Alibaba Cloud are not all reachable from the intranet, so turning this option on is a good choice.
:param pulumi.Input[int] node_cidr_mask: The node CIDR mask that specifies how many pods can run on a single node. 24-28 is allowed. 24 means 2^(32-24)-1=255, so the node can run at most 255 pods. Default: 24.
:param pulumi.Input[str] node_name_mode: Each node name consists of a prefix, an IP substring, and a suffix. For example, if the node IP address is 192.168.0.55, the prefix is aliyun.com, IP substring length is 5, and the suffix is test, the node name will be `aliyun.com00055test`.
:param pulumi.Input[str] node_port_range: The service port range of nodes, valid values: `30000` to `65535`. Default to `30000-32767`.
:param pulumi.Input[str] os_type: The operating system of the nodes that run pods, its valid value is either `Linux` or `Windows`. Default to `Linux`.
:param pulumi.Input[str] password: The password of ssh login cluster node. You have to specify one of `password` `key_name` `kms_encrypted_password` fields.
:param pulumi.Input[str] platform: The architecture of the nodes that run pods, its valid value is either `CentOS` or `AliyunLinux`. Default to `CentOS`.
:param pulumi.Input[str] pod_cidr: - [Flannel Specific] The CIDR block for the pod network when using Flannel.
:param pulumi.Input[Sequence[pulumi.Input[str]]] pod_vswitch_ids: - [Terway Specific] The vswitches for the pod network when using Terway. Be careful: `pod_vswitch_ids` cannot be equal to `worker_vswitch_ids` or `master_vswitch_ids`, but must be in the same availability zones.
:param pulumi.Input[str] proxy_mode: Proxy mode is an option of kube-proxy. Options: iptables | ipvs. Default: ipvs.
:param pulumi.Input[Sequence[pulumi.Input[str]]] rds_instances: RDS instance list. You can choose which RDS instances' whitelists the cluster nodes are added to.
:param pulumi.Input[str] resource_group_id: The ID of the resource group; by default these cloud resources are automatically assigned to the default resource group.
:param pulumi.Input[pulumi.InputType['KubernetesRuntimeArgs']] runtime: The runtime of containers. Default to `docker`. If you select another container runtime, see [How do I select between Docker and Sandboxed-Container](https://www.alibabacloud.com/help/doc-detail/160313.htm?spm=a2c63.p38356.b99.440.22563866AJkBgI). Detailed below.
:param pulumi.Input[str] security_group_id: The ID of the security group to which the ECS instances in the cluster belong. If it is not specified, a new Security group will be built.
:param pulumi.Input[str] service_account_issuer: The issuer of the Service Account token for [Service Account Token Volume Projection](https://www.alibabacloud.com/help/doc-detail/160384.htm), corresponds to the `iss` field in the token payload. Set this to `"kubernetes.default.svc"` to enable the Token Volume Projection feature (requires specifying `api_audiences` as well).
:param pulumi.Input[str] service_cidr: The CIDR block for the service network. It cannot overlap with the VPC CIDR or the CIDRs used by Kubernetes clusters in the VPC, and it cannot be modified after creation.
:param pulumi.Input[bool] slb_internet_enabled: Whether to create internet load balancer for API Server. Default to true.
:param pulumi.Input[Mapping[str, Any]] tags: Default nil. A map of tags assigned to the kubernetes cluster and worker nodes. Detailed below.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesTaintArgs']]]] taints: Taints ensure pods are not scheduled onto inappropriate nodes. One or more taints are applied to a node; this marks that the node should not accept any pods that do not tolerate the taints. For more information, see [Taints and Tolerations](https://kubernetes.io/docs/concepts/scheduling-eviction/taint-and-toleration/). Detailed below.
:param pulumi.Input[str] timezone: When you create a cluster, set the time zones for the Master and Worker nodes. The Master node time zone can only be set when the cluster is created; once the cluster is created, only the Worker node time zone can be changed.
:param pulumi.Input[str] user_ca: The path of customized CA cert, you can use this CA to sign client certs to connect your cluster.
:param pulumi.Input[str] user_data: Custom data that can execute on nodes. For more information, see [Prepare user data](https://www.alibabacloud.com/help/doc-detail/49121.htm).
:param pulumi.Input[str] version: Desired Kubernetes version. If you do not specify a value, the latest available version at resource creation is used, and no upgrades will occur unless you set a higher version number. To upgrade, the value must be set and increased to the desired version. Downgrades are not supported by ACK.
:param pulumi.Input[bool] worker_auto_renew: Enable worker payment auto-renew, defaults to false.
:param pulumi.Input[int] worker_auto_renew_period: Worker payment auto-renew period. It can be one of {1, 2, 3, 6, 12}.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesWorkerDataDiskArgs']]]] worker_data_disks: The data disk configurations of worker nodes, such as the disk type and disk size.
:param pulumi.Input[str] worker_disk_category: The system disk category of the worker nodes. Its valid values are `cloud`, `cloud_ssd`, `cloud_essd` and `cloud_efficiency`. Default to `cloud_efficiency`.
:param pulumi.Input[str] worker_disk_performance_level: Worker node system disk performance level. When `worker_disk_category` is `cloud_essd`, the optional values are `PL0`, `PL1`, `PL2` or `PL3`, but the available performance levels depend on the disk capacity. For more information, see [Enhanced SSDs](https://www.alibabacloud.com/help/doc-detail/122389.htm). Default is `PL1`.
:param pulumi.Input[int] worker_disk_size: The system disk size of the worker nodes. Its valid value range is [40, 500] in GB. Default to 40.
:param pulumi.Input[str] worker_disk_snapshot_policy_id: Worker node system disk auto snapshot policy.
:param pulumi.Input[str] worker_instance_charge_type: Worker payment type, its valid value is either `PostPaid` or `PrePaid`. Defaults to `PostPaid`. If the value is `PrePaid`, the fields `worker_period`, `worker_period_unit`, `worker_auto_renew` and `worker_auto_renew_period` are required.
:param pulumi.Input[Sequence[pulumi.Input[str]]] worker_instance_types: The instance type of worker node. Specify one type for single AZ Cluster, three types for MultiAZ Cluster.
:param pulumi.Input[int] worker_number: The worker node number of the kubernetes cluster. Default to 3. It is limited to 50; if you want to increase it, please apply for a whitelist or contact us.
:param pulumi.Input[int] worker_period: Worker payment period. The unit is `Month`. Its valid value is one of {1, 2, 3, 6, 12, 24, 36, 48, 60}.
:param pulumi.Input[str] worker_period_unit: Worker payment period unit, the valid value is `Month`.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: KubernetesArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
## Import
Kubernetes cluster can be imported using the id, e.g. Then complete the main.tf according to the result of `terraform plan`
```sh
$ pulumi import alicloud:cs/kubernetes:Kubernetes alicloud_cs_kubernetes.main cluster-id
```
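A minimal sketch of the args-object form used by this overload, assuming `KubernetesArgs` requires the same properties enforced at creation (`master_instance_types`, `master_vswitch_ids`, `worker_instance_types`, `worker_number`, `worker_vswitch_ids`); the vswitch IDs and instance types below are hypothetical placeholders.
```python
import pulumi_alicloud as alicloud

# Hypothetical values; replace with vswitches and instance types from your own VPC.
cluster = alicloud.cs.Kubernetes(
    "example-cluster",
    alicloud.cs.KubernetesArgs(
        master_instance_types=["ecs.n4.xlarge", "ecs.n4.xlarge", "ecs.n4.xlarge"],
        master_vswitch_ids=["vsw-aaa", "vsw-bbb", "vsw-ccc"],
        worker_instance_types=["ecs.n4.xlarge"],
        worker_number=3,
        worker_vswitch_ids=["vsw-aaa"]))
```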
:param str resource_name: The name of the resource.
:param KubernetesArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(KubernetesArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
addons: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesAddonArgs']]]]] = None,
api_audiences: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
availability_zone: Optional[pulumi.Input[str]] = None,
client_cert: Optional[pulumi.Input[str]] = None,
client_key: Optional[pulumi.Input[str]] = None,
cluster_ca_cert: Optional[pulumi.Input[str]] = None,
cluster_domain: Optional[pulumi.Input[str]] = None,
cpu_policy: Optional[pulumi.Input[str]] = None,
custom_san: Optional[pulumi.Input[str]] = None,
deletion_protection: Optional[pulumi.Input[bool]] = None,
enable_ssh: Optional[pulumi.Input[bool]] = None,
exclude_autoscaler_nodes: Optional[pulumi.Input[bool]] = None,
image_id: Optional[pulumi.Input[str]] = None,
install_cloud_monitor: Optional[pulumi.Input[bool]] = None,
is_enterprise_security_group: Optional[pulumi.Input[bool]] = None,
key_name: Optional[pulumi.Input[str]] = None,
kms_encrypted_password: Optional[pulumi.Input[str]] = None,
kms_encryption_context: Optional[pulumi.Input[Mapping[str, Any]]] = None,
kube_config: Optional[pulumi.Input[str]] = None,
load_balancer_spec: Optional[pulumi.Input[str]] = None,
master_auto_renew: Optional[pulumi.Input[bool]] = None,
master_auto_renew_period: Optional[pulumi.Input[int]] = None,
master_disk_category: Optional[pulumi.Input[str]] = None,
master_disk_performance_level: Optional[pulumi.Input[str]] = None,
master_disk_size: Optional[pulumi.Input[int]] = None,
master_disk_snapshot_policy_id: Optional[pulumi.Input[str]] = None,
master_instance_charge_type: Optional[pulumi.Input[str]] = None,
master_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
master_period: Optional[pulumi.Input[int]] = None,
master_period_unit: Optional[pulumi.Input[str]] = None,
master_vswitch_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
name_prefix: Optional[pulumi.Input[str]] = None,
new_nat_gateway: Optional[pulumi.Input[bool]] = None,
node_cidr_mask: Optional[pulumi.Input[int]] = None,
node_name_mode: Optional[pulumi.Input[str]] = None,
node_port_range: Optional[pulumi.Input[str]] = None,
os_type: Optional[pulumi.Input[str]] = None,
password: Optional[pulumi.Input[str]] = None,
platform: Optional[pulumi.Input[str]] = None,
pod_cidr: Optional[pulumi.Input[str]] = None,
pod_vswitch_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
proxy_mode: Optional[pulumi.Input[str]] = None,
rds_instances: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
resource_group_id: Optional[pulumi.Input[str]] = None,
runtime: Optional[pulumi.Input[pulumi.InputType['KubernetesRuntimeArgs']]] = None,
security_group_id: Optional[pulumi.Input[str]] = None,
service_account_issuer: Optional[pulumi.Input[str]] = None,
service_cidr: Optional[pulumi.Input[str]] = None,
slb_internet_enabled: Optional[pulumi.Input[bool]] = None,
tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
taints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesTaintArgs']]]]] = None,
timezone: Optional[pulumi.Input[str]] = None,
user_ca: Optional[pulumi.Input[str]] = None,
user_data: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[str]] = None,
worker_auto_renew: Optional[pulumi.Input[bool]] = None,
worker_auto_renew_period: Optional[pulumi.Input[int]] = None,
worker_data_disk_category: Optional[pulumi.Input[str]] = None,
worker_data_disk_size: Optional[pulumi.Input[int]] = None,
worker_data_disks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesWorkerDataDiskArgs']]]]] = None,
worker_disk_category: Optional[pulumi.Input[str]] = None,
worker_disk_performance_level: Optional[pulumi.Input[str]] = None,
worker_disk_size: Optional[pulumi.Input[int]] = None,
worker_disk_snapshot_policy_id: Optional[pulumi.Input[str]] = None,
worker_instance_charge_type: Optional[pulumi.Input[str]] = None,
worker_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
worker_number: Optional[pulumi.Input[int]] = None,
worker_period: Optional[pulumi.Input[int]] = None,
worker_period_unit: Optional[pulumi.Input[str]] = None,
worker_vswitch_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = KubernetesArgs.__new__(KubernetesArgs)
__props__.__dict__["addons"] = addons
__props__.__dict__["api_audiences"] = api_audiences
__props__.__dict__["availability_zone"] = availability_zone
__props__.__dict__["client_cert"] = client_cert
__props__.__dict__["client_key"] = client_key
__props__.__dict__["cluster_ca_cert"] = cluster_ca_cert
__props__.__dict__["cluster_domain"] = cluster_domain
__props__.__dict__["cpu_policy"] = cpu_policy
__props__.__dict__["custom_san"] = custom_san
__props__.__dict__["deletion_protection"] = deletion_protection
__props__.__dict__["enable_ssh"] = enable_ssh
__props__.__dict__["exclude_autoscaler_nodes"] = exclude_autoscaler_nodes
__props__.__dict__["image_id"] = image_id
__props__.__dict__["install_cloud_monitor"] = install_cloud_monitor
__props__.__dict__["is_enterprise_security_group"] = is_enterprise_security_group
__props__.__dict__["key_name"] = key_name
__props__.__dict__["kms_encrypted_password"] = kms_encrypted_password
__props__.__dict__["kms_encryption_context"] = kms_encryption_context
__props__.__dict__["kube_config"] = kube_config
__props__.__dict__["load_balancer_spec"] = load_balancer_spec
__props__.__dict__["master_auto_renew"] = master_auto_renew
__props__.__dict__["master_auto_renew_period"] = master_auto_renew_period
__props__.__dict__["master_disk_category"] = master_disk_category
__props__.__dict__["master_disk_performance_level"] = master_disk_performance_level
__props__.__dict__["master_disk_size"] = master_disk_size
__props__.__dict__["master_disk_snapshot_policy_id"] = master_disk_snapshot_policy_id
__props__.__dict__["master_instance_charge_type"] = master_instance_charge_type
if master_instance_types is None and not opts.urn:
raise TypeError("Missing required property 'master_instance_types'")
__props__.__dict__["master_instance_types"] = master_instance_types
__props__.__dict__["master_period"] = master_period
__props__.__dict__["master_period_unit"] = master_period_unit
if master_vswitch_ids is None and not opts.urn:
raise TypeError("Missing required property 'master_vswitch_ids'")
__props__.__dict__["master_vswitch_ids"] = master_vswitch_ids
__props__.__dict__["name"] = name
if name_prefix is not None and not opts.urn:
warnings.warn("""Field 'name_prefix' has been deprecated from provider version 1.75.0.""", DeprecationWarning)
pulumi.log.warn("""name_prefix is deprecated: Field 'name_prefix' has been deprecated from provider version 1.75.0.""")
__props__.__dict__["name_prefix"] = name_prefix
__props__.__dict__["new_nat_gateway"] = new_nat_gateway
__props__.__dict__["node_cidr_mask"] = node_cidr_mask
__props__.__dict__["node_name_mode"] = node_name_mode
__props__.__dict__["node_port_range"] = node_port_range
__props__.__dict__["os_type"] = os_type
__props__.__dict__["password"] = password
__props__.__dict__["platform"] = platform
__props__.__dict__["pod_cidr"] = pod_cidr
__props__.__dict__["pod_vswitch_ids"] = pod_vswitch_ids
__props__.__dict__["proxy_mode"] = proxy_mode
__props__.__dict__["rds_instances"] = rds_instances
__props__.__dict__["resource_group_id"] = resource_group_id
__props__.__dict__["runtime"] = runtime
__props__.__dict__["security_group_id"] = security_group_id
__props__.__dict__["service_account_issuer"] = service_account_issuer
__props__.__dict__["service_cidr"] = service_cidr
__props__.__dict__["slb_internet_enabled"] = slb_internet_enabled
__props__.__dict__["tags"] = tags
__props__.__dict__["taints"] = taints
__props__.__dict__["timezone"] = timezone
__props__.__dict__["user_ca"] = user_ca
__props__.__dict__["user_data"] = user_data
__props__.__dict__["version"] = version
__props__.__dict__["worker_auto_renew"] = worker_auto_renew
__props__.__dict__["worker_auto_renew_period"] = worker_auto_renew_period
__props__.__dict__["worker_data_disk_category"] = worker_data_disk_category
__props__.__dict__["worker_data_disk_size"] = worker_data_disk_size
__props__.__dict__["worker_data_disks"] = worker_data_disks
__props__.__dict__["worker_disk_category"] = worker_disk_category
__props__.__dict__["worker_disk_performance_level"] = worker_disk_performance_level
__props__.__dict__["worker_disk_size"] = worker_disk_size
__props__.__dict__["worker_disk_snapshot_policy_id"] = worker_disk_snapshot_policy_id
__props__.__dict__["worker_instance_charge_type"] = worker_instance_charge_type
if worker_instance_types is None and not opts.urn:
raise TypeError("Missing required property 'worker_instance_types'")
__props__.__dict__["worker_instance_types"] = worker_instance_types
if worker_number is None and not opts.urn:
raise TypeError("Missing required property 'worker_number'")
__props__.__dict__["worker_number"] = worker_number
__props__.__dict__["worker_period"] = worker_period
__props__.__dict__["worker_period_unit"] = worker_period_unit
if worker_vswitch_ids is None and not opts.urn:
raise TypeError("Missing required property 'worker_vswitch_ids'")
__props__.__dict__["worker_vswitch_ids"] = worker_vswitch_ids
__props__.__dict__["certificate_authority"] = None
__props__.__dict__["connections"] = None
__props__.__dict__["master_nodes"] = None
__props__.__dict__["nat_gateway_id"] = None
__props__.__dict__["slb_id"] = None
__props__.__dict__["slb_internet"] = None
__props__.__dict__["slb_intranet"] = None
__props__.__dict__["vpc_id"] = None
__props__.__dict__["worker_nodes"] = None
__props__.__dict__["worker_ram_role_name"] = None
super(Kubernetes, __self__).__init__(
'alicloud:cs/kubernetes:Kubernetes',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
addons: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesAddonArgs']]]]] = None,
api_audiences: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
availability_zone: Optional[pulumi.Input[str]] = None,
certificate_authority: Optional[pulumi.Input[pulumi.InputType['KubernetesCertificateAuthorityArgs']]] = None,
client_cert: Optional[pulumi.Input[str]] = None,
client_key: Optional[pulumi.Input[str]] = None,
cluster_ca_cert: Optional[pulumi.Input[str]] = None,
cluster_domain: Optional[pulumi.Input[str]] = None,
connections: Optional[pulumi.Input[pulumi.InputType['KubernetesConnectionsArgs']]] = None,
cpu_policy: Optional[pulumi.Input[str]] = None,
custom_san: Optional[pulumi.Input[str]] = None,
deletion_protection: Optional[pulumi.Input[bool]] = None,
enable_ssh: Optional[pulumi.Input[bool]] = None,
exclude_autoscaler_nodes: Optional[pulumi.Input[bool]] = None,
image_id: Optional[pulumi.Input[str]] = None,
install_cloud_monitor: Optional[pulumi.Input[bool]] = None,
is_enterprise_security_group: Optional[pulumi.Input[bool]] = None,
key_name: Optional[pulumi.Input[str]] = None,
kms_encrypted_password: Optional[pulumi.Input[str]] = None,
kms_encryption_context: Optional[pulumi.Input[Mapping[str, Any]]] = None,
kube_config: Optional[pulumi.Input[str]] = None,
load_balancer_spec: Optional[pulumi.Input[str]] = None,
master_auto_renew: Optional[pulumi.Input[bool]] = None,
master_auto_renew_period: Optional[pulumi.Input[int]] = None,
master_disk_category: Optional[pulumi.Input[str]] = None,
master_disk_performance_level: Optional[pulumi.Input[str]] = None,
master_disk_size: Optional[pulumi.Input[int]] = None,
master_disk_snapshot_policy_id: Optional[pulumi.Input[str]] = None,
master_instance_charge_type: Optional[pulumi.Input[str]] = None,
master_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
master_nodes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesMasterNodeArgs']]]]] = None,
master_period: Optional[pulumi.Input[int]] = None,
master_period_unit: Optional[pulumi.Input[str]] = None,
master_vswitch_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
name_prefix: Optional[pulumi.Input[str]] = None,
nat_gateway_id: Optional[pulumi.Input[str]] = None,
new_nat_gateway: Optional[pulumi.Input[bool]] = None,
node_cidr_mask: Optional[pulumi.Input[int]] = None,
node_name_mode: Optional[pulumi.Input[str]] = None,
node_port_range: Optional[pulumi.Input[str]] = None,
os_type: Optional[pulumi.Input[str]] = None,
password: Optional[pulumi.Input[str]] = None,
platform: Optional[pulumi.Input[str]] = None,
pod_cidr: Optional[pulumi.Input[str]] = None,
pod_vswitch_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
proxy_mode: Optional[pulumi.Input[str]] = None,
rds_instances: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
resource_group_id: Optional[pulumi.Input[str]] = None,
runtime: Optional[pulumi.Input[pulumi.InputType['KubernetesRuntimeArgs']]] = None,
security_group_id: Optional[pulumi.Input[str]] = None,
service_account_issuer: Optional[pulumi.Input[str]] = None,
service_cidr: Optional[pulumi.Input[str]] = None,
slb_id: Optional[pulumi.Input[str]] = None,
slb_internet: Optional[pulumi.Input[str]] = None,
slb_internet_enabled: Optional[pulumi.Input[bool]] = None,
slb_intranet: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,
taints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesTaintArgs']]]]] = None,
timezone: Optional[pulumi.Input[str]] = None,
user_ca: Optional[pulumi.Input[str]] = None,
user_data: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[str]] = None,
vpc_id: Optional[pulumi.Input[str]] = None,
worker_auto_renew: Optional[pulumi.Input[bool]] = None,
worker_auto_renew_period: Optional[pulumi.Input[int]] = None,
worker_data_disk_category: Optional[pulumi.Input[str]] = None,
worker_data_disk_size: Optional[pulumi.Input[int]] = None,
worker_data_disks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesWorkerDataDiskArgs']]]]] = None,
worker_disk_category: Optional[pulumi.Input[str]] = None,
worker_disk_performance_level: Optional[pulumi.Input[str]] = None,
worker_disk_size: Optional[pulumi.Input[int]] = None,
worker_disk_snapshot_policy_id: Optional[pulumi.Input[str]] = None,
worker_instance_charge_type: Optional[pulumi.Input[str]] = None,
worker_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
worker_nodes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesWorkerNodeArgs']]]]] = None,
worker_number: Optional[pulumi.Input[int]] = None,
worker_period: Optional[pulumi.Input[int]] = None,
worker_period_unit: Optional[pulumi.Input[str]] = None,
worker_ram_role_name: Optional[pulumi.Input[str]] = None,
worker_vswitch_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'Kubernetes':
"""
Get an existing Kubernetes resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
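A minimal sketch of looking up an existing cluster's state with this method; the resource name and cluster id below are hypothetical placeholders.
```python
import pulumi
import pulumi_alicloud as alicloud

# Hypothetical cluster id; use the id of an existing ACK cluster.
existing = alicloud.cs.Kubernetes.get("imported-cluster", id="c1234567890abcdef")

pulumi.export("existing_vpc_id", existing.vpc_id)
```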
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] api_audiences: A list of API audiences for [Service Account Token Volume Projection](https://www.alibabacloud.com/help/doc-detail/160384.htm). Set this to `["kubernetes.default.svc"]` if you want to enable the Token Volume Projection feature (requires specifying `service_account_issuer` as well).
:param pulumi.Input[str] availability_zone: The zone where the new kubernetes cluster will be located. If it is not specified, `vswitch_ids` should be set, and its value will be the vswitch's zone.
:param pulumi.Input[pulumi.InputType['KubernetesCertificateAuthorityArgs']] certificate_authority: (Available in 1.105.0+) Nested attribute containing certificate authority data for your cluster.
:param pulumi.Input[str] client_cert: The path of client certificate, like `~/.kube/client-cert.pem`.
:param pulumi.Input[str] client_key: The path of client key, like `~/.kube/client-key.pem`.
:param pulumi.Input[str] cluster_ca_cert: The path of cluster ca certificate, like `~/.kube/cluster-ca-cert.pem`
:param pulumi.Input[str] cluster_domain: Cluster local domain name. Default to `cluster.local`. A domain name consists of one or more sections separated by a decimal point (.); each section is up to 63 characters long, can contain lowercase letters, digits, and hyphens (-), and must begin and end with a lowercase letter or digit.
:param pulumi.Input[pulumi.InputType['KubernetesConnectionsArgs']] connections: Map of kubernetes cluster connection information.
:param pulumi.Input[str] cpu_policy: Kubelet cpu policy. For Kubernetes 1.12.6 and later, its valid value is either `static` or `none`. Default to `none`.
:param pulumi.Input[str] custom_san: Customize the certificate SAN, multiple IP or domain names are separated by English commas (,).
:param pulumi.Input[bool] deletion_protection: Whether to enable cluster deletion protection.
:param pulumi.Input[bool] enable_ssh: Enable login to the node through SSH. Default to `false`.
:param pulumi.Input[bool] exclude_autoscaler_nodes: Exclude autoscaler nodes from `worker_nodes`. Default to `false`.
:param pulumi.Input[str] image_id: Custom image support. Must be based on CentOS7 or AliyunLinux2.
:param pulumi.Input[bool] install_cloud_monitor: Install cloud monitor agent on ECS. Default to `true`.
:param pulumi.Input[bool] is_enterprise_security_group: Whether to create an advanced security group. Default: false. See [Advanced security group](https://www.alibabacloud.com/help/doc-detail/120621.htm).
:param pulumi.Input[str] key_name: The key pair used for SSH login to the cluster nodes; you have to create it first. You have to specify one of the `password`, `key_name`, `kms_encrypted_password` fields.
:param pulumi.Input[str] kms_encrypted_password: A KMS-encrypted password used for the cs kubernetes cluster. You have to specify one of the `password`, `key_name`, `kms_encrypted_password` fields.
:param pulumi.Input[Mapping[str, Any]] kms_encryption_context: A KMS encryption context used to decrypt `kms_encrypted_password` before creating or updating a cs kubernetes with `kms_encrypted_password`. See [Encryption Context](https://www.alibabacloud.com/help/doc-detail/42975.htm). It is valid when `kms_encrypted_password` is set.
:param pulumi.Input[str] kube_config: The path of kube config, like `~/.kube/config`.
:param pulumi.Input[str] load_balancer_spec: The cluster API server load balancer instance specification, default `slb.s1.small`. For more information on how to select an LB instance specification, see [SLB instance overview](https://help.aliyun.com/document_detail/85931.html).
:param pulumi.Input[bool] master_auto_renew: Enable master payment auto-renew, defaults to false.
:param pulumi.Input[int] master_auto_renew_period: Master payment auto-renew period, it can be one of {1, 2, 3, 6, 12}.
:param pulumi.Input[str] master_disk_category: The system disk category of the master nodes. Its valid values are `cloud_ssd`, `cloud_essd` and `cloud_efficiency`. Default to `cloud_efficiency`.
:param pulumi.Input[str] master_disk_performance_level: Master node system disk performance level. When `master_disk_category` is `cloud_essd`, the optional values are `PL0`, `PL1`, `PL2` or `PL3`, but the available performance levels depend on the disk capacity. For more information, see [Enhanced SSDs](https://www.alibabacloud.com/help/doc-detail/122389.htm). Default is `PL1`.
:param pulumi.Input[int] master_disk_size: The system disk size of the master nodes. Its valid value range is [20, 500] in GB. Default to 20.
:param pulumi.Input[str] master_disk_snapshot_policy_id: Master node system disk auto snapshot policy.
:param pulumi.Input[str] master_instance_charge_type: Master payment type, its valid value is either `PostPaid` or `PrePaid`. Defaults to `PostPaid`. If the value is `PrePaid`, the fields `master_period`, `master_period_unit`, `master_auto_renew` and `master_auto_renew_period` are required.
:param pulumi.Input[Sequence[pulumi.Input[str]]] master_instance_types: The instance type of master node. Specify one type for single AZ Cluster, three types for MultiAZ Cluster.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesMasterNodeArgs']]]] master_nodes: List of cluster master nodes.
:param pulumi.Input[int] master_period: Master payment period. Its valid value is one of {1, 2, 3, 6, 12, 24, 36, 48, 60}.
:param pulumi.Input[str] master_period_unit: Master payment period unit, the valid value is `Month`.
:param pulumi.Input[str] name: The kubernetes cluster's name. It is unique in one Alicloud account.
:param pulumi.Input[str] nat_gateway_id: The ID of the NAT gateway used to launch the kubernetes cluster.
:param pulumi.Input[bool] new_nat_gateway: Whether to create a new NAT gateway while creating the kubernetes cluster. Default to true. The OpenAPI endpoints in Alibaba Cloud are not all reachable from the intranet, so turning this option on is a good choice.
:param pulumi.Input[int] node_cidr_mask: The node CIDR mask that specifies how many pods can run on a single node. 24-28 is allowed. 24 means 2^(32-24)-1=255, so the node can run at most 255 pods. Default: 24.
:param pulumi.Input[str] node_name_mode: Each node name consists of a prefix, an IP substring, and a suffix. For example, if the node IP address is 192.168.0.55, the prefix is aliyun.com, IP substring length is 5, and the suffix is test, the node name will be `aliyun.com00055test`.
:param pulumi.Input[str] node_port_range: The service port range of nodes, valid values: `30000` to `65535`. Default to `30000-32767`.
:param pulumi.Input[str] os_type: The operating system of the nodes that run pods, its valid value is either `Linux` or `Windows`. Default to `Linux`.
:param pulumi.Input[str] password: The password of ssh login cluster node. You have to specify one of `password` `key_name` `kms_encrypted_password` fields.
:param pulumi.Input[str] platform: The architecture of the nodes that run pods, its valid value is either `CentOS` or `AliyunLinux`. Default to `CentOS`.
:param pulumi.Input[str] pod_cidr: - [Flannel Specific] The CIDR block for the pod network when using Flannel.
:param pulumi.Input[Sequence[pulumi.Input[str]]] pod_vswitch_ids: - [Terway Specific] The vswitches for the pod network when using Terway. Be careful: `pod_vswitch_ids` cannot be equal to `worker_vswitch_ids` or `master_vswitch_ids`, but must be in the same availability zones.
:param pulumi.Input[str] proxy_mode: Proxy mode is an option of kube-proxy. Options: iptables | ipvs. Default: ipvs.
:param pulumi.Input[Sequence[pulumi.Input[str]]] rds_instances: RDS instance list. You can choose which RDS instances' whitelists the cluster nodes are added to.
:param pulumi.Input[str] resource_group_id: The ID of the resource group; by default these cloud resources are automatically assigned to the default resource group.
:param pulumi.Input[pulumi.InputType['KubernetesRuntimeArgs']] runtime: The runtime of containers. Default to `docker`. If you select another container runtime, see [How do I select between Docker and Sandboxed-Container](https://www.alibabacloud.com/help/doc-detail/160313.htm?spm=a2c63.p38356.b99.440.22563866AJkBgI). Detailed below.
:param pulumi.Input[str] security_group_id: The ID of the security group to which the ECS instances in the cluster belong. If it is not specified, a new Security group will be built.
:param pulumi.Input[str] service_account_issuer: The issuer of the Service Account token for [Service Account Token Volume Projection](https://www.alibabacloud.com/help/doc-detail/160384.htm), corresponds to the `iss` field in the token payload. Set this to `"kubernetes.default.svc"` to enable the Token Volume Projection feature (requires specifying `api_audiences` as well).
:param pulumi.Input[str] service_cidr: The CIDR block for the service network. It cannot overlap with the VPC CIDR or the CIDRs used by Kubernetes clusters in the VPC, and it cannot be modified after creation.
:param pulumi.Input[bool] slb_internet_enabled: Whether to create internet load balancer for API Server. Default to true.
:param pulumi.Input[str] slb_intranet: The ID of private load balancer where the current cluster master node is located.
:param pulumi.Input[Mapping[str, Any]] tags: Default nil. A map of tags assigned to the kubernetes cluster and worker nodes. Detailed below.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesTaintArgs']]]] taints: Taints ensure pods are not scheduled onto inappropriate nodes. One or more taints are applied to a node; this marks that the node should not accept any pods that do not tolerate the taints. For more information, see [Taints and Tolerations](https://kubernetes.io/docs/concepts/scheduling-eviction/taint-and-toleration/). Detailed below.
:param pulumi.Input[str] timezone: When you create a cluster, set the time zones for the Master and Worker nodes. The Master node time zone can only be set when the cluster is created; once the cluster is created, only the Worker node time zone can be changed.
:param pulumi.Input[str] user_ca: The path of customized CA cert, you can use this CA to sign client certs to connect your cluster.
:param pulumi.Input[str] user_data: Custom data that can execute on nodes. For more information, see [Prepare user data](https://www.alibabacloud.com/help/doc-detail/49121.htm).
:param pulumi.Input[str] version: Desired Kubernetes version. If you do not specify a value, the latest available version at resource creation is used, and no upgrades will occur unless you set a higher version number. To upgrade, the value must be set and increased to the desired version. Downgrades are not supported by ACK.
:param pulumi.Input[str] vpc_id: The ID of VPC where the current cluster is located.
:param pulumi.Input[bool] worker_auto_renew: Enable worker payment auto-renew, defaults to false.
:param pulumi.Input[int] worker_auto_renew_period: Worker payment auto-renew period. It can be one of {1, 2, 3, 6, 12}.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesWorkerDataDiskArgs']]]] worker_data_disks: The data disk configurations of worker nodes, such as the disk type and disk size.
:param pulumi.Input[str] worker_disk_category: The system disk category of the worker nodes. Its valid values are `cloud`, `cloud_ssd`, `cloud_essd` and `cloud_efficiency`. Default to `cloud_efficiency`.
:param pulumi.Input[str] worker_disk_performance_level: Worker node system disk performance level. When `worker_disk_category` is `cloud_essd`, the optional values are `PL0`, `PL1`, `PL2` or `PL3`, but the available performance levels depend on the disk capacity. For more information, see [Enhanced SSDs](https://www.alibabacloud.com/help/doc-detail/122389.htm). Default is `PL1`.
:param pulumi.Input[int] worker_disk_size: The system disk size of the worker nodes. Its valid value range is [40, 500] in GB. Default to 40.
:param pulumi.Input[str] worker_disk_snapshot_policy_id: Worker node system disk auto snapshot policy.
:param pulumi.Input[str] worker_instance_charge_type: Worker payment type, its valid value is either `PostPaid` or `PrePaid`. Defaults to `PostPaid`. If the value is `PrePaid`, the fields `worker_period`, `worker_period_unit`, `worker_auto_renew` and `worker_auto_renew_period` are required.
:param pulumi.Input[Sequence[pulumi.Input[str]]] worker_instance_types: The instance type of worker node. Specify one type for single AZ Cluster, three types for MultiAZ Cluster.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['KubernetesWorkerNodeArgs']]]] worker_nodes: List of cluster worker nodes.
:param pulumi.Input[int] worker_number: The worker node number of the kubernetes cluster. Default to 3. It is limited to 50; if you want to increase it, please apply for a whitelist or contact us.
:param pulumi.Input[int] worker_period: Worker payment period. The unit is `Month`. Its valid value is one of {1, 2, 3, 6, 12, 24, 36, 48, 60}.
:param pulumi.Input[str] worker_period_unit: Worker payment period unit, the valid value is `Month`.
:param pulumi.Input[str] worker_ram_role_name: The RamRole Name attached to worker node.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _KubernetesState.__new__(_KubernetesState)
__props__.__dict__["addons"] = addons
__props__.__dict__["api_audiences"] = api_audiences
__props__.__dict__["availability_zone"] = availability_zone
__props__.__dict__["certificate_authority"] = certificate_authority
__props__.__dict__["client_cert"] = client_cert
__props__.__dict__["client_key"] = client_key
__props__.__dict__["cluster_ca_cert"] = cluster_ca_cert
__props__.__dict__["cluster_domain"] = cluster_domain
__props__.__dict__["connections"] = connections
__props__.__dict__["cpu_policy"] = cpu_policy
__props__.__dict__["custom_san"] = custom_san
__props__.__dict__["deletion_protection"] = deletion_protection
__props__.__dict__["enable_ssh"] = enable_ssh
__props__.__dict__["exclude_autoscaler_nodes"] = exclude_autoscaler_nodes
__props__.__dict__["image_id"] = image_id
__props__.__dict__["install_cloud_monitor"] = install_cloud_monitor
__props__.__dict__["is_enterprise_security_group"] = is_enterprise_security_group
__props__.__dict__["key_name"] = key_name
__props__.__dict__["kms_encrypted_password"] = kms_encrypted_password
__props__.__dict__["kms_encryption_context"] = kms_encryption_context
__props__.__dict__["kube_config"] = kube_config
__props__.__dict__["load_balancer_spec"] = load_balancer_spec
__props__.__dict__["master_auto_renew"] = master_auto_renew
__props__.__dict__["master_auto_renew_period"] = master_auto_renew_period
__props__.__dict__["master_disk_category"] = master_disk_category
__props__.__dict__["master_disk_performance_level"] = master_disk_performance_level
__props__.__dict__["master_disk_size"] = master_disk_size
__props__.__dict__["master_disk_snapshot_policy_id"] = master_disk_snapshot_policy_id
__props__.__dict__["master_instance_charge_type"] = master_instance_charge_type
__props__.__dict__["master_instance_types"] = master_instance_types
__props__.__dict__["master_nodes"] = master_nodes
__props__.__dict__["master_period"] = master_period
__props__.__dict__["master_period_unit"] = master_period_unit
__props__.__dict__["master_vswitch_ids"] = master_vswitch_ids
__props__.__dict__["name"] = name
__props__.__dict__["name_prefix"] = name_prefix
__props__.__dict__["nat_gateway_id"] = nat_gateway_id
__props__.__dict__["new_nat_gateway"] = new_nat_gateway
__props__.__dict__["node_cidr_mask"] = node_cidr_mask
__props__.__dict__["node_name_mode"] = node_name_mode
__props__.__dict__["node_port_range"] = node_port_range
__props__.__dict__["os_type"] = os_type
__props__.__dict__["password"] = password
__props__.__dict__["platform"] = platform
__props__.__dict__["pod_cidr"] = pod_cidr
__props__.__dict__["pod_vswitch_ids"] = pod_vswitch_ids
__props__.__dict__["proxy_mode"] = proxy_mode
__props__.__dict__["rds_instances"] = rds_instances
__props__.__dict__["resource_group_id"] = resource_group_id
__props__.__dict__["runtime"] = runtime
__props__.__dict__["security_group_id"] = security_group_id
__props__.__dict__["service_account_issuer"] = service_account_issuer
__props__.__dict__["service_cidr"] = service_cidr
__props__.__dict__["slb_id"] = slb_id
__props__.__dict__["slb_internet"] = slb_internet
__props__.__dict__["slb_internet_enabled"] = slb_internet_enabled
__props__.__dict__["slb_intranet"] = slb_intranet
__props__.__dict__["tags"] = tags
__props__.__dict__["taints"] = taints
__props__.__dict__["timezone"] = timezone
__props__.__dict__["user_ca"] = user_ca
__props__.__dict__["user_data"] = user_data
__props__.__dict__["version"] = version
__props__.__dict__["vpc_id"] = vpc_id
__props__.__dict__["worker_auto_renew"] = worker_auto_renew
__props__.__dict__["worker_auto_renew_period"] = worker_auto_renew_period
__props__.__dict__["worker_data_disk_category"] = worker_data_disk_category
__props__.__dict__["worker_data_disk_size"] = worker_data_disk_size
__props__.__dict__["worker_data_disks"] = worker_data_disks
__props__.__dict__["worker_disk_category"] = worker_disk_category
__props__.__dict__["worker_disk_performance_level"] = worker_disk_performance_level
__props__.__dict__["worker_disk_size"] = worker_disk_size
__props__.__dict__["worker_disk_snapshot_policy_id"] = worker_disk_snapshot_policy_id
__props__.__dict__["worker_instance_charge_type"] = worker_instance_charge_type
__props__.__dict__["worker_instance_types"] = worker_instance_types
__props__.__dict__["worker_nodes"] = worker_nodes
__props__.__dict__["worker_number"] = worker_number
__props__.__dict__["worker_period"] = worker_period
__props__.__dict__["worker_period_unit"] = worker_period_unit
__props__.__dict__["worker_ram_role_name"] = worker_ram_role_name
__props__.__dict__["worker_vswitch_ids"] = worker_vswitch_ids
return Kubernetes(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def addons(self) -> pulumi.Output[Optional[Sequence['outputs.KubernetesAddon']]]:
return pulumi.get(self, "addons")
@property
@pulumi.getter(name="apiAudiences")
def api_audiences(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of API audiences for [Service Account Token Volume Projection](https://www.alibabacloud.com/help/doc-detail/160384.htm). Set this to `["kubernetes.default.svc"]` if you want to enable the Token Volume Projection feature (requires specifying `service_account_issuer` as well).
"""
return pulumi.get(self, "api_audiences")
@property
@pulumi.getter(name="availabilityZone")
def availability_zone(self) -> pulumi.Output[str]:
"""
The Zone where the new kubernetes cluster will be located. If it is not specified, `vswitch_ids` should be set, and the zone of the specified vswitch will be used.
"""
return pulumi.get(self, "availability_zone")
@property
@pulumi.getter(name="certificateAuthority")
def certificate_authority(self) -> pulumi.Output['outputs.KubernetesCertificateAuthority']:
"""
(Available in 1.105.0+) Nested attribute containing certificate authority data for your cluster.
"""
return pulumi.get(self, "certificate_authority")
@property
@pulumi.getter(name="clientCert")
def client_cert(self) -> pulumi.Output[Optional[str]]:
"""
The path of client certificate, like `~/.kube/client-cert.pem`.
"""
return pulumi.get(self, "client_cert")
@property
@pulumi.getter(name="clientKey")
def client_key(self) -> pulumi.Output[Optional[str]]:
"""
The path of client key, like `~/.kube/client-key.pem`.
"""
return pulumi.get(self, "client_key")
@property
@pulumi.getter(name="clusterCaCert")
def cluster_ca_cert(self) -> pulumi.Output[Optional[str]]:
"""
The path of the cluster CA certificate, like `~/.kube/cluster-ca-cert.pem`.
"""
return pulumi.get(self, "cluster_ca_cert")
@property
@pulumi.getter(name="clusterDomain")
def cluster_domain(self) -> pulumi.Output[Optional[str]]:
"""
Cluster local domain name. Default to `cluster.local`. A domain name consists of one or more sections separated by a dot (.); each section is up to 63 characters long, may contain lowercase letters, numerals, and hyphens (-), and must begin and end with a lowercase letter or numeral.
"""
return pulumi.get(self, "cluster_domain")
@property
@pulumi.getter
def connections(self) -> pulumi.Output['outputs.KubernetesConnections']:
"""
Map of kubernetes cluster connection information.
"""
return pulumi.get(self, "connections")
@property
@pulumi.getter(name="cpuPolicy")
def cpu_policy(self) -> pulumi.Output[Optional[str]]:
"""
Kubelet cpu policy. For Kubernetes 1.12.6 and later, its valid value is either `static` or `none`. Default to `none`.
"""
return pulumi.get(self, "cpu_policy")
@property
@pulumi.getter(name="customSan")
def custom_san(self) -> pulumi.Output[Optional[str]]:
"""
Customize the certificate SAN; multiple IPs or domain names are separated by commas (,).
"""
return pulumi.get(self, "custom_san")
@property
@pulumi.getter(name="deletionProtection")
def deletion_protection(self) -> pulumi.Output[Optional[bool]]:
"""
Whether to enable cluster deletion protection.
"""
return pulumi.get(self, "deletion_protection")
@property
@pulumi.getter(name="enableSsh")
def enable_ssh(self) -> pulumi.Output[Optional[bool]]:
"""
Enable login to the node through SSH. Default to `false`.
"""
return pulumi.get(self, "enable_ssh")
@property
@pulumi.getter(name="excludeAutoscalerNodes")
def exclude_autoscaler_nodes(self) -> pulumi.Output[Optional[bool]]:
"""
Exclude autoscaler nodes from `worker_nodes`. Default to `false`.
"""
return pulumi.get(self, "exclude_autoscaler_nodes")
@property
@pulumi.getter(name="imageId")
def image_id(self) -> pulumi.Output[Optional[str]]:
"""
Custom image support. The image must be based on CentOS7 or AliyunLinux2.
"""
return pulumi.get(self, "image_id")
@property
@pulumi.getter(name="installCloudMonitor")
def install_cloud_monitor(self) -> pulumi.Output[Optional[bool]]:
"""
Install cloud monitor agent on ECS. Default to `true`.
"""
return pulumi.get(self, "install_cloud_monitor")
@property
@pulumi.getter(name="isEnterpriseSecurityGroup")
def is_enterprise_security_group(self) -> pulumi.Output[bool]:
"""
Whether to create an advanced security group. Default: `false`. See [Advanced security group](https://www.alibabacloud.com/help/doc-detail/120621.htm).
"""
return pulumi.get(self, "is_enterprise_security_group")
@property
@pulumi.getter(name="keyName")
def key_name(self) -> pulumi.Output[Optional[str]]:
"""
The key pair used for SSH login to cluster nodes; you have to create it first. You have to specify one of the `password`, `key_name`, or `kms_encrypted_password` fields.
"""
return pulumi.get(self, "key_name")
@property
@pulumi.getter(name="kmsEncryptedPassword")
def kms_encrypted_password(self) -> pulumi.Output[Optional[str]]:
"""
A KMS-encrypted password used for the CS kubernetes cluster. You have to specify one of the `password`, `key_name`, or `kms_encrypted_password` fields.
"""
return pulumi.get(self, "kms_encrypted_password")
@property
@pulumi.getter(name="kmsEncryptionContext")
def kms_encryption_context(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
"""
A KMS encryption context used to decrypt `kms_encrypted_password` before creating or updating a CS kubernetes cluster with `kms_encrypted_password`. See [Encryption Context](https://www.alibabacloud.com/help/doc-detail/42975.htm). It is valid only when `kms_encrypted_password` is set.
"""
return pulumi.get(self, "kms_encryption_context")
@property
@pulumi.getter(name="kubeConfig")
def kube_config(self) -> pulumi.Output[Optional[str]]:
"""
The path of kube config, like `~/.kube/config`.
"""
return pulumi.get(self, "kube_config")
@property
@pulumi.getter(name="loadBalancerSpec")
def load_balancer_spec(self) -> pulumi.Output[Optional[str]]:
"""
The cluster API server load balancer instance specification. Default `slb.s1.small`. For more information on how to select an SLB instance specification, see [SLB instance overview](https://help.aliyun.com/document_detail/85931.html).
"""
return pulumi.get(self, "load_balancer_spec")
@property
@pulumi.getter(name="masterAutoRenew")
def master_auto_renew(self) -> pulumi.Output[Optional[bool]]:
"""
Enable master payment auto-renew, defaults to false.
"""
return pulumi.get(self, "master_auto_renew")
@property
@pulumi.getter(name="masterAutoRenewPeriod")
def master_auto_renew_period(self) -> pulumi.Output[Optional[int]]:
"""
Master payment auto-renew period, it can be one of {1, 2, 3, 6, 12}.
"""
return pulumi.get(self, "master_auto_renew_period")
@property
@pulumi.getter(name="masterDiskCategory")
def master_disk_category(self) -> pulumi.Output[Optional[str]]:
"""
The system disk category of the master nodes. Its valid values are `cloud_ssd`, `cloud_essd` and `cloud_efficiency`. Default to `cloud_efficiency`.
"""
return pulumi.get(self, "master_disk_category")
@property
@pulumi.getter(name="masterDiskPerformanceLevel")
def master_disk_performance_level(self) -> pulumi.Output[Optional[str]]:
"""
Master node system disk performance level. When `master_disk_category` is `cloud_essd`, the optional values are `PL0`, `PL1`, `PL2` or `PL3`, but the available levels depend on the disk capacity. For more information, see [Enhanced SSDs](https://www.alibabacloud.com/help/doc-detail/122389.htm). Default is `PL1`.
"""
return pulumi.get(self, "master_disk_performance_level")
@property
@pulumi.getter(name="masterDiskSize")
def master_disk_size(self) -> pulumi.Output[Optional[int]]:
"""
The system disk size of the master nodes, in GB. Its valid range is [20, 500]. Default to 20.
"""
return pulumi.get(self, "master_disk_size")
@property
@pulumi.getter(name="masterDiskSnapshotPolicyId")
def master_disk_snapshot_policy_id(self) -> pulumi.Output[Optional[str]]:
"""
Master node system disk auto snapshot policy.
"""
return pulumi.get(self, "master_disk_snapshot_policy_id")
@property
@pulumi.getter(name="masterInstanceChargeType")
def master_instance_charge_type(self) -> pulumi.Output[Optional[str]]:
"""
Master payment type, either `PostPaid` or `PrePaid`; defaults to `PostPaid`. If the value is `PrePaid`, the fields `master_period`, `master_period_unit`, `master_auto_renew` and `master_auto_renew_period` are required.
"""
return pulumi.get(self, "master_instance_charge_type")
@property
@pulumi.getter(name="masterInstanceTypes")
def master_instance_types(self) -> pulumi.Output[Sequence[str]]:
"""
The instance type of master node. Specify one type for single AZ Cluster, three types for MultiAZ Cluster.
"""
return pulumi.get(self, "master_instance_types")
@property
@pulumi.getter(name="masterNodes")
def master_nodes(self) -> pulumi.Output[Sequence['outputs.KubernetesMasterNode']]:
"""
List of cluster master nodes.
"""
return pulumi.get(self, "master_nodes")
@property
@pulumi.getter(name="masterPeriod")
def master_period(self) -> pulumi.Output[Optional[int]]:
"""
Master payment period. Its valid value is one of {1, 2, 3, 6, 12, 24, 36, 48, 60}.
"""
return pulumi.get(self, "master_period")
@property
@pulumi.getter(name="masterPeriodUnit")
def master_period_unit(self) -> pulumi.Output[Optional[str]]:
"""
Master payment period unit, the valid value is `Month`.
"""
return pulumi.get(self, "master_period_unit")
@property
@pulumi.getter(name="masterVswitchIds")
def master_vswitch_ids(self) -> pulumi.Output[Sequence[str]]:
return pulumi.get(self, "master_vswitch_ids")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The kubernetes cluster's name. It is unique in one Alicloud account.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="namePrefix")
def name_prefix(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "name_prefix")
@property
@pulumi.getter(name="natGatewayId")
def nat_gateway_id(self) -> pulumi.Output[str]:
"""
The ID of the NAT gateway used to launch the kubernetes cluster.
"""
return pulumi.get(self, "nat_gateway_id")
@property
@pulumi.getter(name="newNatGateway")
def new_nat_gateway(self) -> pulumi.Output[Optional[bool]]:
"""
Whether to create a new NAT gateway while creating the kubernetes cluster. Default to `true`. Because not all Alibaba Cloud OpenAPI endpoints are reachable from the intranet, turning this option on is recommended.
"""
return pulumi.get(self, "new_nat_gateway")
@property
@pulumi.getter(name="nodeCidrMask")
def node_cidr_mask(self) -> pulumi.Output[Optional[int]]:
"""
The node CIDR mask determines how many pods can run on a single node. Values 24-28 are allowed. For example, 24 means 2^(32-24)-1 = 255 addresses, so the node can run at most 255 pods. Default: 24.
"""
return pulumi.get(self, "node_cidr_mask")
@property
@pulumi.getter(name="nodeNameMode")
def node_name_mode(self) -> pulumi.Output[Optional[str]]:
"""
Each node name consists of a prefix, an IP substring, and a suffix. For example, if the node IP address is 192.168.0.55, the prefix is aliyun.com, IP substring length is 5, and the suffix is test, the node name will be `aliyun.com00055test`.
"""
return pulumi.get(self, "node_name_mode")
@property
@pulumi.getter(name="nodePortRange")
def node_port_range(self) -> pulumi.Output[Optional[str]]:
"""
The service port range of nodes, valid values: `30000` to `65535`. Default to `30000-32767`.
"""
return pulumi.get(self, "node_port_range")
@property
@pulumi.getter(name="osType")
def os_type(self) -> pulumi.Output[Optional[str]]:
"""
The operating system of the nodes that run pods, its valid value is either `Linux` or `Windows`. Default to `Linux`.
"""
return pulumi.get(self, "os_type")
@property
@pulumi.getter
def password(self) -> pulumi.Output[Optional[str]]:
"""
The password for SSH login to cluster nodes. You have to specify one of the `password`, `key_name`, or `kms_encrypted_password` fields.
"""
return pulumi.get(self, "password")
@property
@pulumi.getter
def platform(self) -> pulumi.Output[Optional[str]]:
"""
The architecture of the nodes that run pods; its valid value is either `CentOS` or `AliyunLinux`. Default to `CentOS`.
"""
return pulumi.get(self, "platform")
@property
@pulumi.getter(name="podCidr")
def pod_cidr(self) -> pulumi.Output[Optional[str]]:
"""
- [Flannel Specific] The CIDR block for the pod network when using Flannel.
"""
return pulumi.get(self, "pod_cidr")
@property
@pulumi.getter(name="podVswitchIds")
def pod_vswitch_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
- [Terway Specific] The vswitches for the pod network when using Terway. Note that `pod_vswitch_ids` cannot be equal to `worker_vswitch_ids` or `master_vswitch_ids`, but they must be in the same availability zones.
"""
return pulumi.get(self, "pod_vswitch_ids")
@property
@pulumi.getter(name="proxyMode")
def proxy_mode(self) -> pulumi.Output[Optional[str]]:
"""
The proxy mode option of kube-proxy. Options: `iptables` | `ipvs`. Default: `ipvs`.
"""
return pulumi.get(self, "proxy_mode")
@property
@pulumi.getter(name="rdsInstances")
def rds_instances(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
RDS instance list; the cluster nodes will be added to the whitelists of the RDS instances you choose.
"""
return pulumi.get(self, "rds_instances")
@property
@pulumi.getter(name="resourceGroupId")
def resource_group_id(self) -> pulumi.Output[str]:
"""
The ID of the resource group; by default these cloud resources are automatically assigned to the default resource group.
"""
return pulumi.get(self, "resource_group_id")
@property
@pulumi.getter
def runtime(self) -> pulumi.Output[Optional['outputs.KubernetesRuntime']]:
"""
The runtime of containers. Default to `docker`. If you select another container runtime, see [How do I select between Docker and Sandboxed-Container](https://www.alibabacloud.com/help/doc-detail/160313.htm?spm=a2c63.p38356.b99.440.22563866AJkBgI). Detailed below.
"""
return pulumi.get(self, "runtime")
@property
@pulumi.getter(name="securityGroupId")
def security_group_id(self) -> pulumi.Output[str]:
"""
The ID of the security group to which the ECS instances in the cluster belong. If it is not specified, a new Security group will be built.
"""
return pulumi.get(self, "security_group_id")
@property
@pulumi.getter(name="serviceAccountIssuer")
def service_account_issuer(self) -> pulumi.Output[Optional[str]]:
"""
The issuer of the Service Account token for [Service Account Token Volume Projection](https://www.alibabacloud.com/help/doc-detail/160384.htm), corresponds to the `iss` field in the token payload. Set this to `"kubernetes.default.svc"` to enable the Token Volume Projection feature (requires specifying `api_audiences` as well).
"""
return pulumi.get(self, "service_account_issuer")
@property
@pulumi.getter(name="serviceCidr")
def service_cidr(self) -> pulumi.Output[Optional[str]]:
"""
The CIDR block for the service network. It cannot be duplicated with the VPC CIDR and CIDR used by Kubernetes cluster in VPC, cannot be modified after creation.
"""
return pulumi.get(self, "service_cidr")
@property
@pulumi.getter(name="slbId")
def slb_id(self) -> pulumi.Output[str]:
return pulumi.get(self, "slb_id")
@property
@pulumi.getter(name="slbInternet")
def slb_internet(self) -> pulumi.Output[str]:
return pulumi.get(self, "slb_internet")
@property
@pulumi.getter(name="slbInternetEnabled")
def slb_internet_enabled(self) -> pulumi.Output[Optional[bool]]:
"""
Whether to create an internet load balancer for the API Server. Default to true.
"""
return pulumi.get(self, "slb_internet_enabled")
@property
@pulumi.getter(name="slbIntranet")
def slb_intranet(self) -> pulumi.Output[str]:
"""
The ID of the private load balancer where the current cluster master node is located.
"""
return pulumi.get(self, "slb_intranet")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
"""
A map of tags assigned to the kubernetes cluster and worker nodes. Default nil. Detailed below.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def taints(self) -> pulumi.Output[Optional[Sequence['outputs.KubernetesTaint']]]:
"""
Taints ensure pods are not scheduled onto inappropriate nodes. One or more taints are applied to a node; this marks that the node should not accept any pods that do not tolerate the taints. For more information, see [Taints and Tolerations](https://kubernetes.io/docs/concepts/scheduling-eviction/taint-and-toleration/). Detailed below.
"""
return pulumi.get(self, "taints")
@property
@pulumi.getter
def timezone(self) -> pulumi.Output[Optional[str]]:
"""
When you create a cluster, set the time zones for the Master and Worker nodes. The managed node time zone can only be set when the cluster is created; once the cluster is created, only the time zone of the Worker nodes can be changed.
"""
return pulumi.get(self, "timezone")
@property
@pulumi.getter(name="userCa")
def user_ca(self) -> pulumi.Output[Optional[str]]:
"""
The path of a customized CA cert; you can use this CA to sign client certs for connecting to your cluster.
"""
return pulumi.get(self, "user_ca")
@property
@pulumi.getter(name="userData")
def user_data(self) -> pulumi.Output[Optional[str]]:
"""
Custom data that can execute on nodes. For more information, see [Prepare user data](https://www.alibabacloud.com/help/doc-detail/49121.htm).
"""
return pulumi.get(self, "user_data")
@property
@pulumi.getter
def version(self) -> pulumi.Output[str]:
"""
Desired Kubernetes version. If you do not specify a value, the latest available version at resource creation is used, and no upgrades will occur unless you set a higher version number; the value must be increased to trigger an upgrade. Downgrades are not supported by ACK.
"""
return pulumi.get(self, "version")
@property
@pulumi.getter(name="vpcId")
def vpc_id(self) -> pulumi.Output[str]:
"""
The ID of VPC where the current cluster is located.
"""
return pulumi.get(self, "vpc_id")
@property
@pulumi.getter(name="workerAutoRenew")
def worker_auto_renew(self) -> pulumi.Output[Optional[bool]]:
"""
Enable worker payment auto-renew, defaults to false.
"""
return pulumi.get(self, "worker_auto_renew")
@property
@pulumi.getter(name="workerAutoRenewPeriod")
def worker_auto_renew_period(self) -> pulumi.Output[Optional[int]]:
"""
Worker payment auto-renew period; it can be one of {1, 2, 3, 6, 12}.
"""
return pulumi.get(self, "worker_auto_renew_period")
@property
@pulumi.getter(name="workerDataDiskCategory")
def worker_data_disk_category(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "worker_data_disk_category")
@property
@pulumi.getter(name="workerDataDiskSize")
def worker_data_disk_size(self) -> pulumi.Output[Optional[int]]:
return pulumi.get(self, "worker_data_disk_size")
@property
@pulumi.getter(name="workerDataDisks")
def worker_data_disks(self) -> pulumi.Output[Optional[Sequence['outputs.KubernetesWorkerDataDisk']]]:
"""
The data disk configurations of worker nodes, such as the disk type and disk size.
"""
return pulumi.get(self, "worker_data_disks")
@property
@pulumi.getter(name="workerDiskCategory")
def worker_disk_category(self) -> pulumi.Output[Optional[str]]:
"""
The system disk category of the worker nodes. Its valid values are `cloud`, `cloud_ssd`, `cloud_essd` and `cloud_efficiency`. Default to `cloud_efficiency`.
"""
return pulumi.get(self, "worker_disk_category")
@property
@pulumi.getter(name="workerDiskPerformanceLevel")
def worker_disk_performance_level(self) -> pulumi.Output[Optional[str]]:
"""
Worker node system disk performance level. When `worker_disk_category` is `cloud_essd`, the optional values are `PL0`, `PL1`, `PL2` or `PL3`, but the available levels depend on the disk capacity. For more information, see [Enhanced SSDs](https://www.alibabacloud.com/help/doc-detail/122389.htm). Default is `PL1`.
"""
return pulumi.get(self, "worker_disk_performance_level")
@property
@pulumi.getter(name="workerDiskSize")
def worker_disk_size(self) -> pulumi.Output[Optional[int]]:
"""
The system disk size of the worker nodes, in GB. Its valid range is [40, 500]. Default to 40.
"""
return pulumi.get(self, "worker_disk_size")
@property
@pulumi.getter(name="workerDiskSnapshotPolicyId")
def worker_disk_snapshot_policy_id(self) -> pulumi.Output[Optional[str]]:
"""
Worker node system disk auto snapshot policy.
"""
return pulumi.get(self, "worker_disk_snapshot_policy_id")
@property
@pulumi.getter(name="workerInstanceChargeType")
def worker_instance_charge_type(self) -> pulumi.Output[Optional[str]]:
"""
Worker payment type, either `PostPaid` or `PrePaid`. Defaults to `PostPaid`. If the value is `PrePaid`, the fields `worker_period`, `worker_period_unit`, `worker_auto_renew` and `worker_auto_renew_period` are required.
"""
return pulumi.get(self, "worker_instance_charge_type")
@property
@pulumi.getter(name="workerInstanceTypes")
def worker_instance_types(self) -> pulumi.Output[Sequence[str]]:
"""
The instance type of worker node. Specify one type for single AZ Cluster, three types for MultiAZ Cluster.
"""
return pulumi.get(self, "worker_instance_types")
@property
@pulumi.getter(name="workerNodes")
def worker_nodes(self) -> pulumi.Output[Sequence['outputs.KubernetesWorkerNode']]:
"""
List of cluster worker nodes.
"""
return pulumi.get(self, "worker_nodes")
@property
@pulumi.getter(name="workerNumber")
def worker_number(self) -> pulumi.Output[int]:
"""
The number of worker nodes in the kubernetes cluster. Default to 3. It is limited to 50; if you want to enlarge it, please apply for a whitelist or contact us.
"""
return pulumi.get(self, "worker_number")
@property
@pulumi.getter(name="workerPeriod")
def worker_period(self) -> pulumi.Output[Optional[int]]:
"""
Worker payment period. The unit is `Month`. Its valid value is one of {1, 2, 3, 6, 12, 24, 36, 48, 60}.
"""
return pulumi.get(self, "worker_period")
@property
@pulumi.getter(name="workerPeriodUnit")
def worker_period_unit(self) -> pulumi.Output[Optional[str]]:
"""
Worker payment period unit, the valid value is `Month`.
"""
return pulumi.get(self, "worker_period_unit")
@property
@pulumi.getter(name="workerRamRoleName")
def worker_ram_role_name(self) -> pulumi.Output[str]:
"""
The RAM role name attached to the worker nodes.
"""
return pulumi.get(self, "worker_ram_role_name")
@property
@pulumi.getter(name="workerVswitchIds")
def worker_vswitch_ids(self) -> pulumi.Output[Sequence[str]]:
return pulumi.get(self, "worker_vswitch_ids")
| 58.032285
| 437
| 0.68748
| 27,578
| 212,108
| 5.063819
| 0.022518
| 0.085779
| 0.09034
| 0.060494
| 0.98048
| 0.974808
| 0.966452
| 0.961511
| 0.954458
| 0.934902
| 0
| 0.00876
| 0.207805
| 212,108
| 3,654
| 438
| 58.048166
| 0.822336
| 0.372164
| 0
| 0.889371
| 1
| 0.000868
| 0.130795
| 0.048565
| 0
| 0
| 0
| 0
| 0
| 1
| 0.170065
| false
| 0.021692
| 0.003037
| 0.009544
| 0.275922
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
95557adc210b24d0af49b69c96374789d753a139
| 64
|
py
|
Python
|
templates/congig.py
|
emilo1/cooking-rec
|
f959e2cc7a32730ce7332a0528a32ad0fc01aed6
|
[
"MIT"
] | null | null | null |
templates/congig.py
|
emilo1/cooking-rec
|
f959e2cc7a32730ce7332a0528a32ad0fc01aed6
|
[
"MIT"
] | null | null | null |
templates/congig.py
|
emilo1/cooking-rec
|
f959e2cc7a32730ce7332a0528a32ad0fc01aed6
|
[
"MIT"
] | null | null | null |
api_key = "559df1ea76144882a6506999036679f7"
dbkey = "Maura123$"
| 32
| 44
| 0.8125
| 5
| 64
| 10.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.491525
| 0.078125
| 64
| 2
| 45
| 32
| 0.372881
| 0
| 0
| 0
| 0
| 0
| 0.630769
| 0.492308
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9577c01b2448983343c1eb2db569bcc3425901eb
| 190
|
py
|
Python
|
mcx/inference/__init__.py
|
kancurochat/mcx
|
ceaa175f5b092af46b9deadcc8401f0a30f225b2
|
[
"Apache-2.0"
] | 1
|
2021-06-29T11:13:05.000Z
|
2021-06-29T11:13:05.000Z
|
mcx/inference/__init__.py
|
kancurochat/mcx
|
ceaa175f5b092af46b9deadcc8401f0a30f225b2
|
[
"Apache-2.0"
] | null | null | null |
mcx/inference/__init__.py
|
kancurochat/mcx
|
ceaa175f5b092af46b9deadcc8401f0a30f225b2
|
[
"Apache-2.0"
] | null | null | null |
from .metrics import gaussian_euclidean_metric
from .proposals import empirical_hmc_proposal, hmc_proposal
__all__ = ["gaussian_euclidean_metric", "empirical_hmc_proposal", "hmc_proposal"]
| 38
| 81
| 0.847368
| 23
| 190
| 6.391304
| 0.478261
| 0.29932
| 0.312925
| 0.312925
| 0.421769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 190
| 4
| 82
| 47.5
| 0.84
| 0
| 0
| 0
| 0
| 0
| 0.310526
| 0.247368
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
957c99412cc41bfe95485d113f2d4d7887e86ece
| 474
|
py
|
Python
|
Code06-02.py
|
jaywoong/learn_algorithm
|
31b0da20019d300b730a22da86072c22967a2966
|
[
"Apache-2.0"
] | null | null | null |
Code06-02.py
|
jaywoong/learn_algorithm
|
31b0da20019d300b730a22da86072c22967a2966
|
[
"Apache-2.0"
] | null | null | null |
Code06-02.py
|
jaywoong/learn_algorithm
|
31b0da20019d300b730a22da86072c22967a2966
|
[
"Apache-2.0"
] | null | null | null |
stack = ["커피", "녹차", "꿀물", None, None]  # "coffee", "green tea", "honey water"
top = 2
print("--- 스택 상태-----")  # "--- stack state ---"
for i in range(len(stack)-1, -1, -1) :
print(stack[i])
print("-------------------")
data = stack[top]
stack[top] = None
top -= 1
print("pop -->", data)
data = stack[top]
stack[top] = None
top -= 1
print("pop -->", data)
data = stack[top]
stack[top] = None
top -= 1
print("pop -->", data)
print("-------------------")
print("--- 스택 상태-----")
for i in range(len(stack)-1, -1, -1) :
print(stack[i])
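# The three identical pop blocks above could be expressed once as a helper; a
# small sketch (the function name `pop` is illustrative, not from the source):
def pop(stack, top):
    """Return the popped item and the new top index; (None, top) if the stack is empty."""
    if top < 0:
        return None, top
    data = stack[top]
    stack[top] = None
    return data, top - 1
# usage: data, top = pop(stack, top)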
| 16.928571
| 38
| 0.5
| 73
| 474
| 3.246575
| 0.246575
| 0.202532
| 0.151899
| 0.21519
| 0.860759
| 0.860759
| 0.860759
| 0.860759
| 0.860759
| 0.860759
| 0
| 0.025189
| 0.162447
| 474
| 27
| 39
| 17.555556
| 0.571788
| 0
| 0
| 0.909091
| 0
| 0
| 0.196203
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.409091
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
95c55ab47c807fe235a5b26886a84b5517c2929b
| 21
|
py
|
Python
|
Aulas Python/Aulas/Aula01-Primeiros comdando em Python3/ex02.py
|
marcosviniciusbarbosa/Curso-Python
|
fc6bba3a6d0adfd51d63f789dec83b5d3ac83e4b
|
[
"MIT"
] | null | null | null |
Aulas Python/Aulas/Aula01-Primeiros comdando em Python3/ex02.py
|
marcosviniciusbarbosa/Curso-Python
|
fc6bba3a6d0adfd51d63f789dec83b5d3ac83e4b
|
[
"MIT"
] | null | null | null |
Aulas Python/Aulas/Aula01-Primeiros comdando em Python3/ex02.py
|
marcosviniciusbarbosa/Curso-Python
|
fc6bba3a6d0adfd51d63f789dec83b5d3ac83e4b
|
[
"MIT"
] | null | null | null |
#Ex02
print(2+4)
7+10
| 7
| 10
| 0.666667
| 6
| 21
| 2.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.368421
| 0.095238
| 21
| 3
| 11
| 7
| 0.368421
| 0.190476
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
2517164048dc25c10f8bcf2e4e99a9532bdbf8ef
| 24,509
|
py
|
Python
|
src/bots/cogs/warframe_calculators.py
|
cyork95/KronaBotFam
|
39a4e920a80d6b666fa1a5a1ca075a00e21bf124
|
[
"MIT"
] | 1
|
2020-06-21T05:16:47.000Z
|
2020-06-21T05:16:47.000Z
|
src/bots/cogs/warframe_calculators.py
|
cyork95/KronaBotFam
|
39a4e920a80d6b666fa1a5a1ca075a00e21bf124
|
[
"MIT"
] | null | null | null |
src/bots/cogs/warframe_calculators.py
|
cyork95/KronaBotFam
|
39a4e920a80d6b666fa1a5a1ca075a00e21bf124
|
[
"MIT"
] | null | null | null |
import json
import discord
from discord.ext import commands
with open('./cogs/cog_resources/primary.json') as json_file:
primary = json.load(json_file)
# assuming each weapon table is meant to load from its matching file
with open('./cogs/cog_resources/secondary.json') as json_file:
secondary = json.load(json_file)
with open('./cogs/cog_resources/melee.json') as json_file:
melee = json.load(json_file)
with open('./cogs/cog_resources/warframes.json') as json_file:
warframes = json.load(json_file)
with open('./cogs/cog_resources/enemy.json') as json_file:
enemy = json.load(json_file)
class WarframeCalculators(commands.Cog):
def __init__(self, client):
self.client = client
@commands.command(name="specter", description='Specter Scaling Calculator', brief='Specter Scaling Calculator',
usage='<Warframe> <Weapon Name or TotalDMG> <CurrentLevel> <MissionLevel>')
async def specter(self, ctx, specter, damage, current_level, mission_level, *args):
damage = damage.title()
specter = specter.title()
if not damage.isdigit():
for i in primary:
if damage == i['name']:
damage = int(i['totalDamage']) * int(i['multishot'])
fire_rate = float(i['fireRate'])
for i in secondary:
if damage == i['name']:
damage = int(i['totalDamage']) * int(i['multishot'])
fire_rate = float(i['fireRate'])
for i in melee:
if damage == i['name']:
damage = int(i['totalDamage'])
fire_rate = float(i['fireRate'])
try:
mission_level = mission_level.split('-')
mission_level = ((int(mission_level[0]) + int(mission_level[1])) / 2)
except:
mission_level = mission_level[0]
level_diff = float(current_level) - float(mission_level)
multi = 1 + 0.015 * (float(level_diff)) ** 1.55
damage = float(multi) * float(damage) * float(fire_rate)
for i in warframes:
if specter in i['name']:
if 'old' in args:
health_multi = (1 + float(level_diff) ** 1.75 * 0.005)
shield_multi = (1 + float(level_diff) ** 2 * 0.0075)
armor_multi = (1 + float(level_diff) ** 1.75 * 0.005)
health_value_old = int(i['health']) * health_multi
shield_value_old = int(i['shield']) * shield_multi
armor_value_old = int(i['armor']) * armor_multi
current_shield_value = shield_value_old
current_health_value = health_value_old
current_armor_value = armor_value_old
if not i['shield'] == 0 and i['armor'] == 0:
ehp = i['health'] * (health_multi + shield_multi * (i['shield'] / i['health']))
damage_reduction = 0
elif i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (health_multi * (1 + ((i['armor'] * armor_multi) / 300)))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
elif not i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (health_multi * (1 + ((i['armor'] * armor_multi) / 300)) + shield_multi * (
(i['shield']) / (i['health'])))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
damage_reduction = round(damage_reduction, 5)
embed = discord.Embed(title=i['name'],
description='**DPS:** {:,}'.format(int(damage)) + '\n**Health:** {:,}'.format(
int(current_health_value)) + '\n**Armor:** {:,}'.format(
int(current_armor_value)) + '\n**Shields:** {:,}'.format(
int(current_shield_value)) + '\n**DR:** {}%'.format(
damage_reduction) + '\n**EHP:** {:,}'.format(int(ehp)),
colour=discord.Colour(0x7ed321))
embed.set_footer(text='This does not take into account damage types.')
await ctx.send(embed=embed)
else:
health_multi_low = (1 + float(level_diff) ** 2 * 0.015)
shield_multi_low = (1 + float(level_diff) ** 1.75 * 0.02)
armor_multi_low = (1 + float(level_diff) ** 1.75 * 0.005)
health_multi_high = (1 + float(level_diff) ** 0.5 * 10.7331)
shield_multi_high = (1 + float(level_diff) ** 0.75 * 1.6)
armor_multi_high = (1 + float(level_diff) ** 0.75 * 0.4)
health_value_low = int(i['health']) * health_multi_low
shield_value_low = int(i['shield']) * shield_multi_low
armor_value_low = int(i['armor']) * armor_multi_low
health_value_high = int(i['health']) * health_multi_high
shield_value_high = int(i['shield']) * shield_multi_high
armor_value_high = int(i['armor']) * armor_multi_high
if int(level_diff) <= 70:
health_multi = health_multi_low
shield_multi = shield_multi_low
armor_multi = armor_multi_low
current_shield_value = shield_value_low
current_health_value = health_value_low
current_armor_value = armor_value_low
if not i['shield'] == 0 and i['armor'] == 0:
ehp = i['health'] * (health_multi + shield_multi * (i['shield'] / i['health']))
damage_reduction = 0
elif i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (health_multi * (1 + ((i['armor'] * armor_multi) / 300)))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
elif not i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (
health_multi * (1 + ((i['armor'] * armor_multi) / 300)) + shield_multi * (
(i['shield']) / (i['health'])))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
damage_reduction = round(damage_reduction, 5)
if int(level_diff) >= 80:
health_multi = health_multi_high
shield_multi = shield_multi_high
armor_multi = armor_multi_high
current_shield_value = shield_value_high
current_health_value = health_value_high
current_armor_value = armor_value_high
damage_reduction = 0
ehp = current_health_value
if not i['shield'] == 0 and i['armor'] == 0:
ehp = i['health'] * (health_multi + shield_multi * (i['shield'] / i['health']))
damage_reduction = 0
elif i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (health_multi * (1 + ((i['armor'] * armor_multi) / 300)))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
elif not i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (
health_multi * (1 + ((i['armor'] * armor_multi) / 300)) + shield_multi * (
(i['shield']) / (i['health'])))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
damage_reduction = round(damage_reduction, 5)
if 70 <= level_diff <= 80:
x = (level_diff - 70) / (80 - 70)
s = (3 * x ** 2) - (2 * x ** 3)
health_multi = shield_value_low + s * shield_value_high
shield_multi = health_value_low + s * health_value_high
armor_multi = armor_value_low + s * armor_value_high
current_shield_value = (1 - s) * shield_value_low + s * shield_value_high
current_health_value = (1 - s) * health_value_low + s * health_value_high
current_armor_value = (1 - s) * armor_value_low + s * armor_value_high
if not i['shield'] == 0 and i['armor'] == 0:
ehp = i['health'] * (health_multi + shield_multi * (i['shield'] / i['health']))
damage_reduction = 0
elif i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (health_multi * (1 + ((i['armor'] * armor_multi) / 300)))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
elif not i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (health_multi * (1 + ((i['armor'] * armor_multi) / 300)) + shield_multi
* ((i['shield']) / (i['health'])))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
damage_reduction = round(damage_reduction, 5)
embed = discord.Embed(title=i['name'],
description='**DPS:** {:,}'.format(int(damage)) + '\n**Health:** {:,}'.format(
int(current_health_value)) + '\n**Armor:** {:,}'.format(
int(current_armor_value)) + '\n**Shields:** {:,}'.format(
int(current_shield_value)) + '\n**DR:** {}%'.format(
damage_reduction) + '\n**EHP:** {:,}'.format(int(ehp)),
colour=discord.Colour(0x7ed321))
embed.set_footer(text='This does not take into account damage types.')
await ctx.send(embed=embed)
@specter.error
async def specter_error(self, ctx, error):
embed = discord.Embed(title='Syntax Error',
description='>specter <Warframe> <Base Dmg / Weapon> <CurrentLevel> <MissionLevel>\nUse '
'`""` to use weapons with multiple words.\nUse the base mission level. Ex. '
'Mot = 40\n`!specter Nidus "Prisma Gorgon" 1000 40`',
colour=discord.Colour(0x900f0f))
await ctx.send(embed=embed)
@commands.command(name="enemy")
async def enemy(self, ctx, enemy_usr, damage, current_level, mission_level, *args):
old = ''.join(args)
damage = damage.title()
enemy_x = enemy_usr.title()
fire_rate = ''
try:
if damage == 'Primary':
for i in enemy:
if enemy_x == i['name']:
weapon = i['primary']
for x in primary:
if weapon == x['name']:
damage = int(x['totalDamage']) * int(x['multishot'])
fire_rate = float(x['fireRate'])
for i in enemy:
if enemy_x == i['name']:
weapon = i['primary']
for x in secondary:
if weapon == x['name']:
damage = int(x['totalDamage']) * int(x['multishot'])
fire_rate = float(x['fireRate'])
if damage == 'Melee':
for i in enemy:
if enemy_x == i['name']:
weapon = i['melee']
for x in melee:
if weapon == x['name']:
damage = int(x['totalDamage'])
fire_rate = float(x['fireRate'])
except:
damage = 0
level_diff = float(current_level) - float(mission_level)
multi = 1 + 0.015 * (float(level_diff)) ** 1.55
if fire_rate:
damage = float(multi) * float(damage) * float(fire_rate)
else:
damage = float(multi) * float(damage)
count = 0
for i in enemy:
if enemy_x == i['name']:
if count == 0:
if 'old' in old:
count += 1
health_multi = (1 + float(level_diff) ** 1.75 * 0.005)
shield_multi = (1 + float(level_diff) ** 2 * 0.0075)
armor_multi = (1 + float(level_diff) ** 1.75 * 0.005)
health_value_old = int(i['health']) * health_multi
shield_value_old = int(i['shield']) * shield_multi
armor_value_old = int(i['armor']) * armor_multi
current_shield_value = shield_value_old
current_health_value = health_value_old
current_armor_value = armor_value_old
if not i['shield'] == 0 and i['armor'] == 0:
ehp = i['health'] * (health_multi + shield_multi * (i['shield'] / i['health']))
damage_reduction = 0
elif i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (health_multi * (1 + ((i['armor'] * armor_multi) / 300)))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
elif not i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (
health_multi * (1 + ((i['armor'] * armor_multi) / 300)) + shield_multi * (
(i['shield']) / (i['health'])))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
damage_reduction = round(damage_reduction, 5)
embed = discord.Embed(title=i['name'],
description='**DPS:** {:,}'.format(
int(damage)) + '\n**Health:** {:,}'.format(
int(current_health_value)) + '\n**Armor:** {:,}'.format(
int(current_armor_value)) + '\n**Shields:** {:,}'.format(
int(current_shield_value)) + '\n**DR:** {}%'.format(
damage_reduction) + '\n**EHP:** {:,}'.format(int(ehp)),
colour=discord.Colour(0x7ed321))
embed.set_footer(text='This does not take into account damage types.')
await ctx.send(embed=embed)
else:
count += 1
health_multi_low = (1 + float(level_diff) ** 2 * 0.015)
shield_multi_low = (1 + float(level_diff) ** 1.75 * 0.02)
armor_multi_low = (1 + float(level_diff) ** 1.75 * 0.005)
health_multi_high = (1 + float(level_diff) ** 0.5 * 10.7331)
shield_multi_high = (1 + float(level_diff) ** 0.75 * 1.6)
armor_multi_high = (1 + float(level_diff) ** 0.75 * 0.4)
health_value_low = int(i['health']) * health_multi_low
shield_value_low = int(i['shield']) * shield_multi_low
armor_value_low = int(i['armor']) * armor_multi_low
health_value_high = int(i['health']) * health_multi_high
shield_value_high = int(i['shield']) * shield_multi_high
armor_value_high = int(i['armor']) * armor_multi_high
if int(level_diff) <= 70:
health_multi = health_multi_low
shield_multi = shield_multi_low
armor_multi = armor_multi_low
current_shield_value = shield_value_low
current_health_value = health_value_low
current_armor_value = armor_value_low
if not i['shield'] == 0 and i['armor'] == 0:
ehp = i['health'] * (health_multi + shield_multi * (i['shield'] / i['health']))
damage_reduction = 0
elif i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (health_multi * (1 + ((i['armor'] * armor_multi) / 300)))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
elif not i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (
health_multi * (1 + ((i['armor'] * armor_multi) / 300)) + shield_multi * (
(i['shield']) / (i['health'])))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
damage_reduction = round(damage_reduction, 5)
if int(level_diff) >= 80:
health_multi = health_multi_high
shield_multi = shield_multi_high
armor_multi = armor_multi_high
current_shield_value = shield_value_high
current_health_value = health_value_high
current_armor_value = armor_value_high
damage_reduction = 0
ehp = current_health_value
if not i['shield'] == 0 and i['armor'] == 0:
ehp = i['health'] * (health_multi + shield_multi * (i['shield'] / i['health']))
damage_reduction = 0
elif i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (health_multi * (1 + ((i['armor'] * armor_multi) / 300)))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
elif not i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (
health_multi * (1 + ((i['armor'] * armor_multi) / 300)) + shield_multi * (
(i['shield']) / (i['health'])))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
damage_reduction = round(damage_reduction, 5)
if 70 <= level_diff <= 80:
x = (level_diff - 70) / (80 - 70)
s = (3 * x ** 2) - (2 * x ** 3)
health_multi = shield_value_low + s * shield_value_high
shield_multi = health_value_low + s * health_value_high
armor_multi = armor_value_low + s * armor_value_high
current_shield_value = (1 - s) * shield_value_low + s * shield_value_high
current_health_value = (1 - s) * health_value_low + s * health_value_high
current_armor_value = (1 - s) * armor_value_low + s * armor_value_high
if not i['shield'] == 0 and i['armor'] == 0:
ehp = i['health'] * (health_multi + shield_multi * (i['shield'] / i['health']))
damage_reduction = 0
elif i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (health_multi * (1 + ((i['armor'] * armor_multi) / 300)))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
elif not i['shield'] == 0 and not i['armor'] == 0:
ehp = i['health'] * (
health_multi * (1 + ((i['armor'] * armor_multi) / 300)) + shield_multi * (
(i['shield']) / (i['health'])))
damage_reduction = round((current_armor_value / (current_armor_value + 300)), 5) * 100
damage_reduction = round(damage_reduction, 5)
embed = discord.Embed(title=i['name'],
description='**DPS:** {:,}'.format(
int(damage)) + '\n**Health:** {:,}'.format(
int(current_health_value)) + '\n**Armor:** {:,}'.format(
int(current_armor_value)) + '\n**Shields:** {:,}'.format(
int(current_shield_value)) + '\n**DR:** {}%'.format(
damage_reduction) + '\n**EHP:** {:,}'.format(int(ehp)),
colour=discord.Colour(0x7ed321))
embed.set_footer(text='This does not take into account damage types.')
await ctx.send(embed=embed)
@enemy.error
async def enemy_error(self, ctx, error):
print(error)
embed = discord.Embed(title='Syntax Error',
description='>enemy "Enemy" <primary/melee> <CurrentLevel> <Mission Level>\n`Primary` = '
'Normal weapon\n`Melee` = Disarmed\nUse the base mission level. Ex. Mot = '
'40\nSome enemies may not have known weapon references and will show 0 damage'
'.\n!enemy "Corrupted Heavy Gunner" primary 1000 40',
colour=discord.Colour(0x900f0f))
await ctx.send(embed=embed)
@commands.command(name="ehp")
async def ehp(self, ctx, health, armor):
calc = int(health) * (1 + int(armor) / 300)
ehpe = discord.Embed(title="Effective Health Calculation",
description="**Armor:** *" + str(armor) + "*\n**Health:** *" + str(
health) + "* \n**Effective Health:** *" + str(int(calc)) + "*",
colour=discord.Colour(0x900f0f))
ehpe.set_footer(text="This does not take into account damage types.")
await ctx.send(embed=ehpe)
@ehp.error
async def ehp_error(self, ctx, error):
await ctx.send("Invalid syntax.\n>ehp <total health> <total armor> <total shields>")
@commands.command(name="status")
async def status(self, ctx, base, status):
bs = int(base)
st = float(status)
calc = bs + (bs * (float(st) / 100))
await ctx.send(calc)
@status.error
async def status_error(self, ctx, error):
await ctx.send("Invalid Syntax.\n!status <base status> <status to add>")
def setup(client):
client.add_cog(WarframeCalculators(client))
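# A worked instance of the effective-health formula used by the `ehp` command
# above (the input values are illustrative): with health=300 and armor=600,
# ehp = 300 * (1 + 600 / 300) = 300 * 3 = 900.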
| 65.183511
| 120
| 0.453833
| 2,468
| 24,509
| 4.28201
| 0.067261
| 0.06056
| 0.07078
| 0.051098
| 0.835636
| 0.826646
| 0.820212
| 0.820212
| 0.791257
| 0.773846
| 0
| 0.035282
| 0.424089
| 24,509
| 375
| 121
| 65.357333
| 0.713425
| 0
| 0
| 0.759104
| 0
| 0.002801
| 0.107716
| 0.006732
| 0
| 0
| 0.002285
| 0
| 0
| 1
| 0.005602
| false
| 0
| 0.008403
| 0
| 0.016807
| 0.002801
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6c076fa911fe516dd23fcc4a546a70af73df43ee
| 463
|
py
|
Python
|
Tests/Environments/Connect4/testCasesTerminalEvaluation.py
|
ikaroszhang96/Convex-AlphaZero
|
d96c9790529e48ff4e2ec34649bdc312a0abcc53
|
[
"MIT"
] | null | null | null |
Tests/Environments/Connect4/testCasesTerminalEvaluation.py
|
ikaroszhang96/Convex-AlphaZero
|
d96c9790529e48ff4e2ec34649bdc312a0abcc53
|
[
"MIT"
] | null | null | null |
Tests/Environments/Connect4/testCasesTerminalEvaluation.py
|
ikaroszhang96/Convex-AlphaZero
|
d96c9790529e48ff4e2ec34649bdc312a0abcc53
|
[
"MIT"
] | null | null | null |
TEST_CASES = [
([
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0]],
False),
([
[-1, -1, -1, 1, -1, 1, 1],
[1, 1, -1, -1, 1, -1, 1],
[-1, -1, 1, 1, 1, -1, -1],
[1, 1, -1, -1, 1, 1, 1],
[-1, -1, -1, 1, -1, -1, 1],
[1, 1, 1, -1, -1, -1, 1]],
True),
]
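# A sketch of how these cases might be consumed by a test runner; `is_terminal`
# is an assumed evaluator, not defined in this file:
# for board, expected in TEST_CASES:
#     assert is_terminal(board) == expected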
| 24.368421
| 36
| 0.222462
| 88
| 463
| 1.159091
| 0.068182
| 0.803922
| 1.176471
| 1.529412
| 0.823529
| 0.823529
| 0.823529
| 0.823529
| 0.823529
| 0.823529
| 0
| 0.342857
| 0.470842
| 463
| 18
| 37
| 25.722222
| 0.073469
| 0
| 0
| 0.388889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
6c32888a120f3af396002772fd64afcbc154fa85
| 106
|
py
|
Python
|
tests/test_kv.py
|
hasanatkazmi/kv
|
39228a1431e91063b44ab5952a17bedc219eecb6
|
[
"MIT"
] | null | null | null |
tests/test_kv.py
|
hasanatkazmi/kv
|
39228a1431e91063b44ab5952a17bedc219eecb6
|
[
"MIT"
] | null | null | null |
tests/test_kv.py
|
hasanatkazmi/kv
|
39228a1431e91063b44ab5952a17bedc219eecb6
|
[
"MIT"
] | null | null | null |
from dkv_kv.kv import say_hello
def test_say_hello():
assert say_hello('hkazmi') == 'Hello hkazmi'
| 15.142857
| 48
| 0.716981
| 17
| 106
| 4.176471
| 0.588235
| 0.338028
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169811
| 106
| 6
| 49
| 17.666667
| 0.806818
| 0
| 0
| 0
| 0
| 0
| 0.171429
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6c613d2f7adae3278a6d9e47eac17617af87ea81
| 2,115
|
py
|
Python
|
tests/test_class.py
|
maikereis/BorealAPI
|
2f49787a482302ff587d5435fb5303da6811754c
|
[
"MIT"
] | null | null | null |
tests/test_class.py
|
maikereis/BorealAPI
|
2f49787a482302ff587d5435fb5303da6811754c
|
[
"MIT"
] | null | null | null |
tests/test_class.py
|
maikereis/BorealAPI
|
2f49787a482302ff587d5435fb5303da6811754c
|
[
"MIT"
] | null | null | null |
import pytest
from models import Order, Brewery, Breweries_names
def test_Order():
# previous_order
assert Order(user_client="Joao", order_value=1.2, previous_order=False)
assert Order(user_client="Joao", order_value=1.2, previous_order="n")
assert Order(user_client="Joao", order_value=1.2, previous_order=True)
assert Order(user_client="Joao", order_value=1.2, previous_order="y")
# order_value
assert Order(user_client="Joao", order_value=0, previous_order="y")
assert Order(user_client="Joao", order_value=1502, previous_order="y")
assert Order(user_client="Joao", order_value=3.3, previous_order="y")
with pytest.raises(ValueError):
Order(user_client="Joao", order_value=-1, previous_order=True)
with pytest.raises(ValueError):
Order(user_client="Joao", order_value="a", previous_order=True)
# user_client
assert Order(user_client="Joao", order_value=0, previous_order=True)
assert Order(user_client="Bruna", order_value=0, previous_order=True)
assert Order(user_client="Maria", order_value=0, previous_order=True)
assert Order(user_client="Juan", order_value=0, previous_order=True)
with pytest.raises(ValueError):
Order(user_client="João", order_value=0, previous_order=True)
with pytest.raises(ValueError):
Order(user_client="José", order_value=0, previous_order=True)
with pytest.raises(ValueError):
Order(user_client=1234, order_value=0, previous_order=True)
with pytest.raises(ValueError):
Order(user_client=5.5, order_value=0, previous_order=True)
def test_Brewery():
assert Brewery(name="12 West Brewing Company - Production Facility")
assert Brewery(name="12 West Brewing Company - Production Facility.")
with pytest.raises(ValueError):
Brewery(name="12' West Brewing Company - Production Facility.")
with pytest.raises(ValueError):
Brewery(name="12' West Brewing Company - Production Facility.")
def test_Breweries_list():
assert Breweries_names(names=["dd", "sdad", "Dsa"])
assert Breweries_names(names=[1, "dsd", "sd"])
| 42.3
| 75
| 0.725296
| 292
| 2,115
| 5.044521
| 0.167808
| 0.158859
| 0.173116
| 0.156823
| 0.816022
| 0.816022
| 0.797013
| 0.793618
| 0.793618
| 0.752206
| 0
| 0.021655
| 0.148463
| 2,115
| 49
| 76
| 43.163265
| 0.796224
| 0.017967
| 0
| 0.277778
| 0
| 0
| 0.128799
| 0
| 0
| 0
| 0
| 0
| 0.416667
| 1
| 0.083333
| true
| 0
| 0.055556
| 0
| 0.138889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6c6bed03c8d75b71e2efeef23cfee61a62007ed1
| 125
|
py
|
Python
|
tests/print_error_log.py
|
SynBioHub/synbiohub
|
57f00336714de8f0385d5d6b6053cd2ea4be297b
|
[
"BSD-2-Clause"
] | 53
|
2017-03-13T11:10:24.000Z
|
2022-03-23T00:34:24.000Z
|
tests/print_error_log.py
|
danyentezari/synbiohub
|
09317e3eb3820c596502efad441031835698ad54
|
[
"BSD-2-Clause"
] | 1,049
|
2017-02-17T21:14:42.000Z
|
2022-03-22T22:57:04.000Z
|
tests/print_error_log.py
|
danyentezari/synbiohub
|
09317e3eb3820c596502efad441031835698ad54
|
[
"BSD-2-Clause"
] | 24
|
2017-03-14T07:39:20.000Z
|
2021-11-04T18:51:08.000Z
|
from fetch_logs import get_end_of_error_log
def print_entire_log():
print(get_end_of_error_log(-1))
print_entire_log()
| 17.857143
| 43
| 0.808
| 23
| 125
| 3.826087
| 0.565217
| 0.136364
| 0.181818
| 0.295455
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009009
| 0.112
| 125
| 6
| 44
| 20.833333
| 0.783784
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0
| 0.5
| 0.75
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
6c782e8f91e33ca1fac71b930a524b36c0ed6337
| 5,515
|
py
|
Python
|
yuntu/collection/utils.py
|
CONABIO/yuntu
|
5ffb3bff88397d1e929d8bd76998cccff2c45db9
|
[
"MIT"
] | null | null | null |
yuntu/collection/utils.py
|
CONABIO/yuntu
|
5ffb3bff88397d1e929d8bd76998cccff2c45db9
|
[
"MIT"
] | 3
|
2021-09-08T01:43:34.000Z
|
2022-03-12T00:17:23.000Z
|
yuntu/collection/utils.py
|
CONABIO-audio/yuntu
|
5ffb3bff88397d1e929d8bd76998cccff2c45db9
|
[
"MIT"
] | 3
|
2021-08-22T05:54:02.000Z
|
2021-10-29T21:43:03.000Z
|
import os
from yuntu.core.audio.base import Audio, AnnotatedAudio
def audioIterator(dataArr,
mediaDir):
for row in dataArr:
path = row["media_info"]["path"]
if os.path.dirname(path) == "":
row["media_info"]["path"] = os.path.join(mediaDir, path)
yield Audio(row["media_info"], fromConfig=True)
def audioArray(dataArr,
mediaDir):
for i in range(len(dataArr)):
row = dataArr[i]
path = row["media_info"]["path"]
if os.path.dirname(path) == "":
dataArr[i]["media_info"]["path"] = os.path.join(mediaDir, path)
return [Audio(row["media_info"], fromConfig=True)
for row in dataArr]
def annAudioIterator(dataArr,
mediaDir):
for row in dataArr:
path = row["media_info"]["path"]
if os.path.dirname(path) == "":
row["media_info"]["path"] = os.path.join(mediaDir, path)
yield AnnotatedAudio(row["media_info"],
metadata=row,
fromConfig=True,
annotations=row["data"])
def annAudioArray(dataArr,
mediaDir):
for i in range(len(dataArr)):
row = dataArr[i]
path = row["media_info"]["path"]
if os.path.dirname(path) == "":
dataArr[i]["media_info"]["path"] = os.path.join(mediaDir, path)
return [AnnotatedAudio(row["media_info"],
metadata=row,
fromConfig=True,
annotations=row["data"]) for row in dataArr]
def metadataArray(dataArr,
mediaDir):
for i in range(len(dataArr)):
row = dataArr[i]
path = row["media_info"]["path"]
if os.path.dirname(path) == "":
dataArr[i]["media_info"]["path"] = os.path.join(mediaDir, path)
return dataArr
def metadataIterator(dataArr,
mediaDir):
for i in range(len(dataArr)):
row = dataArr[i]
path = row["media_info"]["path"]
if os.path.dirname(path) == "":
dataArr[i]["media_info"]["path"] = os.path.join(mediaDir, path)
yield dataArr[i]
def annotationIterator(dataArr):
for i in range(len(dataArr)):
yield dataArr[i]
def signalArray(dataArr,
mediaDir,
readSr):
results = []
for i in range(len(dataArr)):
row = dataArr[i]
path = row["media_info"]["path"]
if os.path.dirname(path) == "":
dataArr[i]["media_info"]["path"] = os.path.join(mediaDir, path)
au = Audio(row["media_info"], fromConfig=True)
if readSr is not None:
au.setReadSr(readSr)
signal = au.getSignal()
au.clearMedia()
results.append({"id": dataArr[i]["orid"],
"md5": dataArr[i]["md5"],
"signal": signal})
return results
def signalIterator(dataArr,
mediaDir,
readSr):
for i in range(len(dataArr)):
row = dataArr[i]
path = row["media_info"]["path"]
if os.path.dirname(path) == "":
dataArr[i]["media_info"]["path"] = os.path.join(mediaDir, path)
au = Audio(row["media_info"], fromConfig=True)
if readSr is not None:
au.setReadSr(readSr)
signal = au.getSignal()
au.clearMedia()
yield {"id": dataArr[i]["orid"],
"md5": dataArr[i]["md5"],
"signal": signal}
def specArray(dataArr,
mediaDir,
readSr,
n_fft,
hop_length):
results = []
for i in range(len(dataArr)):
row = dataArr[i]
path = row["media_info"]["path"]
if os.path.dirname(path) == "":
dataArr[i]["media_info"]["path"] = os.path.join(mediaDir, path)
au = Audio(row["media_info"], fromConfig=True)
if readSr is not None:
au.setReadSr(readSr)
freqs, spec = au.getSpec(n_fft=n_fft, hop_length=hop_length)
au.clearMedia()
results.append({"id": dataArr[i]["orid"],
"md5": dataArr[i]["md5"],
"freqs": freqs,
"spec": spec})
return results
def specIterator(dataArr,
mediaDir,
readSr,
n_fft,
hop_length):
for i in range(len(dataArr)):
row = dataArr[i]
path = row["media_info"]["path"]
if os.path.dirname(path) == "":
dataArr[i]["media_info"]["path"] = os.path.join(mediaDir, path)
au = Audio(row["media_info"], fromConfig=True)
if readSr is not None:
au.setReadSr(readSr)
freqs, spec = au.getSpec(n_fft=n_fft, hop_length=hop_length)
au.clearMedia()
yield {"id": dataArr[i]["orid"],
"md5": dataArr[i]["md5"],
"freqs": freqs,
"spec": spec}
def buildColDirStruct(colPath,
parts=["db", "parsers", "sql"]):
dbPath = os.path.join(colPath, "db")
parserPath = os.path.join(colPath, "parsers")
sqlPath = os.path.join(colPath, "sql")
if not os.path.exists(colPath):
os.mkdir(colPath)
if not os.path.exists(dbPath):
os.mkdir(dbPath)
if not os.path.exists(parserPath):
os.mkdir(parserPath)
if not os.path.exists(sqlPath):
os.mkdir(sqlPath)
return True
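# A minimal usage sketch of the iterator helpers above; the row and media
# directory below are illustrative placeholders, not from the source:
# rows = [{"media_info": {"path": "recording_001.wav"}}]
# for audio in audioIterator(rows, "/data/collection/media"):
#     ...  # yields an Audio built from each row's media_info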
| 30.136612
| 75
| 0.520399
| 620
| 5,515
| 4.564516
| 0.117742
| 0.089046
| 0.084806
| 0.067845
| 0.802827
| 0.766078
| 0.736749
| 0.712721
| 0.712721
| 0.710954
| 0
| 0.002182
| 0.335086
| 5,515
| 182
| 76
| 30.302198
| 0.769566
| 0
| 0
| 0.732877
| 0
| 0
| 0.085222
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.082192
| false
| 0
| 0.013699
| 0
| 0.136986
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6655dfdfff3ab31296f447c6bac84df349880411
| 31,469
|
py
|
Python
|
custom_commands/mod.py
|
Shikiiii/Asmodeus
|
5d3c5e5352af359fe4305367e0e2ae045da54b3a
|
[
"MIT"
] | 3
|
2019-12-09T16:14:30.000Z
|
2020-08-27T07:00:29.000Z
|
custom_commands/mod.py
|
Shikiiii/defying-thots
|
5d3c5e5352af359fe4305367e0e2ae045da54b3a
|
[
"MIT"
] | null | null | null |
custom_commands/mod.py
|
Shikiiii/defying-thots
|
5d3c5e5352af359fe4305367e0e2ae045da54b3a
|
[
"MIT"
] | 2
|
2019-11-12T17:18:25.000Z
|
2020-08-27T05:36:34.000Z
|
import discord
from discord.ext import commands
import random
import sys
import traceback
import asyncio
import datetime
import json
from datetime import datetime
from common_vars import *
from discord.ext.commands.cooldowns import BucketType
# Commands in this file:
# ban, unban, banid, kick, mute, unmute,
# purge, clear
@bot.command()
@commands.has_permissions(ban_members=True)
async def ban(ctx, user: discord.Member, *, reason: str):
if len(reason) == 0:
punishMsg = discord.Embed(description="{} was banned.\n``Reason:`` N/A\n``Duration:`` -".format(user.mention), color=0x000000)
punishMsg.set_author(name="{}".format(ctx.message.author.name))
punishMsg.set_thumbnail(url=user.avatar_url)
await ctx.send(embed=punishMsg)
try:
await user.send("You've been banned from {}.".format(ctx.server.name))
except:
embed = discord.Embed(
description="I tried to DM the user, but I'm not allowed to because their DMs aren't open.",
color=0xebf533)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
await ctx.send(embed=embed)
await user.ban(reason="N/A")
logch = None
for key, value in punishLogs.items():
if int(key) == ctx.guild.id:
logch = bot.get_channel(int(value))
if logch is None:
return
log = discord.Embed(description="Used command ``!ban`` in {}:\n{}\n\nMod ID: {}\nUser ID: {}".format(
ctx.message.channel.mention, ctx.message.content, ctx.message.author.id, user.id), color=0xFFFFFF, timestamp=datetime.utcnow())
log.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
log.set_footer(text="{}".format(corfor))
log.set_thumbnail(url=user.avatar_url)
await logch.send(embed=log)
else:
punishMsg = discord.Embed(description="{} was banned.\n``Reason:`` {}\n``Duration:`` -".format(user.mention, reason), color=0x000000)
punishMsg.set_author(name="{}".format(ctx.message.author.name))
punishMsg.set_thumbnail(url=user.avatar_url)
await ctx.send(embed=punishMsg)
try:
await user.send("You've been banned from {}.".format(ctx.server.name))
except:
embed = discord.Embed(
description="I tried to DM the user, but I'm not allowed to because their DMs aren't open.",
color=0xebf533)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
await ctx.send(embed=embed)
await user.ban(reason=reason)
logch = None
for key, value in punishLogs.items():
if int(key) == ctx.guild.id:
logch = bot.get_channel(int(value))
if logch is None:
return
log = discord.Embed(description="Used command ``!ban`` in {}:\n{}\n\nMod ID: {}\nUser ID: {}".format(
ctx.message.channel.mention, ctx.message.content, ctx.message.author.id, user.id), color=0xFFFFFF, timestamp=datetime.utcnow())
log.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
log.set_thumbnail(url=user.avatar_url)
await logch.send(embed=log)
@ban.error
async def ban_error(ctx, error):
if isinstance(error, commands.BadArgument):
embed = discord.Embed(description="I couldn't find this user.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
elif isinstance(error, commands.MissingRequiredArgument):
embed = discord.Embed(
description="Give me a user to ban. \n``TIP:`` If you want to ban a user that's not in the server, try using !banid.",
color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
elif isinstance(error, commands.CheckFailure):
embed = discord.Embed(description="You don't have the permissions to use this command.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
else:
print('Ignoring exception in command {}:'.format(ctx.command), file=sys.stderr)
traceback.print_exception(type(error), error, None, file=sys.stderr)
@bot.command()
@commands.has_permissions(ban_members=True)
async def unban(ctx, id: int, *, reason: str = ""):
user = await bot.fetch_user(id)
if user is None:
embed = discord.Embed(description="User doesn't exist.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
return
banEntry = await ctx.message.guild.fetch_ban(user)
if banEntry is None:
embed = discord.Embed(description="This user is not banned.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
return
else:
if banEntry.reason is None:
punishMsg = discord.Embed(description="{} was unbanned.".format(user.mention), color=0x000000)
punishMsg.set_author(name="{}".format(ctx.message.author.name))
punishMsg.set_thumbnail(url=user.avatar_url)
await ctx.send(embed=punishMsg)
logch = None
for key, value in punishLogs.items():
if int(key) == ctx.guild.id:
logch = bot.get_channel(int(value))
if logch is None:
return
log = discord.Embed(description="Used command ``!unban`` in {}:\n{}\n\nMod ID: {}\nUser ID: {}".format(
ctx.message.channel.mention, ctx.message.content, ctx.message.author.id, user.id), color=0xFFFFFF, timestamp=datetime.utcnow())
log.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
log.set_thumbnail(url=user.avatar_url)
await logch.send(embed=log)
else:
punishMsg = discord.Embed(description="{} was unbanned.".format(banEntry.user.mention), color=0x000000)
punishMsg.set_author(name="{}".format(ctx.message.author.name))
punishMsg.set_thumbnail(url=user.avatar_url)
await ctx.send(embed=punishMsg)
logch = None
for key, value in punishLogs.items():
if int(key) == ctx.guild.id:
logch = bot.get_channel(int(value))
if logch is None:
return
log = discord.Embed(description="Used command ``!unban`` in {}:\n{}\n\nMod ID: {}\nUser ID: {}".format(
ctx.message.channel.mention, ctx.message.content, ctx.message.author.id, user.id), color=0xFFFFFF, timestamp=datetime.utcnow())
log.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
log.set_thumbnail(url=user.avatar_url)
await logch.send(embed=log)
await ctx.message.guild.unban(banEntry.user, reason="Unbanned by mod.")
@unban.error
async def unban_error(ctx, error):
if isinstance(error, commands.BadArgument):
embed = discord.Embed(description="I couldn't find this user. Is the ID correct?", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
elif isinstance(error, commands.MissingRequiredArgument):
embed = discord.Embed(description="I couldn't unban.. no one? Try giving me the ID of an user.",
color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
elif isinstance(error, commands.CheckFailure):
embed = discord.Embed(description="You don't have the permissions to use this command.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
else:
print('Ignoring exception in command {}:'.format(ctx.command), file=sys.stderr)
traceback.print_exception(type(error), error, None, file=sys.stderr)
@bot.command()
@commands.has_permissions(ban_members=True)
async def banid(ctx, id: int, *, reason: str):
user = await bot.fetch_user(id)
if user is None:
embed = discord.Embed(description="User doesn't exist, duh.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
return
if len(reason) == 0:
punishMsg = discord.Embed(description="{} was banned.\n``Reason:`` N/A\n``Duration:`` -".format(user.mention), color=0x000000)
punishMsg.set_author(name="{}".format(ctx.message.author.name))
punishMsg.set_thumbnail(url=user.avatar_url)
await ctx.send(embed=punishMsg)
logch = None
for key, value in punishLogs.items():
if int(key) == ctx.guild.id:
logch = bot.get_channel(int(value))
if logch is None:
return
log = discord.Embed(description="Used command ``!banid`` in {}:\n{}\n\nMod ID: {}\nUser ID: {}".format(
ctx.message.channel.mention, ctx.message.content, ctx.message.author.id, user.id), color=0xFFFFFF, timestamp=datetime.utcnow())
log.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
log.set_thumbnail(url=user.avatar_url)
await logch.send(embed=log)
await ctx.message.guild.ban(discord.Object(id=id), reason="N/A")
else:
punishMsg = discord.Embed(description="{} was banned.\n``Reason:`` {}\n``Duration:`` -".format(user.mention, reason), color=0x000000)
punishMsg.set_author(name="{}".format(ctx.message.author.name))
punishMsg.set_thumbnail(url=user.avatar_url)
await ctx.send(embed=punishMsg)
await ctx.message.guild.ban(discord.Object(id=id), reason=reason)
logch = None
for key, value in punishLogs.items():
if int(key) == ctx.guild.id:
logch = bot.get_channel(int(value))
if logch is None:
return
log = discord.Embed(description="Used command ``!banid`` in {}:\n{}\n\nMod ID: {}\nUser ID: {}".format(
ctx.message.channel.mention, ctx.message.content, ctx.message.author.id, user.id), color=0xFFFFFF, timestamp=datetime.utcnow())
log.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
log.set_thumbnail(url=user.avatar_url)
await logch.send(embed=log)
@banid.error
async def banid_error(ctx, error):
if isinstance(error, commands.BadArgument):
embed = discord.Embed(description="I couldn't find this user. Is the ID correct?", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
elif isinstance(error, commands.MissingRequiredArgument):
embed = discord.Embed(description="Give me ID of an user to ban.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
elif isinstance(error, commands.CheckFailure):
embed = discord.Embed(description="You don't have the permissions to use this command.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
else:
print('Ignoring exception in command {}:'.format(ctx.command), file=sys.stderr)
traceback.print_exception(type(error), error, None, file=sys.stderr)
@bot.command()
@commands.has_permissions(kick_members=True)
async def kick(ctx, user: discord.Member, *, reason: str):
if len(reason) == 0:
punishMsg = discord.Embed(description="{} was kicked.\n``Reason:`` N/A\n``Duration:``-".format(user.mention), color=0x000000)
punishMsg.set_author(name="{}".format(ctx.message.author.name))
punishMsg.set_thumbnail(url=user.avatar_url)
await ctx.send(embed=punishMsg)
try:
await ctx.send("You've been kicked from {}.".format(ctx.server.name))
except:
embed = discord.Embed(
description="I tried to DM the user, but I'm not allowed to because their DMs aren't open.",
color=0xebf533)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
await ctx.send(embed=embed)
await user.kick(reason="N/A")
logch = None
for key, value in punishLogs.items():
if int(key) == ctx.guild.id:
logch = bot.get_channel(int(value))
if logch is None:
return
log = discord.Embed(description="Used command ``!kick`` in {}:\n{}\n\nMod ID: {}\nUser ID: {}".format(
ctx.message.channel.mention, ctx.message.content, ctx.message.author.id, user.id), color=0xFFFFFF, timestamp=datetime.utcnow())
log.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
log.set_thumbnail(url=user.avatar_url)
await logch.send(embed=log)
else:
punishMsg = discord.Embed(description="{} was kicked.\n``Reason:`` N/A\n``Duration:``-".format(user.mention), color=0x000000)
punishMsg.set_author(name="{}".format(ctx.message.author.name))
punishMsg.set_thumbnail(url=user.avatar_url)
await ctx.send(embed=punishMsg)
try:
await ctx.send("You've been kicked from {}.".format(ctx.server.name))
except:
embed = discord.Embed(
description="I tried to DM the user, but I'm not allowed to because their DMs aren't open.",
color=0xebf533)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
await ctx.send(embed=embed)
await user.kick(reason=reason)
logch = None
for key, value in punishLogs.items():
if int(key) == ctx.guild.id:
logch = bot.get_channel(int(value))
if logch is None:
return
log = discord.Embed(description="Used command ``!kick`` in {}:\n{}\n\nMod ID: {}\nUser ID: {}".format(
ctx.message.channel.mention, ctx.message.content, ctx.message.author.id, user.id), color=0xFFFFFF, timestamp=datetime.utcnow())
log.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
log.set_thumbnail(url=user.avatar_url)
await logch.send(embed=log)
@kick.error
async def kick_error(ctx, error):
if isinstance(error, commands.BadArgument):
embed = discord.Embed(description="I couldn't find this user. Try giving me a correct user.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
elif isinstance(error, commands.MissingRequiredArgument):
embed = discord.Embed(description="I couldn't kick.. no one? Try giving me a correct user.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
elif isinstance(error, commands.CheckFailure):
embed = discord.Embed(description="You don't have the permissions to use this command.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
else:
print('Ignoring exception in command {}:'.format(ctx.command), file=sys.stderr)
traceback.print_exception(type(error), error, None, file=sys.stderr)
@bot.command()
@commands.has_permissions(manage_messages=True)
#@commands.cooldown(3, 86400, BucketType.user)
async def mute(ctx, user: discord.Member, *, reason: str):
mutedrole = None
for key, value in serverMuted.items():
if int(key) == ctx.guild.id:
mutedrole = await parse_roles(ctx, str(value))
prefix = "!"
for key, value in serverPrefixes.items():
if int(key) == ctx.guild.id:
prefix = str(value)
if mutedrole is None:
embed = discord.Embed(title="{}".format(ctx.message.author.name), description=".҉ :no_entry: Mute is currently disabled because you do not have a chosen role to use. Please tell an administrator to choose a role with {}setmuted [role].".format(prefix), color=0x000000)
await ctx.send(embed=embed)
return
if mutedrole in user.roles:
embed = discord.Embed(description="This user is already muted.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
return
else:
if len(reason) == 0:
punishMsg = discord.Embed(description="{} was muted.\n``Reason:`` N/A\n``Duration:`` -".format(user.mention), color=0x000000)
punishMsg.set_author(name="{}".format(ctx.message.author.name))
punishMsg.set_thumbnail(url=user.avatar_url)
await ctx.send(embed=punishMsg)
await user.add_roles(mutedrole, reason="N/A")
await user.edit(mute=True)
logch = None
for key, value in punishLogs.items():
if int(key) == ctx.guild.id:
logch = bot.get_channel(int(value))
if logch is None:
return
log = discord.Embed(description="Used command ``!mute`` in {}:\n{}\n\nMod ID: {}\nUser ID: {}".format(
ctx.message.channel.mention, ctx.message.content, ctx.message.author.id, user.id), color=0xFFFFFF, timestamp=datetime.utcnow())
log.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
log.set_thumbnail(url=user.avatar_url)
await logch.send(embed=log)
else:
punishMsg = discord.Embed(description="{} was muted.\n``Reason:`` {}\n``Duration:`` -".format(user.mention, reason), color=0x000000)
punishMsg.set_author(name="{}".format(ctx.message.author.name))
punishMsg.set_thumbnail(url=user.avatar_url)
await ctx.send(embed=punishMsg)
await user.add_roles(mutedrole, reason=reason)
await user.edit(mute=True)
logch = None
for key, value in punishLogs.items():
if int(key) == ctx.guild.id:
logch = bot.get_channel(int(value))
if logch is None:
return
log = discord.Embed(description="Used command ``!mute`` in {}:\n{}\n\nMod ID: {}\nUser ID: {}".format(
ctx.message.channel.mention, ctx.message.content, ctx.message.author.id, user.id), color=0xFFFFFF, timestamp=datetime.utcnow())
log.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
log.set_thumbnail(url=user.avatar_url)
await logch.send(embed=log)
@mute.error
async def mute_error(ctx, error):
if isinstance(error, commands.BadArgument):
embed = discord.Embed(description="I couldn't find this user. Try giving me a correct user.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
elif isinstance(error, commands.MissingRequiredArgument):
embed = discord.Embed(description="I couldn't mute.. no one? Try giving me a correct user.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
elif isinstance(error, commands.CheckFailure):
embed = discord.Embed(description="You don't have the permissions to use this command.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
else:
print('Ignoring exception in command {}:'.format(ctx.command), file=sys.stderr)
traceback.print_exception(type(error), error, None, file=sys.stderr)
@bot.command()
@commands.has_permissions(manage_messages=True)
async def unmute(ctx, user: discord.Member):
mutedrole = None
for key, value in serverMuted.items():
if int(key) == ctx.guild.id:
mutedrole = await parse_roles(ctx, str(value))
prefix = "!"
for key, value in serverPrefixes.items():
if int(key) == ctx.guild.id:
prefix = str(value)
if mutedrole is None:
embed = discord.Embed(title="{}".format(ctx.message.author.name), description=".҉ :no_entry: Mute is currently disabled because you do not have a chosen role to use. Please tell an administrator to choose a role with {}setmuted [role].".format(prefix), color=0x000000)
await ctx.send(embed=embed)
return
if mutedrole in user.roles:
punishMsg = discord.Embed(description="{} was unmuted.".format(user.mention), color=0x000000)
punishMsg.set_author(name="{}".format(ctx.message.author.name))
punishMsg.set_thumbnail(url=user.avatar_url)
await ctx.send(embed=punishMsg)
await user.remove_roles(mutedrole)
await user.edit(mute=False)
logch = None
for key, value in punishLogs.items():
if int(key) == ctx.guild.id:
logch = bot.get_channel(int(value))
if logch is None:
return
log = discord.Embed(description="Used command ``!unmute`` in {}:\n{}\n\nMod ID: {}\nUser ID: {}".format(
ctx.message.channel.mention, ctx.message.content, ctx.message.author.id, user.id), color=0xFFFFFF, timestamp=datetime.utcnow())
log.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
log.set_thumbnail(url=user.avatar_url)
await logch.send(embed=log)
else:
embed = discord.Embed(description="This user is not muted.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
return
@unmute.error
async def unmute_error(ctx, error):
if isinstance(error, commands.BadArgument):
embed = discord.Embed(description="I couldn't find this user. Try giving me a correct user.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
elif isinstance(error, commands.MissingRequiredArgument):
embed = discord.Embed(description="I couldn't unmute.. no one? Try giving me a correct user.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
elif isinstance(error, commands.CheckFailure):
embed = discord.Embed(description="You don't have the permissions to use this command.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
else:
print('Ignoring exception in command {}:'.format(ctx.command), file=sys.stderr)
traceback.print_exception(type(error), error, None, file=sys.stderr)
@bot.command()
@commands.has_permissions(manage_messages=True)
async def clean(ctx):
def check(m):
return m.author.bot
# rr = todeln + 1
await ctx.message.delete()
deleted = await ctx.message.channel.purge(limit=100, check=check)
embed = discord.Embed(description="Cleaned bots' messages.", color=0x000000)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
msg = await ctx.send(embed=embed)
await asyncio.sleep(5)
await msg.delete()
@clean.error
async def clean_error(ctx, error):
if isinstance(error, commands.CheckFailure):
embed = discord.Embed(description="You don't have the permissions to use this command.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
else:
print('Ignoring exception in command {}:'.format(ctx.command), file=sys.stderr)
traceback.print_exception(type(error), error, None, file=sys.stderr)
@bot.command()
@commands.has_permissions(manage_messages=True)
async def purge(ctx, amount, *, user: discord.Member):
try:
todeln = int(amount)
except:
await ctx.message.delete()
embed = discord.Embed(description="You didn't enter a number of messages to delete. Try again!", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
msg = await ctx.send(embed=embed)
await asyncio.sleep(10)
await msg.delete()
return
if todeln > 0:
def check(m):
return m.author == user
# rr = todeln + 1
await ctx.message.delete()
deleted = await ctx.message.channel.purge(limit=todeln, check=check)
embed = discord.Embed(description="Successfully purged **{}** messages by **{}**.".format(todeln, user.mention),
color=0x000000)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_thumbnail(url=user.avatar_url)
msg = await ctx.send(embed=embed)
await asyncio.sleep(5)
await msg.delete()
logch = None
for key, value in punishLogs.items():
if int(key) == ctx.guild.id:
logch = bot.get_channel(int(value))
if logch is None:
return
log = discord.Embed(description="Used command ``!purge`` in {}:\n{}\n\nMod ID: {}\nUser ID: {}".format(
ctx.message.channel.mention, ctx.message.content, ctx.message.author.id, user.id), color=0xFFFFFF, timestamp=datetime.utcnow())
log.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
log.set_thumbnail(url=user.avatar_url)
await logch.send(embed=log)
@purge.error
async def purge_error(ctx, error):
if isinstance(error, commands.BadArgument):
embed = discord.Embed(description="I couldn't find this user.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
elif isinstance(error, commands.MissingRequiredArgument):
todeln = int(ctx.message.content[7:])
deleted = await ctx.message.channel.purge(limit=(todeln + 1))
embed = discord.Embed(description="Successfully purged **{}** messages.".format(todeln), color=0x000000)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
msgg = await ctx.send(embed=embed)
await asyncio.sleep(5)
await msgg.delete()
logch = None
for key, value in punishLogs.items():
if int(key) == ctx.guild.id:
logch = bot.get_channel(int(value))
if logch is None:
return
log = discord.Embed(
description="Used command ``!purge`` in {}:\n{}\n\nMod ID: {}".format(ctx.message.channel.mention,
ctx.message.content,
ctx.message.author.id),
color=0xFFFFFF,
timestamp=datetime.utcnow())
log.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
log.set_footer(text="{}".format(corfor))
# log.set_thumbnail(url=user.avatar_url)
await logch.send(embed=log)
elif isinstance(error, commands.CheckFailure):
embed = discord.Embed(description="You don't have the permissions to use this command.", color=0xFF3639)
embed.set_author(name="{}".format(ctx.message.author), icon_url=ctx.message.author.avatar_url)
embed.set_footer(text="Error raised on: {}".format(ctx.message.content))
await ctx.send(embed=embed)
else:
print('Ignoring exception in command {}:'.format(ctx.command), file=sys.stderr)
traceback.print_exception(type(error), error, None, file=sys.stderr)
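Every command above repeats the same punishLogs lookup before sending to the log channel. A possible refactor, sketched under the assumption that punishLogs and bot are the same objects imported from common_vars as in the file above (get_log_channel is a hypothetical helper, not part of the original code):

# Hedged sketch: factor the repeated punishment-log lookup into one helper.
# punishLogs and bot are assumed to come from common_vars, as in the file above.
def get_log_channel(guild_id):
    for key, value in punishLogs.items():
        if int(key) == guild_id:
            return bot.get_channel(int(value))
    return None

Each command body could then reduce its logging preamble to logch = get_log_channel(ctx.guild.id) followed by an early return when the result is None.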
| 55.994662
| 276
| 0.646191
| 4,090
| 31,469
| 4.9022
| 0.0511
| 0.091771
| 0.095761
| 0.065835
| 0.943392
| 0.939302
| 0.93217
| 0.921397
| 0.914763
| 0.90793
| 0
| 0.012109
| 0.215355
| 31,469
| 561
| 277
| 56.094474
| 0.799814
| 0.006038
| 0
| 0.825758
| 0
| 0.013258
| 0.134757
| 0
| 0
| 0
| 0.01535
| 0
| 0
| 1
| 0.003788
| false
| 0
| 0.020833
| 0.003788
| 0.068182
| 0.030303
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
668ef2fd83bd6fd2094139cccabd8ec2ff9d12d6
| 97
|
py
|
Python
|
example_model/policy/mlp/__init__.py
|
SunandBean/tensorflow_RL
|
a248cbfb99b2041f6f7cc008fcad53fb83ac486e
|
[
"MIT"
] | 60
|
2019-01-29T14:13:00.000Z
|
2020-11-24T09:08:05.000Z
|
example_model/policy/mlp/__init__.py
|
SunandBean/tensorflow_RL
|
a248cbfb99b2041f6f7cc008fcad53fb83ac486e
|
[
"MIT"
] | 2
|
2019-08-14T06:44:32.000Z
|
2020-11-12T12:57:55.000Z
|
example_model/policy/mlp/__init__.py
|
SunandBean/tensorflow_RL
|
a248cbfb99b2041f6f7cc008fcad53fb83ac486e
|
[
"MIT"
] | 37
|
2019-01-22T05:19:34.000Z
|
2021-04-12T02:27:50.000Z
|
from example_model.policy.mlp.continuous import *
from example_model.policy.mlp.discrete import *
| 48.5
| 49
| 0.845361
| 14
| 97
| 5.714286
| 0.571429
| 0.275
| 0.4
| 0.55
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072165
| 97
| 2
| 50
| 48.5
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
66b627a42457693c895fb12ca4b4b42440d855d8
| 4,391
|
py
|
Python
|
test/test_van/responses_people.py
|
cmc333333/parsons
|
50804a3627117797570f1e9233c9bbad583f7831
|
[
"Apache-2.0"
] | 3
|
2019-09-05T16:57:15.000Z
|
2019-10-01T19:56:58.000Z
|
test/test_van/responses_people.py
|
cmc333333/parsons
|
50804a3627117797570f1e9233c9bbad583f7831
|
[
"Apache-2.0"
] | 22
|
2019-09-03T13:23:37.000Z
|
2019-10-03T20:32:48.000Z
|
test/test_van/responses_people.py
|
cmc333333/parsons
|
50804a3627117797570f1e9233c9bbad583f7831
|
[
"Apache-2.0"
] | 2
|
2019-09-01T18:30:10.000Z
|
2019-10-03T20:07:46.000Z
|
find_people_response = {
'vanId': 19722445,
'firstName': 'Bob',
'lastName': 'Smith',
'middleName': None,
'suffix': None,
'title': None,
'contactMode': None,
'organizationContactCommonName': None,
'organizationContactOfficialName': None,
'salutation': 'Bob',
'formalSalutation': 'Bob Smith',
'additionalSalutation': None,
'preferredPronoun': None,
'envelopeName': 'Bob Smith',
'formalEnvelopeName': 'Bob Smith',
'additionalEnvelopeName': None,
'contactMethodPreferenceCode': None,
'nickname': None,
'website': None,
'professionalSuffix': None,
'party': None,
'employer': None,
'occupation': None,
'sex': 'F',
'dateOfBirth': '1975-09-18T00:00:00Z',
'selfReportedRace': None,
'selfReportedEthnicity': None,
'selfReportedRaces': None,
'selfReportedEthnicities': None,
'selfReportedGenders': None,
'selfReportedSexualOrientations': None,
'selfReportedLanguagePreference': None,
'emails': None,
'phones': None,
'addresses': None,
'recordedAddresses': None,
'identifiers': None,
'codes': None,
'customFields': None,
'contributionSummary': None,
'suppressions': None,
'caseworkCases': None,
'caseworkIssues': None,
'caseworkStories': None,
'notes': None,
'scores': None,
'customProperties': None,
'electionRecords': None,
'membershipStatus': None,
'organizationRoles': None,
'districts': None
}
get_person_response = {
'vanId': 19722445,
'firstName': 'Bob',
'lastName': 'Smith',
'middleName': None,
'suffix': None,
'title': None,
'contactMode': None,
'organizationContactCommonName': None,
'organizationContactOfficialName': None,
'salutation': 'Bob',
'formalSalutation': 'Bob Smith',
'additionalSalutation': None,
'preferredPronoun': None,
'envelopeName': 'Bob Smith',
'formalEnvelopeName': 'Bob Smith',
'additionalEnvelopeName': None,
'contactMethodPreferenceCode': None,
'nickname': None,
'website': None,
'professionalSuffix': None,
'party': None,
'employer': None,
'occupation': None,
'sex': 'F',
'dateOfBirth': '1975-09-18T00:00:00Z',
'selfReportedRace': None,
'selfReportedEthnicity': None,
'selfReportedRaces': None,
'selfReportedEthnicities': None,
'selfReportedGenders': None,
'selfReportedSexualOrientations': None,
'selfReportedLanguagePreference': None,
'emails': [],
'phones': [{
'phoneId': 15406767,
'phoneNumber': '4142020792',
'ext': None,
'dateCreated': '2019-01-11T12:19:00Z',
'phoneType': 'Cell',
'isPreferred': True,
'smsOptInStatus': 'Unknown',
'phoneOptInStatus': 'Unknown',
'isCellStatus': {
'statusId': 1,
'statusName': 'Verified Cell'
}
}],
'addresses': [{
'addressId': None,
'addressLine1': '900 N Washtenaw Ave',
'addressLine2': None,
'addressLine3': None,
'city': 'Chicago',
'stateOrProvince': 'IL',
'zipOrPostalCode': '60622-4455',
'geoLocation': None,
'countryCode': 'US',
'preview': '900 N Washtenaw Ave ',
'type': 'Voting',
'isPreferred': True,
'streetAddress': '900 N Washtenaw Ave',
'displayMode': 'Standardized'
}, {
'addressId': None,
'addressLine1': '900 N Washtenaw Ave',
'addressLine2': None,
'addressLine3': None,
'city': 'Chicago',
'stateOrProvince': 'IL',
'zipOrPostalCode': '60622-4455',
'geoLocation': None,
'countryCode': 'US',
'preview': '900 N Washtenaw Ave ',
'type': 'Mailing',
'isPreferred': None,
'streetAddress': '900 N Washtenaw Ave',
'displayMode': 'Standardized'
}],
'recordedAddresses': [],
'identifiers': [{
'type': 'StateFileID',
'externalId': 'KLYW682Z'
}],
'codes': None,
'customFields': [],
'contributionSummary': None,
'suppressions': [],
'caseworkCases': None,
'caseworkIssues': None,
'caseworkStories': None,
'notes': None,
'scores': None,
'customProperties': None,
'electionRecords': None,
'membershipStatus': None,
'organizationRoles': None,
'districts': None
}
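A sketch of how canned payloads like these are typically wired into a test; the endpoint URL, the client code, and the use of requests_mock here are assumptions for illustration, not the project's actual test harness:

# Hedged sketch: serve get_person_response from a mocked HTTP endpoint and
# assert on a few fields. The URL and requests_mock usage are illustrative assumptions.
import requests
import requests_mock

with requests_mock.Mocker() as m:
    m.get("https://api.example.com/people/19722445", json=get_person_response)
    person = requests.get("https://api.example.com/people/19722445").json()
    assert person["firstName"] == "Bob"
    assert person["phones"][0]["phoneType"] == "Cell"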
| 28.329032
| 47
| 0.59212
| 323
| 4,391
| 8.037152
| 0.359133
| 0.01849
| 0.030046
| 0.03698
| 0.805085
| 0.805085
| 0.805085
| 0.765023
| 0.765023
| 0.765023
| 0
| 0.037037
| 0.249829
| 4,391
| 154
| 48
| 28.512987
| 0.751063
| 0
| 0
| 0.769737
| 0
| 0
| 0.505923
| 0.097039
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
66c6098b81204813f5ee5d37bda400897691bc41
| 45,401
|
py
|
Python
|
tally_ho/apps/tally/migrations/0001_initial.py
|
crononauta/tally-ho
|
ba2207bfaef27bee3ff13a393983ca493f767238
|
[
"Apache-2.0"
] | null | null | null |
tally_ho/apps/tally/migrations/0001_initial.py
|
crononauta/tally-ho
|
ba2207bfaef27bee3ff13a393983ca493f767238
|
[
"Apache-2.0"
] | null | null | null |
tally_ho/apps/tally/migrations/0001_initial.py
|
crononauta/tally-ho
|
ba2207bfaef27bee3ff13a393983ca493f767238
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Ballot'
db.create_table(u'tally_ballot', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('number', self.gf('django.db.models.fields.PositiveSmallIntegerField')()),
('race_type', self.gf('django.db.models.fields.IntegerField')(default=0, db_index=True)),
))
db.send_create_signal('tally', ['Ballot'])
# Adding model 'Office'
db.create_table(u'tally_office', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=256)),
))
db.send_create_signal('tally', ['Office'])
# Adding model 'SubConstituency'
db.create_table(u'tally_subconstituency', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('ballot_general', self.gf('django.db.models.fields.related.ForeignKey')(related_name='sc_general', null=True, to=orm['tally.Ballot'])),
('ballot_women', self.gf('django.db.models.fields.related.ForeignKey')(related_name='sc_women', null=True, to=orm['tally.Ballot'])),
('code', self.gf('django.db.models.fields.PositiveSmallIntegerField')()),
('component_ballot', self.gf('django.db.models.fields.BooleanField')()),
('field_office', self.gf('django.db.models.fields.CharField')(max_length=256)),
('number_of_ballots', self.gf('django.db.models.fields.PositiveSmallIntegerField')(null=True)),
('races', self.gf('django.db.models.fields.PositiveSmallIntegerField')(null=True)),
))
db.send_create_signal('tally', ['SubConstituency'])
# Adding model 'Center'
db.create_table(u'tally_center', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('sub_constituency', self.gf('django.db.models.fields.related.ForeignKey')(related_name='centers', null=True, to=orm['tally.SubConstituency'])),
('center_type', self.gf('django.db.models.fields.IntegerField')(default=0, db_index=True)),
('code', self.gf('django.db.models.fields.PositiveIntegerField')(unique=True)),
('latitude', self.gf('django.db.models.fields.FloatField')(null=True)),
('longitude', self.gf('django.db.models.fields.FloatField')(null=True)),
('mahalla', self.gf('django.db.models.fields.TextField')()),
('name', self.gf('django.db.models.fields.TextField')()),
('office', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tally.Office'], null=True)),
('region', self.gf('django.db.models.fields.TextField')()),
('village', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal('tally', ['Center'])
# Adding model 'ResultForm'
db.create_table(u'tally_resultform', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('ballot', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tally.Ballot'], null=True)),
('center', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tally.Center'], null=True, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True)),
('created_user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='created_user', null=True, to=orm['auth.User'])),
('audited_count', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
('barcode', self.gf('django.db.models.fields.PositiveIntegerField')(unique=True)),
('date_seen', self.gf('django.db.models.fields.DateTimeField')(null=True)),
('form_stamped', self.gf('django.db.models.fields.NullBooleanField')(null=True, blank=True)),
('form_state', self.gf('django.db.models.fields.IntegerField')(default=0, db_index=True)),
('gender', self.gf('django.db.models.fields.IntegerField')(default=0, null=True, db_index=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=256, null=True)),
('office', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tally.Office'], null=True)),
('rejected_count', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
('serial_number', self.gf('django.db.models.fields.PositiveIntegerField')(unique=True, null=True)),
('skip_quarantine_checks', self.gf('django.db.models.fields.BooleanField')(default=False)),
('station_number', self.gf('django.db.models.fields.PositiveSmallIntegerField')(null=True, blank=True)),
))
db.send_create_signal('tally', ['ResultForm'])
# Adding model 'Archive'
db.create_table(u'tally_archive', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('result_form', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tally.ResultForm'])),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
))
db.send_create_signal('tally', ['Archive'])
# Adding model 'QuarantineCheck'
db.create_table(u'tally_quarantinecheck', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=256)),
('method', self.gf('django.db.models.fields.CharField')(unique=True, max_length=256)),
('value', self.gf('django.db.models.fields.FloatField')()),
))
db.send_create_signal('tally', ['QuarantineCheck'])
# Adding model 'Audit'
db.create_table(u'tally_audit', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('result_form', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tally.ResultForm'])),
('supervisor', self.gf('django.db.models.fields.related.ForeignKey')(related_name='audit_user', null=True, to=orm['auth.User'])),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('active', self.gf('django.db.models.fields.BooleanField')(default=True)),
('for_superadmin', self.gf('django.db.models.fields.BooleanField')(default=False)),
('reviewed_supervisor', self.gf('django.db.models.fields.BooleanField')(default=False)),
('reviewed_team', self.gf('django.db.models.fields.BooleanField')(default=False)),
('blank_reconciliation', self.gf('django.db.models.fields.BooleanField')(default=False)),
('blank_results', self.gf('django.db.models.fields.BooleanField')(default=False)),
('damaged_form', self.gf('django.db.models.fields.BooleanField')(default=False)),
('unclear_figures', self.gf('django.db.models.fields.BooleanField')(default=False)),
('other', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('action_prior_to_recommendation', self.gf('django.db.models.fields.IntegerField')(default=0, db_index=True)),
('resolution_recommendation', self.gf('django.db.models.fields.IntegerField')(default=0, null=True, db_index=True, blank=True)),
('team_comment', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('supervisor_comment', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
))
db.send_create_signal('tally', ['Audit'])
# Adding M2M table for field quarantine_checks on 'Audit'
m2m_table_name = db.shorten_name(u'tally_audit_quarantine_checks')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('audit', models.ForeignKey(orm['tally.audit'], null=False)),
('quarantinecheck', models.ForeignKey(orm['tally.quarantinecheck'], null=False))
))
db.create_unique(m2m_table_name, ['audit_id', 'quarantinecheck_id'])
# Adding model 'Candidate'
db.create_table(u'tally_candidate', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('ballot', self.gf('django.db.models.fields.related.ForeignKey')(related_name='candidates', to=orm['tally.Ballot'])),
('candidate_id', self.gf('django.db.models.fields.PositiveIntegerField')()),
('full_name', self.gf('django.db.models.fields.TextField')()),
('order', self.gf('django.db.models.fields.PositiveSmallIntegerField')()),
('race_type', self.gf('django.db.models.fields.IntegerField')(default=0, db_index=True)),
))
db.send_create_signal('tally', ['Candidate'])
# Adding model 'Clearance'
db.create_table(u'tally_clearance', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('result_form', self.gf('django.db.models.fields.related.ForeignKey')(related_name='clearances', to=orm['tally.ResultForm'])),
('supervisor', self.gf('django.db.models.fields.related.ForeignKey')(related_name='clearance_user', null=True, to=orm['auth.User'])),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('active', self.gf('django.db.models.fields.BooleanField')(default=True)),
('reviewed_supervisor', self.gf('django.db.models.fields.BooleanField')(default=False)),
('reviewed_team', self.gf('django.db.models.fields.BooleanField')(default=False)),
('date_supervisor_modified', self.gf('django.db.models.fields.DateTimeField')(null=True)),
('date_team_modified', self.gf('django.db.models.fields.DateTimeField')(null=True)),
('center_name_missing', self.gf('django.db.models.fields.BooleanField')(default=False)),
('center_name_mismatching', self.gf('django.db.models.fields.BooleanField')(default=False)),
('center_code_missing', self.gf('django.db.models.fields.BooleanField')(default=False)),
('center_code_mismatching', self.gf('django.db.models.fields.BooleanField')(default=False)),
('form_already_in_system', self.gf('django.db.models.fields.BooleanField')(default=False)),
('form_incorrectly_entered_into_system', self.gf('django.db.models.fields.BooleanField')(default=False)),
('other', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('action_prior_to_recommendation', self.gf('django.db.models.fields.IntegerField')(default=0, db_index=True)),
('resolution_recommendation', self.gf('django.db.models.fields.IntegerField')(default=0, null=True, db_index=True, blank=True)),
('team_comment', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('supervisor_comment', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
))
db.send_create_signal('tally', ['Clearance'])
# Adding model 'QualityControl'
db.create_table(u'tally_qualitycontrol', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('result_form', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tally.ResultForm'])),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('active', self.gf('django.db.models.fields.BooleanField')(default=True)),
('passed_general', self.gf('django.db.models.fields.NullBooleanField')(null=True, blank=True)),
('passed_reconciliation', self.gf('django.db.models.fields.NullBooleanField')(null=True, blank=True)),
('passed_women', self.gf('django.db.models.fields.NullBooleanField')(null=True, blank=True)),
))
db.send_create_signal('tally', ['QualityControl'])
# Adding model 'Race'
db.create_table(u'tally_race', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=256)),
('race_type', self.gf('django.db.models.fields.IntegerField')(default=0, db_index=True)),
))
db.send_create_signal('tally', ['Race'])
# Adding M2M table for field sub_constituency on 'Race'
m2m_table_name = db.shorten_name(u'tally_race_sub_constituency')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('race', models.ForeignKey(orm['tally.race'], null=False)),
('subconstituency', models.ForeignKey(orm['tally.subconstituency'], null=False))
))
db.create_unique(m2m_table_name, ['race_id', 'subconstituency_id'])
# Adding model 'ReconciliationForm'
db.create_table(u'tally_reconciliationform', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('result_form', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tally.ResultForm'])),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True)),
('active', self.gf('django.db.models.fields.BooleanField')(default=True)),
('entry_version', self.gf('django.db.models.fields.IntegerField')(default=0, db_index=True)),
('ballot_number_from', self.gf('django.db.models.fields.PositiveIntegerField')()),
('ballot_number_to', self.gf('django.db.models.fields.PositiveIntegerField')()),
('is_stamped', self.gf('django.db.models.fields.BooleanField')()),
('number_ballots_received', self.gf('django.db.models.fields.PositiveIntegerField')()),
('number_signatures_in_vr', self.gf('django.db.models.fields.PositiveIntegerField')()),
('number_unused_ballots', self.gf('django.db.models.fields.PositiveIntegerField')()),
('number_spoiled_ballots', self.gf('django.db.models.fields.PositiveIntegerField')()),
('number_cancelled_ballots', self.gf('django.db.models.fields.PositiveIntegerField')()),
('number_ballots_outside_box', self.gf('django.db.models.fields.PositiveIntegerField')()),
('number_ballots_inside_box', self.gf('django.db.models.fields.PositiveIntegerField')()),
('number_ballots_inside_and_outside_box', self.gf('django.db.models.fields.PositiveIntegerField')()),
('number_unstamped_ballots', self.gf('django.db.models.fields.PositiveIntegerField')()),
('number_invalid_votes', self.gf('django.db.models.fields.PositiveIntegerField')()),
('number_valid_votes', self.gf('django.db.models.fields.PositiveIntegerField')()),
('number_sorted_and_counted', self.gf('django.db.models.fields.PositiveIntegerField')()),
('signature_polling_officer_1', self.gf('django.db.models.fields.BooleanField')()),
('signature_polling_officer_2', self.gf('django.db.models.fields.BooleanField')()),
('signature_polling_station_chair', self.gf('django.db.models.fields.BooleanField')()),
('signature_dated', self.gf('django.db.models.fields.BooleanField')()),
))
db.send_create_signal('tally', ['ReconciliationForm'])
# Adding model 'Result'
db.create_table(u'tally_result', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('candidate', self.gf('django.db.models.fields.related.ForeignKey')(related_name='candidates', to=orm['tally.Candidate'])),
('result_form', self.gf('django.db.models.fields.related.ForeignKey')(related_name='results', to=orm['tally.ResultForm'])),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True)),
('active', self.gf('django.db.models.fields.BooleanField')(default=True)),
('entry_version', self.gf('django.db.models.fields.IntegerField')(default=0, db_index=True)),
('votes', self.gf('django.db.models.fields.PositiveIntegerField')()),
))
db.send_create_signal('tally', ['Result'])
# Adding model 'Station'
db.create_table(u'tally_station', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('created_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('modified_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
('center', self.gf('django.db.models.fields.related.ForeignKey')(related_name='stations', to=orm['tally.Center'])),
('sub_constituency', self.gf('django.db.models.fields.related.ForeignKey')(related_name='stations', to=orm['tally.SubConstituency'])),
('gender', self.gf('django.db.models.fields.IntegerField')(default=0, db_index=True)),
('registrants', self.gf('django.db.models.fields.PositiveIntegerField')(null=True)),
('station_number', self.gf('django.db.models.fields.PositiveSmallIntegerField')()),
))
db.send_create_signal('tally', ['Station'])
def backwards(self, orm):
# Deleting model 'Ballot'
db.delete_table(u'tally_ballot')
# Deleting model 'Office'
db.delete_table(u'tally_office')
# Deleting model 'SubConstituency'
db.delete_table(u'tally_subconstituency')
# Deleting model 'Center'
db.delete_table(u'tally_center')
# Deleting model 'ResultForm'
db.delete_table(u'tally_resultform')
# Deleting model 'Archive'
db.delete_table(u'tally_archive')
# Deleting model 'QuarantineCheck'
db.delete_table(u'tally_quarantinecheck')
# Deleting model 'Audit'
db.delete_table(u'tally_audit')
# Removing M2M table for field quarantine_checks on 'Audit'
db.delete_table(db.shorten_name(u'tally_audit_quarantine_checks'))
# Deleting model 'Candidate'
db.delete_table(u'tally_candidate')
# Deleting model 'Clearance'
db.delete_table(u'tally_clearance')
# Deleting model 'QualityControl'
db.delete_table(u'tally_qualitycontrol')
# Deleting model 'Race'
db.delete_table(u'tally_race')
# Removing M2M table for field sub_constituency on 'Race'
db.delete_table(db.shorten_name(u'tally_race_sub_constituency'))
# Deleting model 'ReconciliationForm'
db.delete_table(u'tally_reconciliationform')
# Deleting model 'Result'
db.delete_table(u'tally_result')
# Deleting model 'Station'
db.delete_table(u'tally_station')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'tally.archive': {
'Meta': {'object_name': 'Archive'},
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'result_form': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tally.ResultForm']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'tally.audit': {
'Meta': {'object_name': 'Audit'},
'action_prior_to_recommendation': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'blank_reconciliation': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'blank_results': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'damaged_form': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'for_superadmin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'other': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'quarantine_checks': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['tally.QuarantineCheck']", 'symmetrical': 'False'}),
'resolution_recommendation': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'result_form': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tally.ResultForm']"}),
'reviewed_supervisor': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'reviewed_team': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'supervisor': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'audit_user'", 'null': 'True', 'to': u"orm['auth.User']"}),
'supervisor_comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'team_comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'unclear_figures': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'tally.ballot': {
'Meta': {'ordering': "['number']", 'object_name': 'Ballot'},
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'number': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'race_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'})
},
'tally.candidate': {
'Meta': {'object_name': 'Candidate'},
'ballot': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'candidates'", 'to': "orm['tally.Ballot']"}),
'candidate_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'race_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'})
},
'tally.center': {
'Meta': {'ordering': "['code']", 'object_name': 'Center'},
'center_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'code': ('django.db.models.fields.PositiveIntegerField', [], {'unique': 'True'}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'longitude': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'mahalla': ('django.db.models.fields.TextField', [], {}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.TextField', [], {}),
'office': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tally.Office']", 'null': 'True'}),
'region': ('django.db.models.fields.TextField', [], {}),
'sub_constituency': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'centers'", 'null': 'True', 'to': "orm['tally.SubConstituency']"}),
'village': ('django.db.models.fields.TextField', [], {})
},
'tally.clearance': {
'Meta': {'object_name': 'Clearance'},
'action_prior_to_recommendation': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'center_code_mismatching': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'center_code_missing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'center_name_mismatching': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'center_name_missing': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_supervisor_modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'date_team_modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'form_already_in_system': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'form_incorrectly_entered_into_system': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'other': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'resolution_recommendation': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'result_form': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'clearances'", 'to': "orm['tally.ResultForm']"}),
'reviewed_supervisor': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'reviewed_team': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'supervisor': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'clearance_user'", 'null': 'True', 'to': u"orm['auth.User']"}),
'supervisor_comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'team_comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'tally.office': {
'Meta': {'ordering': "['name']", 'object_name': 'Office'},
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '256'})
},
'tally.qualitycontrol': {
'Meta': {'object_name': 'QualityControl'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'passed_general': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'passed_reconciliation': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'passed_women': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'result_form': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tally.ResultForm']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
'tally.quarantinecheck': {
'Meta': {'object_name': 'QuarantineCheck'},
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'method': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '256'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '256'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'}),
'value': ('django.db.models.fields.FloatField', [], {})
},
'tally.race': {
'Meta': {'object_name': 'Race'},
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'race_type': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'sub_constituency': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['tally.SubConstituency']", 'symmetrical': 'False'})
},
'tally.reconciliationform': {
'Meta': {'object_name': 'ReconciliationForm'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'ballot_number_from': ('django.db.models.fields.PositiveIntegerField', [], {}),
'ballot_number_to': ('django.db.models.fields.PositiveIntegerField', [], {}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'entry_version': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_stamped': ('django.db.models.fields.BooleanField', [], {}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'number_ballots_inside_and_outside_box': ('django.db.models.fields.PositiveIntegerField', [], {}),
'number_ballots_inside_box': ('django.db.models.fields.PositiveIntegerField', [], {}),
'number_ballots_outside_box': ('django.db.models.fields.PositiveIntegerField', [], {}),
'number_ballots_received': ('django.db.models.fields.PositiveIntegerField', [], {}),
'number_cancelled_ballots': ('django.db.models.fields.PositiveIntegerField', [], {}),
'number_invalid_votes': ('django.db.models.fields.PositiveIntegerField', [], {}),
'number_signatures_in_vr': ('django.db.models.fields.PositiveIntegerField', [], {}),
'number_sorted_and_counted': ('django.db.models.fields.PositiveIntegerField', [], {}),
'number_spoiled_ballots': ('django.db.models.fields.PositiveIntegerField', [], {}),
'number_unstamped_ballots': ('django.db.models.fields.PositiveIntegerField', [], {}),
'number_unused_ballots': ('django.db.models.fields.PositiveIntegerField', [], {}),
'number_valid_votes': ('django.db.models.fields.PositiveIntegerField', [], {}),
'result_form': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tally.ResultForm']"}),
'signature_dated': ('django.db.models.fields.BooleanField', [], {}),
'signature_polling_officer_1': ('django.db.models.fields.BooleanField', [], {}),
'signature_polling_officer_2': ('django.db.models.fields.BooleanField', [], {}),
'signature_polling_station_chair': ('django.db.models.fields.BooleanField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'})
},
'tally.result': {
'Meta': {'object_name': 'Result'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'candidate': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'candidates'", 'to': "orm['tally.Candidate']"}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'entry_version': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'result_form': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'results'", 'to': "orm['tally.ResultForm']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'}),
'votes': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'tally.resultform': {
'Meta': {'object_name': 'ResultForm'},
'audited_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'ballot': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tally.Ballot']", 'null': 'True'}),
'barcode': ('django.db.models.fields.PositiveIntegerField', [], {'unique': 'True'}),
'center': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tally.Center']", 'null': 'True', 'blank': 'True'}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_user'", 'null': 'True', 'to': u"orm['auth.User']"}),
'date_seen': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'form_stamped': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'form_state': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'gender': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True'}),
'office': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tally.Office']", 'null': 'True'}),
'rejected_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'serial_number': ('django.db.models.fields.PositiveIntegerField', [], {'unique': 'True', 'null': 'True'}),
'skip_quarantine_checks': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'station_number': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True'})
},
'tally.station': {
'Meta': {'object_name': 'Station'},
'center': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stations'", 'to': "orm['tally.Center']"}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'gender': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'registrants': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'station_number': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'sub_constituency': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stations'", 'to': "orm['tally.SubConstituency']"})
},
'tally.subconstituency': {
'Meta': {'object_name': 'SubConstituency'},
'ballot_general': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sc_general'", 'null': 'True', 'to': "orm['tally.Ballot']"}),
'ballot_women': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sc_women'", 'null': 'True', 'to': "orm['tally.Ballot']"}),
'code': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'component_ballot': ('django.db.models.fields.BooleanField', [], {}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'field_office': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'number_of_ballots': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'races': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'})
}
}
complete_apps = ['tally']
| 77.874786
| 195
| 0.613643
| 5,008
| 45,401
| 5.423323
| 0.043131
| 0.107511
| 0.187629
| 0.268041
| 0.88634
| 0.84746
| 0.840648
| 0.799742
| 0.728756
| 0.635052
| 0
| 0.002907
| 0.174137
| 45,401
| 582
| 196
| 78.008591
| 0.721456
| 0.022665
| 0
| 0.378698
| 0
| 0
| 0.543283
| 0.360835
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003945
| false
| 0.013807
| 0.00789
| 0
| 0.017751
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dd2f8124bd2e6ede38720a6ebd2705c667a9dd41
| 135
|
py
|
Python
|
Chapter09/readcount1.py
|
LuisPereda/Learning_Python
|
e89e69346c5584be10d991010f39b59329793ba5
|
[
"MIT"
] | null | null | null |
Chapter09/readcount1.py
|
LuisPereda/Learning_Python
|
e89e69346c5584be10d991010f39b59329793ba5
|
[
"MIT"
] | null | null | null |
Chapter09/readcount1.py
|
LuisPereda/Learning_Python
|
e89e69346c5584be10d991010f39b59329793ba5
|
[
"MIT"
] | null | null | null |
file_input = open("sample1.txt",'r')
print file_input.read(20)
print file_input.read(15)
print file_input.read(10)
file_input.close()
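A minimal Python 3 sketch of the same sequential reads, using a context manager so the file handle is closed automatically; like the snippet above, it assumes a sample1.txt file exists in the working directory.

# Hedged sketch: identical chunked reads, with the with-block closing the file.
with open("sample1.txt", "r") as sample:
    print(sample.read(20))  # first 20 characters
    print(sample.read(15))  # next 15 characters
    print(sample.read(10))  # next 10 characters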
| 27
| 37
| 0.762963
| 24
| 135
| 4.083333
| 0.5
| 0.459184
| 0.428571
| 0.55102
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056452
| 0.081481
| 135
| 5
| 38
| 27
| 0.733871
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.6
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
dd451d56eec2968de04dc303279d5ddaac0efed6
| 5,391
|
py
|
Python
|
tests/tests/test_docker_network.py
|
housewares/agent
|
ba54bf145038e7fec723a44299e85869b8600ce1
|
[
"Apache-2.0"
] | null | null | null |
tests/tests/test_docker_network.py
|
housewares/agent
|
ba54bf145038e7fec723a44299e85869b8600ce1
|
[
"Apache-2.0"
] | null | null | null |
tests/tests/test_docker_network.py
|
housewares/agent
|
ba54bf145038e7fec723a44299e85869b8600ce1
|
[
"Apache-2.0"
] | 2
|
2021-01-10T13:27:22.000Z
|
2021-01-11T14:19:50.000Z
|
from common import delete_container, event_test, trim, docker_client, \
JsonObject
def test_network_mode_none(agent):
delete_container('/c861f990-4472-4fa1-960f-65171b544c28')
def pre(req):
instance = req['data']['instanceHostMap']['instance']
instance['nics'][0]['network']['kind'] = 'dockerNone'
instance['hostname'] = 'nameisset'
def post(req, resp, valid_resp):
instance_data = resp['data']['instanceHostMap']['instance']['+data']
docker_inspect = instance_data['dockerInspect']
assert docker_inspect['Config']['NetworkDisabled']
assert docker_inspect['HostConfig']['NetworkMode'] == 'none'
assert docker_inspect['Config']['Hostname'] == 'nameisset'
docker_container = instance_data['dockerContainer']
fields = instance_data['+fields']
trim(docker_container, fields, resp, valid_resp)
event_test(agent, 'docker/instance_activate', pre_func=pre,
post_func=post, diff=False)
def test_network_mode_host(agent):
delete_container('/c861f990-4472-4fa1-960f-65171b544c28')
def pre(req):
instance = req['data']['instanceHostMap']['instance']
instance['nics'][0]['network']['kind'] = 'dockerHost'
instance['hostname'] = 'nameisset'
def post(req, resp, valid_resp):
instance_data = resp['data']['instanceHostMap']['instance']['+data']
docker_inspect = instance_data['dockerInspect']
# 'NetworkDisabled' is not present in Config when the network mode is set to host
assert 'NetworkDisabled' not in docker_inspect['Config']
assert docker_inspect['HostConfig']['NetworkMode'] == 'host'
assert docker_inspect['Config']['Hostname'] != 'nameisset'
docker_container = instance_data['dockerContainer']
fields = instance_data['+fields']
trim(docker_container, fields, resp, valid_resp)
event_test(agent, 'docker/instance_activate', pre_func=pre,
post_func=post, diff=False)
def test_network_mode_container_with_mac_and_hostname(agent):
delete_container('/network-container')
delete_container('/c861f990-4472-4fa1-960f-65171b544c28')
client = docker_client()
c = client.create_container('ibuildthecloud/helloworld',
name='network-container')
client.start(c)
def pre(req):
instance = req['data']['instanceHostMap']['instance']
instance['hostname'] = 'no set'
instance['nics'][0]['network']['kind'] = 'dockerContainer'
instance['networkContainer'] = JsonObject({
'uuid': 'network-container'
})
def post(req, resp, valid_resp):
instance_data = resp['data']['instanceHostMap']['instance']['+data']
docker_inspect = instance_data['dockerInspect']
assert 'MacAddress' not in docker_inspect['Config']
assert docker_inspect['Config']['Hostname'] != 'no set'
assert docker_inspect['HostConfig']['NetworkMode'] == \
'container:{}'.format(c['Id'])
docker_container = instance_data['dockerContainer']
fields = instance_data['+fields']
trim(docker_container, fields, resp, valid_resp)
event_test(agent, 'docker/instance_activate', pre_func=pre,
post_func=post, diff=False)
def test_network_mode_container(agent):
delete_container('/network-container')
delete_container('/c861f990-4472-4fa1-960f-65171b544c28')
client = docker_client()
c = client.create_container('ibuildthecloud/helloworld',
name='network-container')
client.start(c)
def pre(req):
instance = req['data']['instanceHostMap']['instance']
instance['nics'][0]['network']['kind'] = 'dockerContainer'
instance['networkContainer'] = JsonObject({
'uuid': 'network-container'
})
def post(req, resp, valid_resp):
instance_data = resp['data']['instanceHostMap']['instance']['+data']
docker_inspect = instance_data['dockerInspect']
assert 'NetworkDisabled' not in docker_inspect['Config']
assert docker_inspect['HostConfig']['NetworkMode'] == \
'container:{}'.format(c['Id'])
docker_container = instance_data['dockerContainer']
fields = instance_data['+fields']
trim(docker_container, fields, resp, valid_resp)
event_test(agent, 'docker/instance_activate', pre_func=pre,
post_func=post, diff=False)
def test_network_mode_bridge(agent):
delete_container('/c861f990-4472-4fa1-960f-65171b544c28')
def pre(req):
instance = req['data']['instanceHostMap']['instance']
instance['nics'][0]['network']['kind'] = 'dockerBridge'
def post(req, resp, valid_resp):
instance_data = resp['data']['instanceHostMap']['instance']['+data']
docker_inspect = instance_data['dockerInspect']
docker_data = instance_data['dockerContainer']
assert 'NetworkDisabled' not in docker_inspect['Config']
assert len(docker_data['Ports']) == 1
assert docker_data['Ports'][0]["PublicPort"] == 100
docker_container = instance_data['dockerContainer']
fields = instance_data['+fields']
trim(docker_container, fields, resp, valid_resp)
event_test(agent, 'docker/instance_activate_bridge', pre_func=pre,
post_func=post, diff=False)
| 39.350365
| 76
| 0.652013
| 561
| 5,391
| 6.060606
| 0.144385
| 0.091765
| 0.079412
| 0.026471
| 0.894118
| 0.878235
| 0.878235
| 0.878235
| 0.841471
| 0.841471
| 0
| 0.031476
| 0.204415
| 5,391
| 136
| 77
| 39.639706
| 0.76125
| 0.010017
| 0
| 0.786408
| 0
| 0
| 0.277788
| 0.067854
| 0
| 0
| 0
| 0
| 0.135922
| 1
| 0.145631
| false
| 0
| 0.009709
| 0
| 0.15534
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
06eff3800fc1aa1e30983fd8f9dc466bd6ad39da
| 2,508
|
py
|
Python
|
utils/regex.py
|
abaruchi/WhoIsConnected
|
531888aaeace41d56b16073a05db0dce8e469e22
|
[
"MIT"
] | null | null | null |
utils/regex.py
|
abaruchi/WhoIsConnected
|
531888aaeace41d56b16073a05db0dce8e469e22
|
[
"MIT"
] | 32
|
2019-02-04T16:18:45.000Z
|
2020-09-11T18:48:14.000Z
|
utils/regex.py
|
abaruchi/WhoIsConnected
|
531888aaeace41d56b16073a05db0dce8e469e22
|
[
"MIT"
] | 1
|
2019-02-04T15:48:25.000Z
|
2019-02-04T15:48:25.000Z
|
"""
This file contains the regular expressions used by the system. Any new regex
should be added to this file
"""
class MySystemRegex(object):
def ipv4_regex(self):
return r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$'
def ipv6_regex(self):
return (r"(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:"
r"[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|"
r"2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|"
r"1[0-9]{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:"
r"){5}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:"
r"[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}("
r"?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])"
r")|(?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}("
r"?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:"
r"[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}("
r"?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])"
r")|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?"
r":[0-9A-Fa-f]{1,4}:){3}(?:[0-9A-Fa-f]{1,4}:"
r"[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1"
r"[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:"
r"[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:"
r"(?:[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::("
r"?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]{1,4}:"
r"[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1"
r"[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?"
r":[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:"
r"(?:[0-9A-Fa-f]{1,4}:){,3}[0-9A-Fa-f]{1,4})?:"
r":[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:"
r"[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|"
r"2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1"
r"[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:"
r"[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::(?:"
r"[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:"
r"(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|"
r"25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|"
r"2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:"
r"){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:"
r"[0-9A-Fa-f]{1,4}:){,6}[0-9A-Fa-f]{1,4})?::)")
def mac_addr(self):
return r"([0-9a-f]{2}[:-]){5}([0-9a-f]{2})"
| 51.183673
| 75
| 0.307416
| 563
| 2,508
| 1.364121
| 0.065719
| 0.145833
| 0.221354
| 0.265625
| 0.809896
| 0.778646
| 0.778646
| 0.778646
| 0.778646
| 0.763021
| 0
| 0.234535
| 0.258772
| 2,508
| 48
| 76
| 52.25
| 0.178591
| 0.033094
| 0
| 0.205128
| 0
| 0.871795
| 0.651634
| 0.651634
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0
| 0.076923
| 0.179487
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
06fd9b030ee3d4ce86562803b1d8d9c01df8ebda
| 12,161
|
py
|
Python
|
okr/scrapers/youtube/google.py
|
wdr-data/wdr-okr
|
71c9e6e8d3521b1bb67d30310a93584389de2127
|
[
"MIT"
] | 2
|
2021-07-28T08:46:13.000Z
|
2022-01-19T17:05:48.000Z
|
okr/scrapers/youtube/google.py
|
wdr-data/wdr-okr
|
71c9e6e8d3521b1bb67d30310a93584389de2127
|
[
"MIT"
] | 3
|
2020-11-10T23:34:17.000Z
|
2021-03-31T16:19:21.000Z
|
okr/scrapers/youtube/google.py
|
wdr-data/wdr-okr
|
71c9e6e8d3521b1bb67d30310a93584389de2127
|
[
"MIT"
] | null | null | null |
""" Methods for scraping YouTube data with Quintly """
import datetime as dt
from typing import Generator, Optional
from google.cloud import bigquery
import numpy as np
import pandas as pd
from ..common import utils
from ..common.google import bigquery_client, insert_table_name, iter_results
def get_bigquery_basic(
bigquery_suffix: str,
*,
start_date: Optional[dt.date] = None,
end_date: Optional[dt.date] = None,
) -> Generator[pd.Series, None, None]:
"""Read YouTube Video data from BigQuery.
Args:
bigquery_suffix (str): BigQuery dataset/table suffix.
start_date (Optional[dt.date], optional): Date of earliest data to
request. This date refers to the partition field value, not the date
of the data itself. Defaults to None. Will be set to 7 days before today if None.
end_date (Optional[dt.date], optional): Date of latest data to
request. This date refers to the partition field value, not the date
of the data itself. Defaults to None. Will be set to today if None.
Yields:
pd.Series: BigQuery response data.
"""
today = utils.local_today()
if start_date is None:
start_date = today - dt.timedelta(days=7)
if end_date is None:
end_date = today
query = """
SELECT
`date`,
`video_id`,
`live_or_on_demand`,
SUM(`views`) AS `views`,
SUM(`likes`) AS `likes`,
SUM(`dislikes`) AS `dislikes`,
SUM(`comments`) AS `comments`,
SUM(`shares`) AS `shares`,
SUM(`subscribers_gained`) AS `subscribers_gained`,
SUM(`subscribers_lost`) AS `subscribers_lost`,
SUM(`watch_time_minutes`) AS `watch_time_minutes`
FROM
`@table_name`
WHERE
DATE(_PARTITIONTIME) >= @start_date
AND DATE(_PARTITIONTIME) <= @end_date
AND `video_id` IS NOT NULL
GROUP BY
`date`,
`video_id`,
`live_or_on_demand`
""".strip()
query = insert_table_name(query, "p_channel_basic_a2_", bigquery_suffix)
job_config = bigquery.QueryJobConfig(
default_dataset=f"wdr-okr.youtube_channel_{bigquery_suffix}",
query_parameters=[
bigquery.ScalarQueryParameter("start_date", "DATE", start_date.isoformat()),
bigquery.ScalarQueryParameter("end_date", "DATE", end_date.isoformat()),
],
)
def df_cleaner(df: pd.DataFrame) -> pd.DataFrame:
# Convert to date
df.date = pd.to_datetime(df.date).dt.date
return df.replace({np.nan: None})
yield from iter_results(
bigquery_client,
query,
job_config,
df_cleaner,
)
def get_bigquery_traffic_source(
bigquery_suffix: str,
start_date: dt.date,
*,
end_date: Optional[dt.date] = None,
) -> Generator[pd.Series, None, None]:
"""Read YouTube Video traffic source data from BigQuery.
Args:
bigquery_suffix (str): BigQuery dataset/table suffix.
start_date (dt.date): Date of earliest data to
request. This date refers to the partition field value, not the date
of the data itself.
end_date (Optional[dt.date], optional): Date of latest data to
request. This date refers to the partition field value, not the date
of the data itself. Defaults to None. Will be set to today if None.
Yields:
pd.Series: BigQuery response data.
"""
if end_date is None:
end_date = utils.local_today()
query = """
SELECT
`video_id`,
`traffic_source_type`,
SUM(`views`) AS `views`,
SUM(`watch_time_minutes`) AS `watch_time_minutes`
FROM
`@table_name`
WHERE
DATE(_PARTITIONTIME) >= @start_date
AND DATE(_PARTITIONTIME) <= @end_date
AND `video_id` IS NOT NULL
GROUP BY
`video_id`,
`traffic_source_type`
""".strip()
query = insert_table_name(query, "p_channel_traffic_source_a2_", bigquery_suffix)
job_config = bigquery.QueryJobConfig(
default_dataset=f"wdr-okr.youtube_channel_{bigquery_suffix}",
query_parameters=[
bigquery.ScalarQueryParameter("start_date", "DATE", start_date.isoformat()),
bigquery.ScalarQueryParameter("end_date", "DATE", end_date.isoformat()),
],
)
def df_cleaner(df: pd.DataFrame) -> pd.DataFrame:
return df.replace({np.nan: None})
yield from iter_results(
bigquery_client,
query,
job_config,
df_cleaner,
)
def get_bigquery_search_terms(
bigquery_suffix: str,
start_date: dt.date,
*,
end_date: Optional[dt.date] = None,
) -> Generator[pd.Series, None, None]:
"""Read YouTube Video search term data from BigQuery.
Args:
bigquery_suffix (str): BigQuery dataset/table suffix.
start_date (dt.date): Date of earliest data to
request. This date refers to the partition field value, not the date
of the data itself.
end_date (Optional[dt.date], optional): Date of latest data to
request. This date refers to the partition field value, not the date
of the data itself. Defaults to None. Will be set to today if None.
Yields:
pd.Series: BigQuery response data.
"""
if end_date is None:
end_date = utils.local_today()
query = """
SELECT
`video_id`,
`traffic_source_detail`,
SUM(`views`) AS `views`,
SUM(`watch_time_minutes`) AS `watch_time_minutes`
FROM
`@table_name`
WHERE
DATE(_PARTITIONTIME) >= @start_date
AND DATE(_PARTITIONTIME) <= @end_date
AND `traffic_source_type` = 5
AND `traffic_source_detail` IS NOT NULL
AND `video_id` IS NOT NULL
GROUP BY
`video_id`,
`traffic_source_detail`
""".strip()
query = insert_table_name(query, "p_channel_traffic_source_a2_", bigquery_suffix)
job_config = bigquery.QueryJobConfig(
default_dataset=f"wdr-okr.youtube_channel_{bigquery_suffix}",
query_parameters=[
bigquery.ScalarQueryParameter("start_date", "DATE", start_date.isoformat()),
bigquery.ScalarQueryParameter("end_date", "DATE", end_date.isoformat()),
],
)
def df_cleaner(df: pd.DataFrame) -> pd.DataFrame:
return df.replace({np.nan: None})
yield from iter_results(
bigquery_client,
query,
job_config,
df_cleaner,
)
def get_bigquery_external_traffic(
bigquery_suffix: str,
start_date: dt.date,
*,
end_date: Optional[dt.date] = None,
) -> Generator[pd.Series, None, None]:
"""Read YouTube Video external traffic data from BigQuery.
Args:
bigquery_suffix (str): BigQuery dataset/table suffix.
start_date (dt.date): Date of earliest data to
request. This date refers to the partition field value, not the date
of the data itself.
end_date (Optional[dt.date], optional): Date of latest data to
request. This date refers to the partition field value, not the date
of the data itself. Defaults to None. Will be set to today if None.
Yields:
pd.Series: BigQuery response data.
"""
if end_date is None:
end_date = utils.local_today()
query = """
SELECT
`video_id`,
`traffic_source_detail`,
SUM(`views`) AS `views`,
SUM(`watch_time_minutes`) AS `watch_time_minutes`
FROM
`@table_name`
WHERE
DATE(_PARTITIONTIME) >= @start_date
AND DATE(_PARTITIONTIME) <= @end_date
AND `traffic_source_type` = 9
AND `traffic_source_detail` IS NOT NULL
AND `video_id` IS NOT NULL
GROUP BY
`video_id`,
`traffic_source_detail`
""".strip()
query = insert_table_name(query, "p_channel_traffic_source_a2_", bigquery_suffix)
job_config = bigquery.QueryJobConfig(
default_dataset=f"wdr-okr.youtube_channel_{bigquery_suffix}",
query_parameters=[
bigquery.ScalarQueryParameter("start_date", "DATE", start_date.isoformat()),
bigquery.ScalarQueryParameter("end_date", "DATE", end_date.isoformat()),
],
)
def df_cleaner(df: pd.DataFrame) -> pd.DataFrame:
return df.replace({np.nan: None})
yield from iter_results(
bigquery_client,
query,
job_config,
df_cleaner,
)
def get_bigquery_video_demographics(
bigquery_suffix: str,
start_date: dt.date,
*,
end_date: Optional[dt.date] = None,
) -> Generator[pd.Series, None, None]:
"""Read YouTube Video demographics data from BigQuery.
Args:
bigquery_suffix (str): BigQuery dataset/table suffix.
start_date (dt.date): Date of earliest data to
request. This date refers to the partition field value, not the date
of the data itself.
end_date (Optional[dt.date], optional): Date of latest data to
request. This date refers to the partition field value, not the date
of the data itself. Defaults to None. Will be set to today if None.
Yields:
pd.Series: BigQuery response data.
"""
if end_date is None:
end_date = utils.local_today()
query = """
SELECT
`video_id`,
`age_group`,
`gender`,
SUM(`views`) / MAX(`total_video_views`) * 100 AS `views_percentage`,
FROM (
SELECT
`demo`.`date`,
`demo`.`video_id`,
`age_group`,
`gender`,
`views_percentage`,
`views` AS `views_day_total`,
`views` * `views_percentage` / 100 AS `views`,
`total_video_views`
FROM (
SELECT
`date`,
`video_id`,
`age_group`,
`gender`,
SUM(`views_percentage`) AS `views_percentage`
FROM
`@table_name_demo`
WHERE
DATE(_PARTITIONTIME) >= @start_date
AND DATE(_PARTITIONTIME) <= @end_date
AND `video_id` IS NOT NULL
GROUP BY
`date`,
`video_id`,
`age_group`,
`gender` ) `demo`
LEFT JOIN (
SELECT
`date`,
`video_id`,
SUM(`views`) AS `views`
FROM
`@table_name_basic`
WHERE
DATE(_PARTITIONTIME) >= @start_date
AND DATE(_PARTITIONTIME) <= @end_date
AND `video_id` IS NOT NULL
GROUP BY
`date`,
`video_id` ) `basic`
ON
`basic`.`date` = `demo`.`date`
AND `basic`.`video_id` = `demo`.`video_id`
-- Stupid but works
-- We need the total video views that influence the calculation
-- We join the basic table into the demographics table to drop
-- dates where no demographics data is available.
LEFT JOIN (
SELECT
`demo`.`video_id`,
SUM(`views`) AS `total_video_views`
FROM (
SELECT
`date`,
`video_id`
FROM
`@table_name_demo`
WHERE
DATE(_PARTITIONTIME) >= @start_date
AND DATE(_PARTITIONTIME) <= @end_date
AND `video_id` IS NOT NULL
GROUP BY
`date`,
`video_id` ) `demo`
LEFT JOIN (
SELECT
`date`,
`video_id`,
SUM(`views`) AS `views`
FROM
`@table_name_basic`
WHERE
DATE(_PARTITIONTIME) >= @start_date
AND DATE(_PARTITIONTIME) <= @end_date
AND `video_id` IS NOT NULL
GROUP BY
`date`,
`video_id` ) `basic`
ON
`basic`.`date` = `demo`.`date`
AND `basic`.`video_id` = `demo`.`video_id`
GROUP BY
`demo`.`video_id` ) `total`
ON
`total`.`video_id` = `demo`.`video_id`
)
GROUP BY
`video_id`,
`age_group`,
`gender`
ORDER BY
`video_id`,
`gender`,
`age_group`;
""".strip()
query = insert_table_name(
query,
"p_channel_demographics_a1_",
bigquery_suffix,
placeholder="@table_name_demo",
)
query = insert_table_name(
query,
"p_channel_basic_a2_",
bigquery_suffix,
placeholder="@table_name_basic",
)
job_config = bigquery.QueryJobConfig(
default_dataset=f"wdr-okr.youtube_channel_{bigquery_suffix}",
query_parameters=[
bigquery.ScalarQueryParameter("start_date", "DATE", start_date.isoformat()),
bigquery.ScalarQueryParameter("end_date", "DATE", end_date.isoformat()),
],
)
def df_cleaner(df: pd.DataFrame) -> pd.DataFrame:
return df.replace({np.nan: None})
yield from iter_results(
bigquery_client,
query,
job_config,
df_cleaner,
)
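A hedged sketch of how one of these generators might be consumed. The dataset suffix and date range are placeholders, and running it requires the configured bigquery_client from ..common.google plus valid Google Cloud credentials.

import datetime as dt

# Placeholder suffix; a real wdr-okr.youtube_channel_<suffix> dataset is needed.
for row in get_bigquery_basic(
    "example_suffix",
    start_date=dt.date(2021, 1, 1),
    end_date=dt.date(2021, 1, 7),
):
    # Each row is a pandas Series with the columns selected in the query.
    print(row["video_id"], row["views"], row["watch_time_minutes"])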
| 27.575964
| 88
| 0.639092
| 1,547
| 12,161
| 4.807369
| 0.100194
| 0.035767
| 0.02259
| 0.029044
| 0.855587
| 0.831518
| 0.82856
| 0.80718
| 0.791717
| 0.791717
| 0
| 0.001763
| 0.253598
| 12,161
| 440
| 89
| 27.638636
| 0.817561
| 0.229751
| 0
| 0.788162
| 0
| 0
| 0.494182
| 0.082986
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031153
| false
| 0
| 0.021807
| 0.012461
| 0.068536
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
660bf3f87252f10baf3b0087db1aa92325d9a112
| 3,047
|
py
|
Python
|
tests/python/test_local_atomics.py
|
kxxt/taichi
|
15f39b79c258080f1e34fcbdc29646d9ced0a4fe
|
[
"MIT"
] | 11,699
|
2020-01-09T03:02:46.000Z
|
2022-03-31T20:59:08.000Z
|
tests/python/test_local_atomics.py
|
kxxt/taichi
|
15f39b79c258080f1e34fcbdc29646d9ced0a4fe
|
[
"MIT"
] | 3,589
|
2020-01-09T03:18:25.000Z
|
2022-03-31T19:06:42.000Z
|
tests/python/test_local_atomics.py
|
kxxt/taichi
|
15f39b79c258080f1e34fcbdc29646d9ced0a4fe
|
[
"MIT"
] | 1,391
|
2020-01-09T03:02:54.000Z
|
2022-03-31T08:44:29.000Z
|
import taichi as ti
@ti.test()
def test_explicit_local_atomic_add():
A = ti.field(ti.f32, shape=())
@ti.kernel
def func():
a = 0
for i in range(10):
ti.atomic_add(a, i)
A[None] = a
func()
assert A[None] == 45
@ti.test()
def test_implicit_local_atomic_add():
A = ti.field(ti.f32, shape=())
@ti.kernel
def func():
a = 0
for i in range(10):
a += i
A[None] = a
func()
assert A[None] == 45
@ti.test()
def test_explicit_local_atomic_sub():
A = ti.field(ti.f32, shape=())
@ti.kernel
def func():
a = 0
for i in range(10):
ti.atomic_sub(a, i)
A[None] = a
func()
assert A[None] == -45
@ti.test()
def test_implicit_local_atomic_sub():
A = ti.field(ti.f32, shape=())
@ti.kernel
def func():
a = 0
for i in range(10):
a -= i
A[None] = a
func()
assert A[None] == -45
@ti.test()
def test_explicit_local_atomic_min():
A = ti.field(ti.f32, shape=())
@ti.kernel
def func():
a = 1000
for i in range(10):
ti.atomic_min(a, i)
A[None] = a
func()
assert A[None] == 0
@ti.test()
def test_explicit_local_atomic_max():
A = ti.field(ti.f32, shape=())
@ti.kernel
def func():
a = -1000
for i in range(10):
ti.atomic_max(a, i)
A[None] = a
func()
assert A[None] == 9
@ti.test()
def test_explicit_local_atomic_and():
A = ti.field(ti.i32, shape=())
max_int = 2147483647
@ti.kernel
def func():
a = 1023
for i in range(10):
ti.atomic_and(a, max_int - 2**i)
A[None] = a
func()
assert A[None] == 0
@ti.test()
def test_implicit_local_atomic_and():
A = ti.field(ti.i32, shape=())
max_int = 2147483647
@ti.kernel
def func():
a = 1023
for i in range(10):
a &= max_int - 2**i
A[None] = a
func()
assert A[None] == 0
@ti.test()
def test_explicit_local_atomic_or():
A = ti.field(ti.i32, shape=())
@ti.kernel
def func():
a = 0
for i in range(10):
ti.atomic_or(a, 2**i)
A[None] = a
func()
assert A[None] == 1023
@ti.test()
def test_implicit_local_atomic_or():
A = ti.field(ti.i32, shape=())
@ti.kernel
def func():
a = 0
for i in range(10):
a |= 2**i
A[None] = a
func()
assert A[None] == 1023
@ti.test()
def test_explicit_local_atomic_xor():
A = ti.field(ti.i32, shape=())
@ti.kernel
def func():
a = 1023
for i in range(10):
ti.atomic_xor(a, 2**i)
A[None] = a
func()
assert A[None] == 0
@ti.test()
def test_implicit_local_atomic_xor():
A = ti.field(ti.i32, shape=())
@ti.kernel
def func():
a = 1023
for i in range(10):
a ^= 2**i
A[None] = a
func()
assert A[None] == 0
| 16.559783
| 44
| 0.495898
| 461
| 3,047
| 3.149675
| 0.084599
| 0.082645
| 0.07438
| 0.107438
| 0.970386
| 0.970386
| 0.970386
| 0.930441
| 0.930441
| 0.913912
| 0
| 0.063765
| 0.351493
| 3,047
| 183
| 45
| 16.650273
| 0.671053
| 0
| 0
| 0.792593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088889
| 1
| 0.177778
| false
| 0
| 0.007407
| 0
| 0.185185
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6637b85a116f6e2a55c4aa56b858f2d11e3837c8
| 10,375
|
py
|
Python
|
Scheduler/SchedulingAlgos.py
|
dsdn/tssdn
|
bc4f320d7d483d3852597c44676060aa70e8b2b2
|
[
"Apache-2.0"
] | 4
|
2021-11-13T11:38:02.000Z
|
2022-01-21T19:05:26.000Z
|
Scheduler/SchedulingAlgos.py
|
dsdn/tssdn
|
bc4f320d7d483d3852597c44676060aa70e8b2b2
|
[
"Apache-2.0"
] | null | null | null |
Scheduler/SchedulingAlgos.py
|
dsdn/tssdn
|
bc4f320d7d483d3852597c44676060aa70e8b2b2
|
[
"Apache-2.0"
] | null | null | null |
#! /usr/bin/python
'''
Scheduling algorithms for TSSDN.
Implements two interface functions -
ShortestAvailablePath - Schedules over shortest available path
MiniMax - Schedules using MiniMax algorithm
Author - Naresh Nayak
Date - 15.02.2016
'''
from pulp import *
import time
import numpy as np
current_time = lambda: int(round(time.time() * 1000))
class SchedulingAlgos():
# Configure whether the topology should be trimmed before building the ILP
def setTopoTrim(self, trim):
self.trimTopo = trim
# Scheduling function - ShortestAvailablePath
def ShortestAvailablePath(self, topo, src, dst, ls):
numDst = len(dst)
# Number of slots on which scheduling is to be done
timeSlots = ls.keys()
# Optimization 1 - Trim the topology
if self.trimTopo == 1:
nodes = topo.nodes()
for n in nodes:
if topo.degree(n) <= 2 and (n != src) and (n not in dst):
topo.remove_node(n)
links = topo.edges()
for (n1, n2) in links:
if n1 in dst or n2 == src: topo.remove_edge(n1,n2)
nodes = topo.nodes()
links = topo.edges()
# Create the ILP
ilpCreation = -current_time()
ilpProb = LpProblem("ShortestAvailablePath", LpMinimize)
# Decision Variables
# 1. Variables for link usage
decVarLinks = LpVariable.dicts("LinkDec", links, cat = pulp.LpBinary)
# 2. Variables that denote the number of intended destinations for the packet on the link.
decVarLinkLoads = LpVariable.dicts("LinkLoadsDec", links, lowBound = 0, upBound = numDst, cat = pulp.LpInteger)
# 3. Variables for slots
decVarSlots = LpVariable.dicts("SlotDec", timeSlots, cat = pulp.LpBinary)
# 4. Variables representing the auxiliary variables
decVar = []
for l in links:
for t in timeSlots:
decVar.append((l,t))
decVarLinkSlots = LpVariable.dicts("LinkSlotDec", decVar, cat = pulp.LpBinary)
# Objective function
# Minimize the length of the path on which the flow is routed
ilpProb += sum(decVarLinks[l] for l in links)
# Constraints
# 1. Only one time-slot to be allocated
ilpProb += sum(decVarSlots[s] for s in timeSlots) == 1, "Only one slot"
# 2. For all nodes (except src and dst), sum of outgoing edges equal to incoming edges
for n in nodes:
outEdges = topo.out_edges(n)
inEdges = topo.in_edges(n)
if n == src:
ilpProb += sum(decVarLinkLoads[e] for e in outEdges) == numDst, "Sum Out-Edges - "+n
ilpProb += sum(decVarLinkLoads[e] for e in inEdges) == 0, "Sum In-Edges - "+n
elif n in dst:
ilpProb += sum(decVarLinkLoads[e] for e in outEdges) == 0, "Sum Out-Edges - "+n
ilpProb += sum(decVarLinkLoads[e] for e in inEdges) == 1, "Sum In-Edges - "+n
else:
ilpProb += sum(decVarLinkLoads[e] for e in outEdges) == sum(decVarLinkLoads[e] for e in inEdges), "Sum Edges - "+n
# 3. Get which links are to be used based on the number of destinations for the packet
for e in links:
ilpProb += decVarLinks[e] >= decVarLinkLoads[e] / float(numDst)
# 4. Relation between decVarLinks, decVarSlots and decVarLinkSlots
for l in links:
for t in timeSlots:
ilpProb += decVarLinkSlots[(l,t)] <= decVarLinks[l],"Linear Constraint 1 - " + str((l,t))
ilpProb += decVarLinkSlots[(l,t)] <= decVarSlots[t],"Linear Constraint 2 - " + str((l,t))
ilpProb += decVarLinkSlots[(l,t)] >= decVarLinks[l] + decVarSlots[t] - 1, "Linear Constraint 3 - " + str((l,t))
# 5. No collision on links
for t in timeSlots:
for l in links:
ilpProb += ls[t][l] + decVarLinkSlots[(l,t)] <= 1, "Link collision - " + str(t) + " " + str(l)
ilpCreation += current_time()
# Solve the ILP
ilpSolution = -current_time()
ilpProb.solve(CPLEX(msg=0))
ilpSolution += current_time()
slotAlloc = (0,0)
# If the ILP is feasibly solved, we extract the allocated time-slot and the path
if ilpProb.status == 1:
route = []
# For extraction of the values from the decision variables, we need an ugly hack.
# Instead of checking if the assigned value by CPLEX is 1, we check if it is greater than 0.5.
# This is because we observed that occasionally a few variables were assigned real numbers (very low exponents) despite the
# ILP specifying otherwise. It might be a CPLEX bug or PuLP bug. So for now we use 0.5 for our checks.
for t in timeSlots:
if decVarSlots[t].varValue > 0.5:
slotAlloc = t
for l in links:
if decVarLinks[l].varValue > 0.5:
route.append(l)
return (route, slotAlloc, ilpCreation, ilpSolution)
else:
return ([ilpProb.status], slotAlloc, ilpCreation, ilpSolution)
# Scheduling function - MiniMax
def MiniMax(self, topo, src, dst, ls):
numDst = len(dst)
# Number of slots on which scheduling is to be done
timeSlots = ls.keys()
# Optimization 1 - Trim the topology
if self.trimTopo == 1:
nodes = topo.nodes()
for n in nodes:
if topo.degree(n) <= 2 and (n != src) and (n not in dst):
topo.remove_node(n)
links = topo.edges()
for (n1, n2) in links:
if n1 in dst or n2 == src: topo.remove_edge(n1,n2)
nodes = topo.nodes()
links = topo.edges()
# Create the ILP
ilpCreation = -current_time()
ilpProb = LpProblem("Mini-Max", LpMinimize)
# Decision Variables
# 1. Variables for link usage
decVarLinks = LpVariable.dicts("LinkDec", links, cat = pulp.LpBinary)
# 2. Variables that denote the number of intended destinations for the packet on the link.
decVarLinkLoads = LpVariable.dicts("LinkLoadsDec", links, lowBound = 0, upBound = numDst, cat = pulp.LpInteger)
# 3. Variables for slots
decVarSlots = LpVariable.dicts("SlotDec", timeSlots, cat = pulp.LpBinary)
# 4. Variables representing the auxiliary variables
decVar = []
for l in links:
for t in timeSlots:
decVar.append((l,t))
decVarLinkSlots = LpVariable.dicts("LinkSlotDec", decVar, cat = pulp.LpBinary)
# 5. Decision variable for maximum load
decVarMaxLoad = LpVariable("MaxLoad", lowBound = 0, upBound = len(timeSlots), cat = pulp.LpInteger)
# Objective function
# Minimize the length of the path on which the flow is routed
ilpProb += decVarMaxLoad + round(1/float(len(links)), 5) * (sum(decVarLinks[l] for l in links))
# Constraints
# 1. Only one time-slot to be allocated
ilpProb += sum(decVarSlots[s] for s in timeSlots) == 1, "Only one slot"
# 2. For all nodes (except src and dst), sum of outgoing edges equal to incoming edges
for n in nodes:
outEdges = topo.out_edges(n)
inEdges = topo.in_edges(n)
if n == src:
ilpProb += sum(decVarLinkLoads[e] for e in outEdges) == numDst, "Sum Out-Edges - "+n
ilpProb += sum(decVarLinkLoads[e] for e in inEdges) == 0, "Sum In-Edges - "+n
elif n in dst:
ilpProb += sum(decVarLinkLoads[e] for e in outEdges) == 0, "Sum Out-Edges - "+n
ilpProb += sum(decVarLinkLoads[e] for e in inEdges) == 1, "Sum In-Edges - "+n
else:
ilpProb += sum(decVarLinkLoads[e] for e in outEdges) == sum(decVarLinkLoads[e] for e in inEdges), "Sum Edges - "+n
# 3. Get which links are to be used based on the number of destinations for the packet
for e in links:
ilpProb += decVarLinks[e] >= decVarLinkLoads[e] / float(numDst)
# 4. Relation between decVarLinks, decVarSlots and decVarLinkSlots
for l in links:
for t in timeSlots:
ilpProb += decVarLinkSlots[(l,t)] <= decVarLinks[l],"Linear Constraint 1 - " + str((l,t))
ilpProb += decVarLinkSlots[(l,t)] <= decVarSlots[t],"Linear Constraint 2 - " + str((l,t))
ilpProb += decVarLinkSlots[(l,t)] >= decVarLinks[l] + decVarSlots[t] - 1, "Linear Constraint 3 - " + str((l,t))
# 5. No collision on links
for t in timeSlots:
for l in links:
ilpProb += ls[t][l] + decVarLinkSlots[(l,t)] <= 1, "Link collision - " + str(t) + " " + str(l)
# 6. Load on each link must be less than the maximum load
for l in links:
(l1, l2) = l
if l1 != src and l2 not in dst:
ilpProb += sum([ls[t][l] for t in timeSlots]) + decVarLinks[l] <= decVarMaxLoad, "Link Loads - "+str(l)
ilpCreation += current_time()
# Solve the ILP
ilpSolution = -current_time()
ilpProb.solve(CPLEX(msg=0))
ilpSolution += current_time()
slotAlloc = (0,0)
# If the ILP is feasibly solved, we extract the allocated time-slot and the path
if ilpProb.status == 1:
route = []
# For extraction of the values from the decision variables, we need an ugly hack.
# Instead of checking if the assigned value by CPLEX is 1, we check if it is greater than 0.5.
# This is because we observed that occasionally a few variables were assigned real numbers (very low exponents) despite the
# ILP specifying otherwise. It might be a CPLEX bug or PuLP bug. So for now we use 0.5 for our checks.
for t in timeSlots:
if decVarSlots[t].varValue > 0.5:
slotAlloc = t
for l in links:
if decVarLinks[l].varValue > 0.5:
route.append(l)
return (route, slotAlloc, ilpCreation, ilpSolution)
else:
return ([ilpProb.status], slotAlloc, ilpCreation, ilpSolution)
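A hedged invocation sketch for the scheduler above. It assumes the topology is a networkx DiGraph, that the slot map has the shape {slot: {edge: 0 or 1}} implied by the collision constraints, and that a CPLEX installation is available to PuLP; the node names and the single time slot are illustrative.

import networkx as nx

# Tiny example topology: one source host, one switch, two destination hosts.
topo = nx.DiGraph()
topo.add_edges_from([("h1", "s1"), ("s1", "h2"), ("s1", "h3")])

# One candidate time slot in which every link is still unoccupied.
ls = {(0, 0): {edge: 0 for edge in topo.edges()}}

scheduler = SchedulingAlgos()
scheduler.setTopoTrim(0)  # keep the full topology
route, slot, t_create, t_solve = scheduler.ShortestAvailablePath(
    topo, "h1", ["h2", "h3"], ls)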
| 43.410042
| 135
| 0.581012
| 1,298
| 10,375
| 4.630971
| 0.172573
| 0.005324
| 0.013974
| 0.043919
| 0.874064
| 0.874064
| 0.874064
| 0.874064
| 0.874064
| 0.874064
| 0
| 0.014637
| 0.321735
| 10,375
| 238
| 136
| 43.592437
| 0.839562
| 0.272578
| 0
| 0.875
| 0
| 0
| 0.062083
| 0.002804
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022059
| false
| 0
| 0.022059
| 0
| 0.080882
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b080a868b8e83c92758f2e1bf886d97e8c478608
| 3,779
|
py
|
Python
|
test/nn/test_disentangle.py
|
steven-lang/e2cnn
|
48f49760766ec958b52d0dd7b02483886dfa2096
|
[
"BSD-3-Clause"
] | 356
|
2019-11-22T10:37:22.000Z
|
2022-03-25T14:42:45.000Z
|
test/nn/test_disentangle.py
|
steven-lang/e2cnn
|
48f49760766ec958b52d0dd7b02483886dfa2096
|
[
"BSD-3-Clause"
] | 52
|
2020-01-20T16:51:36.000Z
|
2022-03-31T21:40:19.000Z
|
test/nn/test_disentangle.py
|
steven-lang/e2cnn
|
48f49760766ec958b52d0dd7b02483886dfa2096
|
[
"BSD-3-Clause"
] | 48
|
2019-12-11T09:29:30.000Z
|
2022-03-18T17:51:55.000Z
|
import unittest
from unittest import TestCase
from e2cnn.gspaces import *
from e2cnn.nn import *
from e2cnn.group import directsum
import numpy as np
from scipy.stats import ortho_group
class TestDisentangle(TestCase):
def test_regular_cyclic(self):
space = Rot2dOnR2(6)
g = space.fibergroup
rr = g.regular_representation
N = 4
size = rr.size * N
p = np.eye(size, size)
p = p[:, np.random.permutation(size)]
repr = directsum([rr] * N, change_of_basis=p)
cls = FieldType(space, [repr] * 8)
el = DisentangleModule(cls)
el.check_equivariance()
def test_regular_dihedral(self):
space = FlipRot2dOnR2(5)
g = space.fibergroup
rr = g.regular_representation
N = 4
size = rr.size * N
p = np.eye(size, size)
p = p[:, np.random.permutation(size)]
repr = directsum([rr] * N, change_of_basis=p)
cls = FieldType(space, [repr] * 8)
el = DisentangleModule(cls)
el.check_equivariance()
def test_mix_cyclic(self):
space = Rot2dOnR2(6)
g = space.fibergroup
rr = directsum(list(g.representations.values()))
N = 3
size = rr.size * N
bcob = ortho_group.rvs(dim=size//5)
bsize = bcob.shape[0]
p = np.eye(size, size)
for i in range(size//bsize):
p[i*bsize:(i+1)*bsize, i*bsize:(i+1)*bsize] = bcob
p = p[:, np.random.permutation(size)]
repr = directsum([rr] * N, change_of_basis=p)
cls = FieldType(space, [repr] * 8)
el = DisentangleModule(cls)
el.check_equivariance()
def test_mix_dihedral(self):
space = FlipRot2dOnR2(5)
g = space.fibergroup
rr = directsum(list(g.representations.values()))
N = 3
size = rr.size * N
bcob = ortho_group.rvs(dim=size//5)
bsize = bcob.shape[0]
p = np.eye(size, size)
for i in range(size//bsize):
p[i*bsize:(i+1)*bsize, i*bsize:(i+1)*bsize] = bcob
p = p[:, np.random.permutation(size)]
repr = directsum([rr] * N, change_of_basis=p)
cls = FieldType(space, [repr] * 8)
el = DisentangleModule(cls)
el.check_equivariance()
def test_mix_so2(self):
space = Rot2dOnR2(-1, maximum_frequency=4)
g = space.fibergroup
rr = directsum(list(g.representations.values()))
N = 3
size = rr.size * N
bcob = ortho_group.rvs(dim=size//5)
bsize = bcob.shape[0]
p = np.eye(size, size)
for i in range(size//bsize):
p[i*bsize:(i+1)*bsize, i*bsize:(i+1)*bsize] = bcob
p = p[:, np.random.permutation(size)]
repr = directsum([rr] * N, change_of_basis=p)
cls = FieldType(space, [repr] * 8)
el = DisentangleModule(cls)
el.check_equivariance()
def test_mix_o2(self):
space = FlipRot2dOnR2(-1, maximum_frequency=4)
g = space.fibergroup
rr = directsum(list(g.representations.values()))
N = 3
size = rr.size * N
bcob = ortho_group.rvs(dim=size//5)
bsize = bcob.shape[0]
p = np.eye(size, size)
for i in range(size//bsize):
p[i*bsize:(i+1)*bsize, i*bsize:(i+1)*bsize] = bcob
p = p[:, np.random.permutation(size)]
repr = directsum([rr] * N, change_of_basis=p)
cls = FieldType(space, [repr] * 8)
el = DisentangleModule(cls)
el.check_equivariance()
if __name__ == '__main__':
unittest.main()
| 27.18705
| 62
| 0.539031
| 473
| 3,779
| 4.207188
| 0.154334
| 0.01809
| 0.028141
| 0.032161
| 0.864824
| 0.864824
| 0.864824
| 0.864824
| 0.864824
| 0.808543
| 0
| 0.021116
| 0.335803
| 3,779
| 138
| 63
| 27.384058
| 0.771713
| 0
| 0
| 0.816327
| 0
| 0
| 0.002117
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061224
| false
| 0
| 0.071429
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b0b321eab50298e5f5bd78dd84ad74d1af1cf9e9
| 180
|
py
|
Python
|
experiments/13_DRAFT_distribution/experiment/createPreloadList.py
|
thegricean/overinformativeness
|
d20b66148c13af473b57cc4d1736191a49660349
|
[
"MIT"
] | 1
|
2016-10-27T18:41:57.000Z
|
2016-10-27T18:41:57.000Z
|
experiments/13_DRAFT_distribution/experiment/createPreloadList.py
|
thegricean/overinformativeness
|
d20b66148c13af473b57cc4d1736191a49660349
|
[
"MIT"
] | 9
|
2015-11-30T21:44:31.000Z
|
2020-04-21T01:26:05.000Z
|
experiments/13_DRAFT_distribution/experiment/createPreloadList.py
|
thegricean/overinformativeness
|
d20b66148c13af473b57cc4d1736191a49660349
|
[
"MIT"
] | 2
|
2015-11-25T09:53:20.000Z
|
2017-03-17T21:51:18.000Z
|
import os
print("\",\"pictures/".join(os.listdir("/Users/elisakreiss/Documents/Stanford/study/overinformativeness/experiments/10_distributional_learning/experiment/pictures/")))
| 36
| 167
| 0.811111
| 19
| 180
| 7.578947
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011429
| 0.027778
| 180
| 4
| 168
| 45
| 0.811429
| 0
| 0
| 0
| 0
| 0.5
| 0.738889
| 0.683333
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 8
|
b0c86badeb98418381a44e73ee8b02624d8fa5f3
| 10,012
|
py
|
Python
|
tests/test_client.py
|
shbatm/regenmaschine
|
f8cb86a5c95ea29f29270525a34f11d07dc057bf
|
[
"MIT"
] | 7
|
2018-05-08T05:31:26.000Z
|
2022-02-17T20:06:10.000Z
|
tests/test_client.py
|
shbatm/regenmaschine
|
f8cb86a5c95ea29f29270525a34f11d07dc057bf
|
[
"MIT"
] | 98
|
2017-07-02T21:26:51.000Z
|
2022-03-01T20:35:31.000Z
|
tests/test_client.py
|
shbatm/regenmaschine
|
f8cb86a5c95ea29f29270525a34f11d07dc057bf
|
[
"MIT"
] | 11
|
2017-07-05T21:28:13.000Z
|
2021-10-03T21:49:57.000Z
|
"""Define tests for the client object."""
# pylint: disable=protected-access
import asyncio
from datetime import datetime, timedelta
import aiohttp
import pytest
from regenmaschine import Client
from regenmaschine.errors import RequestError, TokenExpiredError
import tests.async_mock as mock
from tests.common import (
TEST_ACCESS_TOKEN,
TEST_API_VERSION,
TEST_EMAIL,
TEST_HOST,
TEST_HW_VERSION,
TEST_MAC,
TEST_NAME,
TEST_PASSWORD,
TEST_PORT,
TEST_SW_VERSION,
load_fixture,
)
@pytest.mark.asyncio
async def test_legacy_login(authenticated_local_client):
"""Test loading a local client through the legacy method."""
async with authenticated_local_client:
async with aiohttp.ClientSession() as session:
client = Client(session=session)
await client.load_local(TEST_HOST, TEST_PASSWORD, port=TEST_PORT, ssl=False)
controller = next(iter(client.controllers.values()))
assert controller._access_token == TEST_ACCESS_TOKEN
assert controller.api_version == TEST_API_VERSION
assert controller.hardware_version == TEST_HW_VERSION
assert controller.mac == TEST_MAC
assert controller.name == TEST_NAME
assert controller.software_version == TEST_SW_VERSION
@pytest.mark.asyncio
async def test_load_local(authenticated_local_client):
"""Test loading a local client."""
async with authenticated_local_client:
async with aiohttp.ClientSession() as session:
client = Client(session=session)
await client.load_local(TEST_HOST, TEST_PASSWORD, TEST_PORT, False)
assert len(client.controllers) == 1
controller = client.controllers[TEST_MAC]
assert controller._access_token == TEST_ACCESS_TOKEN
assert controller.api_version == TEST_API_VERSION
assert controller.hardware_version == TEST_HW_VERSION
assert controller.mac == TEST_MAC
assert controller.name == TEST_NAME
assert controller.software_version == TEST_SW_VERSION
@pytest.mark.asyncio
async def test_load_local_skip(aresponses, authenticated_local_client):
"""Test skipping the loading of a local client if it's already loaded."""
authenticated_local_client.add(
f"{TEST_HOST}:{TEST_PORT}",
"/api/4/auth/login",
"post",
aresponses.Response(text=load_fixture("auth_login_response.json"), status=200),
)
authenticated_local_client.add(
f"{TEST_HOST}:{TEST_PORT}",
"/api/4/provision/wifi",
"get",
aresponses.Response(
text=load_fixture("provision_wifi_response.json"), status=200
),
)
async with authenticated_local_client:
async with aiohttp.ClientSession() as session:
client = Client(session=session)
await client.load_local(TEST_HOST, TEST_PASSWORD, TEST_PORT, True)
controller = client.controllers[TEST_MAC]
await client.load_local(TEST_HOST, TEST_PASSWORD, TEST_PORT, True)
assert len(client.controllers) == 1
assert client.controllers[TEST_MAC] == controller
@pytest.mark.asyncio
async def test_load_local_failure(aresponses):
"""Test loading a local client and receiving a fail response."""
aresponses.add(
f"{TEST_HOST}:{TEST_PORT}",
"/api/4/auth/login",
"post",
aresponses.Response(text=None, status=500),
)
with pytest.raises(RequestError):
async with aiohttp.ClientSession() as session:
client = Client(session=session)
await client.load_local(TEST_HOST, TEST_PASSWORD, TEST_PORT, False)
@pytest.mark.asyncio
async def test_load_remote(authenticated_remote_client, event_loop):
"""Test loading a remote client."""
async with authenticated_remote_client:
async with aiohttp.ClientSession(loop=event_loop) as session:
client = Client(session=session)
await client.load_remote(TEST_EMAIL, TEST_PASSWORD)
assert len(client.controllers) == 1
controller = client.controllers[TEST_MAC]
assert controller._access_token == TEST_ACCESS_TOKEN
assert controller.api_version == TEST_API_VERSION
assert controller.hardware_version == TEST_HW_VERSION
assert controller.mac == TEST_MAC
assert controller.name == TEST_NAME
assert controller.software_version == TEST_SW_VERSION
@pytest.mark.asyncio
async def test_load_remote_skip(aresponses, authenticated_remote_client):
"""Test skipping the loading of a remote client if it's already loaded."""
authenticated_remote_client.add(
"my.rainmachine.com",
"/login/auth",
"post",
aresponses.Response(
text=load_fixture("remote_auth_login_1_response.json"), status=200
),
)
authenticated_remote_client.add(
"my.rainmachine.com",
"/devices/get-sprinklers",
"post",
aresponses.Response(
text=load_fixture("remote_sprinklers_response.json"), status=200
),
)
async with authenticated_remote_client:
async with aiohttp.ClientSession() as session:
client = Client(session=session)
await client.load_remote(TEST_EMAIL, TEST_PASSWORD, True)
controller = client.controllers[TEST_MAC]
await client.load_remote(TEST_EMAIL, TEST_PASSWORD, True)
assert len(client.controllers) == 1
assert client.controllers[TEST_MAC] == controller
@pytest.mark.asyncio
async def test_load_remote_failure(aresponses):
"""Test loading a remote client and receiving a fail response."""
aresponses.add(
"my.rainmachine.com",
"/login/auth",
"post",
aresponses.Response(
text=load_fixture("unauthenticated_response.json"), status=401
),
)
with pytest.raises(TokenExpiredError):
async with aiohttp.ClientSession() as session:
client = Client(session=session)
await client.load_remote(TEST_EMAIL, TEST_PASSWORD)
@pytest.mark.asyncio
async def test_remote_error_known(aresponses):
"""Test that remote error handling works."""
aresponses.add(
"my.rainmachine.com",
"/login/auth",
"post",
aresponses.Response(
text=load_fixture("remote_error_known_response.json"), status=200
),
)
with pytest.raises(RequestError):
async with aiohttp.ClientSession() as session:
client = Client(session=session)
await client.load_remote(TEST_EMAIL, TEST_PASSWORD)
@pytest.mark.asyncio
async def test_remote_error_http_body(aresponses):
"""Test that remote error handling works."""
aresponses.add(
"my.rainmachine.com",
"/login/auth",
"post",
aresponses.Response(
text=load_fixture("remote_error_http_body_response.json"), status=200
),
)
with pytest.raises(RequestError):
async with aiohttp.ClientSession() as session:
client = Client(session=session)
await client.load_remote(TEST_EMAIL, TEST_PASSWORD)
@pytest.mark.asyncio
async def test_remote_error_unknown(aresponses):
"""Test that remote error handling works."""
aresponses.add(
"my.rainmachine.com",
"/login/auth",
"post",
aresponses.Response(
text=load_fixture("remote_error_unknown_response.json"), status=200
),
)
with pytest.raises(RequestError):
async with aiohttp.ClientSession() as session:
client = Client(session=session)
await client.load_remote(TEST_EMAIL, TEST_PASSWORD)
@pytest.mark.asyncio
async def test_request_timeout(authenticated_local_client): # noqa: D202
"""Test whether the client properly raises an error on timeout."""
async def long_running_login(*args, **kwargs): # pylint: disable=unused-argument
"""Define a method that takes 0.5 seconds to execute."""
await asyncio.sleep(0.5)
with mock.patch.object(aiohttp.ClientResponse, "json", long_running_login):
async with authenticated_local_client:
async with aiohttp.ClientSession() as session:
with pytest.raises(RequestError):
client = Client(session=session, request_timeout=0.1)
await client.load_local(
TEST_HOST, TEST_PASSWORD, port=TEST_PORT, ssl=False
)
@pytest.mark.asyncio
async def test_token_expired_explicit_exception(aresponses):
"""Test that the appropriate error is thrown when a token expires explicitly."""
aresponses.add(
f"{TEST_HOST}:{TEST_PORT}",
"/api/4/auth/login",
"post",
aresponses.Response(
text=load_fixture("unauthenticated_response.json"), status=401
),
)
with pytest.raises(TokenExpiredError):
async with aiohttp.ClientSession() as session:
client = Client(session=session)
await client.load_local(TEST_HOST, TEST_PASSWORD, TEST_PORT, False)
@pytest.mark.asyncio
async def test_token_expired_implicit_exception(authenticated_local_client):
"""Test that the appropriate error is thrown when a token expires implicitly."""
async with authenticated_local_client:
with pytest.raises(TokenExpiredError):
async with aiohttp.ClientSession() as session:
client = Client(session=session)
await client.load_local(
TEST_HOST, TEST_PASSWORD, port=TEST_PORT, ssl=False
)
controller = next(iter(client.controllers.values()))
controller._access_token_expiration = datetime.now() - timedelta(
hours=1
)
await controller._request("get", "random/endpoint")
| 35.378092
| 88
| 0.664702
| 1,132
| 10,012
| 5.663428
| 0.134276
| 0.028077
| 0.035096
| 0.044611
| 0.812978
| 0.781625
| 0.777102
| 0.735923
| 0.707378
| 0.667135
| 0
| 0.006485
| 0.245306
| 10,012
| 282
| 89
| 35.503546
| 0.84198
| 0.011187
| 0
| 0.69863
| 0
| 0
| 0.075719
| 0.045409
| 0
| 0
| 0
| 0
| 0.109589
| 1
| 0
| false
| 0.073059
| 0.03653
| 0
| 0.03653
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
7c0cd565dd87c6ee99c8879fb94b96c408861f34
| 6,028
|
py
|
Python
|
vgg.py
|
akkaze/cnn-without-any-downsampling
|
53de97974642284b51cdf2d3eca5b1be5c000712
|
[
"MIT"
] | 80
|
2019-12-07T13:26:12.000Z
|
2022-03-13T10:12:31.000Z
|
vgg.py
|
akkaze/cnn-without-any-downsampling
|
53de97974642284b51cdf2d3eca5b1be5c000712
|
[
"MIT"
] | 1
|
2020-03-18T09:52:34.000Z
|
2020-11-18T14:34:53.000Z
|
vgg.py
|
akkaze/cnn-without-any-downsampling
|
53de97974642284b51cdf2d3eca5b1be5c000712
|
[
"MIT"
] | 8
|
2019-12-31T06:58:22.000Z
|
2021-12-10T21:27:30.000Z
|
import tensorflow as tf
from tensorflow.python.keras.models import Sequential, Model
from tensorflow.python.keras.layers import Input, Dense, Dropout, Activation, Flatten
from tensorflow.python.keras.layers import Conv2D, DepthwiseConv2D, MaxPooling2D, BatchNormalization, GlobalAveragePooling2D
def vgg(input_shape, num_classes, use_larger_kernel=False, use_downsampling=False):
inp = Input(input_shape)
kernel_size = 3
dilation_rate = 1
strides = 1
x = Conv2D(64, (kernel_size, kernel_size),
padding='same',
strides=strides,
dilation_rate=dilation_rate,
input_shape=input_shape)(inp)
x = BatchNormalization()(x)
x = Activation('relu')(x)
if not use_downsampling:
if use_larger_kernel:
kernel_size += 2
else:
dilation_rate *= 2
else:
strides = 2
x = Conv2D(128, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
# model.add(MaxPooling2D(pool_size=(2, 2)))
if not use_downsampling:
if use_larger_kernel:
kernel_size += 2
else:
dilation_rate *= 2
else:
strides = 2
x = Conv2D(256, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
if not use_downsampling:
if use_larger_kernel:
kernel_size += 2
else:
dilation_rate *= 2
else:
strides = 2
x = Conv2D(512, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
# model.add(MaxPooling2D(pool_size=(2, 2)))
x = GlobalAveragePooling2D()(x)
x = Dense(num_classes)(x)
out = Activation('softmax')(x)
model = tf.keras.models.Model([inp], [out])
return model
def vgg_3x3(input_shape, num_classes):
inp = Input(input_shape)
kernel_size = 3
dilation_rate = 1
strides = 1
x = Conv2D(16, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(inp)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(20, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(24, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(28, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(30, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(32, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(36, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(38, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(42, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(48, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = GlobalAveragePooling2D()(x)
x = Dense(num_classes)(x)
out = Activation('softmax')(x)
model = tf.keras.models.Model([inp], [out])
return model
def vgg_dep_wise_3x3(input_shape, num_classes):
inp = Input(input_shape)
kernel_size = 3
dilation_rate = 1
strides = 1
x = Conv2D(16, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(inp)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = DepthwiseConv2D((kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(24, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = DepthwiseConv2D((kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = DepthwiseConv2D((kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(32, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = DepthwiseConv2D((kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = DepthwiseConv2D((kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = DepthwiseConv2D((kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(48, (kernel_size, kernel_size), padding='same', strides=strides, dilation_rate=dilation_rate)(x)
x = GlobalAveragePooling2D()(x)
x = Dense(num_classes)(x)
out = Activation('softmax')(x)
model = tf.keras.models.Model([inp], [out])
return model
| 38.890323
| 124
| 0.670372
| 768
| 6,028
| 5.079427
| 0.088542
| 0.032812
| 0.098436
| 0.123045
| 0.921559
| 0.921559
| 0.902589
| 0.902589
| 0.90182
| 0.900282
| 0
| 0.020033
| 0.188454
| 6,028
| 155
| 125
| 38.890323
| 0.777392
| 0.013769
| 0
| 0.835938
| 0
| 0
| 0.034494
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023438
| false
| 0
| 0.03125
| 0
| 0.085938
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9fecf41f2f09ab4439b729f71f1dc39b70c0a884
| 10,489
|
py
|
Python
|
cl2py_ND.py
|
arturxz/TCC
|
441f5e1f842abb67743bf57bd7346b6cd3353091
|
[
"MIT"
] | 2
|
2019-06-02T17:09:17.000Z
|
2021-02-17T19:57:37.000Z
|
cl2py_ND.py
|
arturxz/TCC
|
441f5e1f842abb67743bf57bd7346b6cd3353091
|
[
"MIT"
] | null | null | null |
cl2py_ND.py
|
arturxz/TCC
|
441f5e1f842abb67743bf57bd7346b6cd3353091
|
[
"MIT"
] | null | null | null |
"""
************************************************************************
*** ***
*** Source code generated by cl2py.pl ***
*** ***
*** Please do not edit ***
*** ***
************************************************************************
"""
#!/usr/bin/python3
# OPENCL LIBRARY
import pyopencl as cl
# VGL LIBRARYS
import vgl_lib as vl
#TO WORK WITH MAIN
import numpy as np
"""
/** N-dimensional convolution
SHAPE directive passes a structure with size of each dimension, offsets and number of dimensions. Parameter does not appear in wrapper parameter list. The C expression between parenthesis returns the desired shape of type VglClShape.
*/
"""
def vglClNdConvolution(img_input, img_output, window):
if( not img_input.clForceAsBuf == vl.IMAGE_ND_ARRAY() ):
print("vglClNdCopy: Error: this function supports only OpenCL data as buffer and img_input isn't.")
exit(1)
if( not img_output.clForceAsBuf == vl.IMAGE_ND_ARRAY() ):
print("vglClNdCopy: Error: this function supports only OpenCL data as buffer and img_output isn't.")
exit(1)
# CREATING OPENCL BUFFER TO VglClShape
mobj_img_shape = img_input.getVglShape().get_asVglClShape_buffer()
# EVALUATING IF window IS IN CORRECT TYPE
if( not isinstance(window, vl.VglStrEl) ):
print("vglClNdConvolution: Error: window is not a VglClStrEl object. aborting execution.")
exit()
# CREATING OPENCL BUFFER TO VglClStrEl
mobj_window = window.get_asVglClStrEl_buffer()
vl.vglCheckContext(img_input, vl.VGL_CL_CONTEXT())
vl.vglCheckContext(img_output, vl.VGL_CL_CONTEXT())
_program = vl.get_ocl_context().get_compiled_kernel("CL_ND/vglClNdConvolution.cl", "vglClNdConvolution")
_kernel = _program.vglClNdConvolution
_kernel.set_arg(0, img_input.get_oclPtr())
_kernel.set_arg(1, img_output.get_oclPtr())
_kernel.set_arg(2, mobj_img_shape)
_kernel.set_arg(3, mobj_window)
# THIS IS A BLOCKING COMMAND. IT EXECUTES THE KERNEL.
cl.enqueue_nd_range_kernel(vl.get_ocl().commandQueue, _kernel, img_input.get_ipl().shape, None)
mobj_img_shape = None
vl.vglSetContext(img_input, vl.VGL_CL_CONTEXT())
vl.vglSetContext(img_output, vl.VGL_CL_CONTEXT())
"""
/** Copy N-dimensional image.
*/
"""
def vglClNdCopy(img_input, img_output):
if( not img_input.clForceAsBuf == vl.IMAGE_ND_ARRAY() ):
print("vglClNdCopy: Error: this function supports only OpenCL data as buffer and img_input isn't.")
exit(1)
if( not img_output.clForceAsBuf == vl.IMAGE_ND_ARRAY() ):
print("vglClNdCopy: Error: this function supports only OpenCL data as buffer and img_output isn't.")
exit(1)
vl.vglCheckContext(img_input, vl.VGL_CL_CONTEXT())
vl.vglCheckContext(img_output, vl.VGL_CL_CONTEXT())
_program = vl.get_ocl_context().get_compiled_kernel("CL_ND/vglClNdCopy.cl", "vglClNdCopy")
_kernel = _program.vglClNdCopy
_kernel.set_arg(0, img_input.get_oclPtr())
_kernel.set_arg(1, img_output.get_oclPtr())
# THIS IS A BLOCKING COMMAND. IT EXECUTES THE KERNEL.
cl.enqueue_nd_range_kernel(vl.get_ocl().commandQueue, _kernel, img_input.get_ipl().shape, None)
vl.vglSetContext(img_input, vl.VGL_CL_CONTEXT())
vl.vglSetContext(img_output, vl.VGL_CL_CONTEXT())
"""
/** N-dimensional dilation
SHAPE directive passes a structure with size of each dimension, offsets and number of dimensions. Parameter does not appear in wrapper parameter list. The C expression between parenthesis returns the desired shape of type VglClShape.
*/
"""
def vglClNdDilate(img_input, img_output, window):
if( not img_input.clForceAsBuf == vl.IMAGE_ND_ARRAY() ):
print("vglClNdCopy: Error: this function supports only OpenCL data as buffer and img_input isn't.")
exit(1)
if( not img_output.clForceAsBuf == vl.IMAGE_ND_ARRAY() ):
print("vglClNdCopy: Error: this function supports only OpenCL data as buffer and img_output isn't.")
exit(1)
# CREATING OPENCL BUFFER TO VglClShape
mobj_img_shape = img_input.getVglShape().get_asVglClShape_buffer()
# EVALUATING IF window IS IN CORRECT TYPE
if( not isinstance(window, vl.VglStrEl) ):
print("vglClNdConvolution: Error: window is not a VglClStrEl object. aborting execution.")
exit()
# CREATING OPENCL BUFFER TO VglClStrEl
mobj_window = window.get_asVglClStrEl_buffer()
vl.vglCheckContext(img_input, vl.VGL_CL_CONTEXT())
vl.vglCheckContext(img_output, vl.VGL_CL_CONTEXT())
_program = vl.get_ocl_context().get_compiled_kernel("CL_ND/vglClNdDilate.cl", "vglClNdDilate")
_kernel = _program.vglClNdDilate
_kernel.set_arg(0, img_input.get_oclPtr())
_kernel.set_arg(1, img_output.get_oclPtr())
_kernel.set_arg(2, mobj_img_shape)
_kernel.set_arg(3, mobj_window)
# THIS IS A BLOCKING COMMAND. IT EXECUTES THE KERNEL.
cl.enqueue_nd_range_kernel(vl.get_ocl().commandQueue, _kernel, img_input.get_ipl().shape, None)
mobj_img_shape = None
vl.vglSetContext(img_input, vl.VGL_CL_CONTEXT())
vl.vglSetContext(img_output, vl.VGL_CL_CONTEXT())
"""
/** N-dimensional erosion
SHAPE directive passes a structure with size of each dimension, offsets and number of dimensions. Parameter does not appear in wrapper parameter list. The C expression between parenthesis returns the desired shape of type VglClShape.
*/
"""
def vglClNdErode(img_input, img_output, window):
if( not img_input.clForceAsBuf == vl.IMAGE_ND_ARRAY() ):
print("vglClNdCopy: Error: this function supports only OpenCL data as buffer and img_input isn't.")
exit(1)
if( not img_output.clForceAsBuf == vl.IMAGE_ND_ARRAY() ):
print("vglClNdCopy: Error: this function supports only OpenCL data as buffer and img_output isn't.")
exit(1)
# CREATING OPENCL BUFFER TO VglClShape
mobj_img_shape = img_input.getVglShape().get_asVglClShape_buffer()
# EVALUATING IF window IS IN CORRECT TYPE
if( not isinstance(window, vl.VglStrEl) ):
print("vglClNdConvolution: Error: window is not a VglClStrEl object. aborting execution.")
exit()
# CREATING OPENCL BUFFER TO VglClStrEl
mobj_window = window.get_asVglClStrEl_buffer()
vl.vglCheckContext(img_input, vl.VGL_CL_CONTEXT())
vl.vglCheckContext(img_output, vl.VGL_CL_CONTEXT())
_program = vl.get_ocl_context().get_compiled_kernel("CL_ND/vglClNdErode.cl", "vglClNdErode")
_kernel = _program.vglClNdErode
_kernel.set_arg(0, img_input.get_oclPtr())
_kernel.set_arg(1, img_output.get_oclPtr())
_kernel.set_arg(2, mobj_img_shape)
_kernel.set_arg(3, mobj_window)
# THIS IS A BLOCKING COMMAND. IT EXECUTES THE KERNEL.
cl.enqueue_nd_range_kernel(vl.get_ocl().commandQueue, _kernel, img_input.get_ipl().shape, None)
mobj_img_shape = None
vl.vglSetContext(img_input, vl.VGL_CL_CONTEXT())
vl.vglSetContext(img_output, vl.VGL_CL_CONTEXT())
"""
/** Invert N-dimensional image.
*/
"""
def vglClNdNot(img_input, img_output):
if( not img_input.clForceAsBuf == vl.IMAGE_ND_ARRAY() ):
print("vglClNdCopy: Error: this function supports only OpenCL data as buffer and img_input isn't.")
exit(1)
if( not img_output.clForceAsBuf == vl.IMAGE_ND_ARRAY() ):
print("vglClNdCopy: Error: this function supports only OpenCL data as buffer and img_output isn't.")
exit(1)
vl.vglCheckContext(img_input, vl.VGL_CL_CONTEXT())
vl.vglCheckContext(img_output, vl.VGL_CL_CONTEXT())
_program = vl.get_ocl_context().get_compiled_kernel("CL_ND/vglClNdNot.cl", "vglClNdNot")
_kernel = _program.vglClNdNot
_kernel.set_arg(0, img_input.get_oclPtr())
_kernel.set_arg(1, img_output.get_oclPtr())
# THIS IS A BLOCKING COMMAND. IT EXECUTES THE KERNEL.
cl.enqueue_nd_range_kernel(vl.get_ocl().commandQueue, _kernel, img_input.get_ipl().shape, None)
vl.vglSetContext(img_input, vl.VGL_CL_CONTEXT())
vl.vglSetContext(img_output, vl.VGL_CL_CONTEXT())
"""
/** Threshold of img_input by parameter. if the pixel is below thresh,
the output is 0, else, the output is top. Result is stored in img_output.
*/
"""
def vglClNdThreshold(img_input, img_output, thresh, top = 255):
if( not img_input.clForceAsBuf == vl.IMAGE_ND_ARRAY() ):
print("vglClNdCopy: Error: this function supports only OpenCL data as buffer and img_input isn't.")
exit(1)
if( not img_output.clForceAsBuf == vl.IMAGE_ND_ARRAY() ):
print("vglClNdCopy: Error: this function supports only OpenCL data as buffer and img_output isn't.")
exit(1)
vl.vglCheckContext(img_input, vl.VGL_CL_CONTEXT())
vl.vglCheckContext(img_output, vl.VGL_CL_CONTEXT())
# EVALUATING IF thresh IS IN CORRECT TYPE
if( not isinstance(thresh, np.uint8) ):
print("vglClConvolution: Warning: thresh not np.uint8! Trying to convert...")
try:
thresh = np.uint8(thresh)
except Exception as e:
print("vglClConvolution: Error!! Impossible to convert thresh as a np.uint8 object.")
print(str(e))
exit()
# EVALUATING IF top IS IN CORRECT TYPE
if( not isinstance(top, np.uint8) ):
print("vglClConvolution: Warning: top not np.uint8! Trying to convert...")
try:
top = np.uint8(top)
except Exception as e:
print("vglClConvolution: Error!! Impossible to convert top as a np.uint8 object.")
print(str(e))
exit()
_program = vl.get_ocl_context().get_compiled_kernel("CL_ND/vglClNdThreshold.cl", "vglClNdThreshold")
_kernel = _program.vglClNdThreshold
_kernel.set_arg(0, img_input.get_oclPtr())
_kernel.set_arg(1, img_output.get_oclPtr())
_kernel.set_arg(2, thresh)
_kernel.set_arg(3, top)
# THIS IS A BLOCKING COMMAND. IT EXECUTES THE KERNEL.
cl.enqueue_nd_range_kernel(vl.get_ocl().commandQueue, _kernel, img_input.get_ipl().shape, None)
vl.vglSetContext(img_input, vl.VGL_CL_CONTEXT())
vl.vglSetContext(img_output, vl.VGL_CL_CONTEXT())
| 36.420139
| 237
| 0.677281
| 1,391
| 10,489
| 4.871316
| 0.107117
| 0.054309
| 0.024793
| 0.049587
| 0.866883
| 0.856553
| 0.856553
| 0.839433
| 0.839433
| 0.830874
| 0
| 0.005663
| 0.208695
| 10,489
| 287
| 238
| 36.547038
| 0.810723
| 0.125083
| 0
| 0.780142
| 0
| 0
| 0.228955
| 0.011918
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042553
| false
| 0
| 0.021277
| 0
| 0.06383
| 0.148936
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b00252c33899f790306896276b85902b613959d0
| 19,771
|
py
|
Python
|
optimization/first_sdEta_mjj_optimization/sdEta_mistake_analyses/sdEta_mmjj_gridsearch/analysis_deltaeta6.1_mmjj_750/Output/Histos/MadAnalysis5job_0/selection_13.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
optimization/first_sdEta_mjj_optimization/sdEta_mistake_analyses/sdEta_mmjj_gridsearch/analysis_deltaeta6.1_mmjj_750/Output/Histos/MadAnalysis5job_0/selection_13.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
optimization/first_sdEta_mjj_optimization/sdEta_mistake_analyses/sdEta_mmjj_gridsearch/analysis_deltaeta6.1_mmjj_750/Output/Histos/MadAnalysis5job_0/selection_13.py
|
sheride/axion_pheno
|
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
|
[
"MIT"
] | null | null | null |
def selection_13():
# Library import
import numpy
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
# Library version
matplotlib_version = matplotlib.__version__
numpy_version = numpy.__version__
# Histo binning
xBinning = numpy.linspace(0.0,1000.0,81,endpoint=True)
# Creating data sequence: middle of each bin
xData = numpy.array([6.25,18.75,31.25,43.75,56.25,68.75,81.25,93.75,106.25,118.75,131.25,143.75,156.25,168.75,181.25,193.75,206.25,218.75,231.25,243.75,256.25,268.75,281.25,293.75,306.25,318.75,331.25,343.75,356.25,368.75,381.25,393.75,406.25,418.75,431.25,443.75,456.25,468.75,481.25,493.75,506.25,518.75,531.25,543.75,556.25,568.75,581.25,593.75,606.25,618.75,631.25,643.75,656.25,668.75,681.25,693.75,706.25,718.75,731.25,743.75,756.25,768.75,781.25,793.75,806.25,818.75,831.25,843.75,856.25,868.75,881.25,893.75,906.25,918.75,931.25,943.75,956.25,968.75,981.25,993.75])
# Creating weights for histo: y14_THT_0
y14_THT_0_weights = numpy.array([0.0,0.0,0.0,6.5095949214,21.1050035345,28.9820213891,33.1129541662,37.2397909466,39.8067809439,42.3246469795,44.6214051877,47.3972030221,48.875161869,50.9263202687,52.8136787963,54.6396373717,55.7081965381,56.637555813,57.5914750688,58.6641142319,58.9015940467,59.6098734941,60.8749525071,59.6507934622,61.656911897,61.7387918332,60.2321530086,60.5310327754,60.5719927435,60.2690329798,60.07659313,60.0479531523,59.5238735612,58.5822342958,58.6149942703,58.1155146599,57.2762353147,55.8719564103,55.6549965796,55.2251169149,54.717437311,52.9323987037,52.9774386685,52.5311990167,51.7410396331,51.491279828,49.8372811184,50.0952009172,48.4002822395,47.4954829454,46.5702036673,45.1127248043,45.6940843508,44.7156051142,43.0861263854,42.4433668869,41.0964079378,40.6992882476,40.2448486021,39.1230694773,37.5755066847,36.830383266,35.5489322657,35.6963201508,34.2470132815,33.4159179299,32.4374306932,32.3187027859,31.4835074375,31.049535776,29.530628961,29.0270573539,28.286029932,27.2338467529,26.2717395035,26.2635515099,24.5153768738,24.0568412315,23.4468217075,23.287149832])
# Creating weights for histo: y14_THT_1
y14_THT_1_weights = numpy.array([0.0,0.0,0.0,128.355203897,540.10916779,898.481420235,1125.05262026,1242.73821133,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y14_THT_2
y14_THT_2_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1329.56129523,1331.80337594,1277.32775677,1183.88472538,1067.52420782,956.796990331,853.683178111,756.400180637,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y14_THT_3
y14_THT_3_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,697.418571648,620.484145563,553.23040658,492.187923844,433.74263917,388.327301768,343.912899231,304.830898166,273.707218974,246.771110323,216.749727002,194.334238005,172.836563785,155.247118108,140.128890341,124.889760628,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y14_THT_4
y14_THT_4_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,114.771801681,103.945360762,94.0644031228,85.1520167049,76.9888259791,70.1053726061,63.6438353345,58.2347234062,52.7310551333,48.2796518706,44.3866881982,40.7696574958,36.9100430726,33.9221748153,31.2683562064,28.647979038,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y14_THT_5
y14_THT_5_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,26.2191123089,24.3547872383,22.526195005,20.909979295,19.2333926545,17.8391478406,16.4913505138,15.3220213243,14.086715818,13.1766987596,12.2434439542,11.4241013207,10.6311612169,9.79571460067,9.10226710257,8.53441466568,0.000251448706357,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y14_THT_6
y14_THT_6_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,8.02431558262,7.51030804877,6.95208235154,6.61078486788,6.21951215253,5.74100470726,5.48699893441,5.10793312325,4.79790514766,4.51395242298,4.23149219752,3.93685705714,3.75133430602,3.49375773129,3.27720739094,3.08477395858])
# Creating weights for histo: y14_THT_7
y14_THT_7_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y14_THT_8
y14_THT_8_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y14_THT_9
y14_THT_9_weights = numpy.array([0.0,0.0,0.0,45503.4508982,138097.118561,178482.936154,181502.27482,171105.624592,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y14_THT_10
y14_THT_10_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,160424.802241,157100.582391,146319.155163,117179.124838,93107.5900059,74054.4547792,59162.2792087,48131.4114061,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y14_THT_11
y14_THT_11_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,38962.5545651,32355.4487352,26998.4370463,22670.6752968,19174.8034575,16215.8131434,13782.4002913,11839.5153891,10294.1439018,8822.55620979,7548.61672003,6661.14017414,5800.7249869,5069.22299628,4461.124336,3970.54643612,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y14_THT_12
y14_THT_12_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,3511.0949143,3146.79112757,2820.84302489,2519.97871399,2263.17156094,2043.11621581,1834.28800363,1661.79187548,1505.52917439,1359.61453285,1231.31713763,1124.97734144,1025.34812608,928.286497499,856.582962369,783.520229737,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y14_THT_13
y14_THT_13_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,699.460949776,650.756719831,602.350018966,555.76332466,510.87490668,470.624981011,433.548961819,405.021973172,373.599309911,346.072378215,316.916955721,296.3685161,276.853327069,257.440753105,239.772513915,223.559394,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y14_THT_14
y14_THT_14_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,203.115551013,192.915824447,181.154470828,168.440151122,158.522055294,149.341201024,139.273825515,130.652231411,122.84767424,116.084073524,108.238656811,101.433504019,95.1085470256,89.8081949754,85.2192298579,80.4755217311])
# Creating weights for histo: y14_THT_15
y14_THT_15_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating weights for histo: y14_THT_16
y14_THT_16_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0])
# Creating a new Canvas
fig = plt.figure(figsize=(12,6),dpi=80)
frame = gridspec.GridSpec(1,1,right=0.7)
pad = fig.add_subplot(frame[0])
# Creating a new Stack
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights+y14_THT_4_weights+y14_THT_5_weights+y14_THT_6_weights+y14_THT_7_weights+y14_THT_8_weights+y14_THT_9_weights+y14_THT_10_weights+y14_THT_11_weights+y14_THT_12_weights+y14_THT_13_weights+y14_THT_14_weights+y14_THT_15_weights+y14_THT_16_weights,\
label="$bg\_dip\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#e5e5e5", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights+y14_THT_4_weights+y14_THT_5_weights+y14_THT_6_weights+y14_THT_7_weights+y14_THT_8_weights+y14_THT_9_weights+y14_THT_10_weights+y14_THT_11_weights+y14_THT_12_weights+y14_THT_13_weights+y14_THT_14_weights+y14_THT_15_weights,\
label="$bg\_dip\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#f2f2f2", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights+y14_THT_4_weights+y14_THT_5_weights+y14_THT_6_weights+y14_THT_7_weights+y14_THT_8_weights+y14_THT_9_weights+y14_THT_10_weights+y14_THT_11_weights+y14_THT_12_weights+y14_THT_13_weights+y14_THT_14_weights,\
label="$bg\_dip\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights+y14_THT_4_weights+y14_THT_5_weights+y14_THT_6_weights+y14_THT_7_weights+y14_THT_8_weights+y14_THT_9_weights+y14_THT_10_weights+y14_THT_11_weights+y14_THT_12_weights+y14_THT_13_weights,\
label="$bg\_dip\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ccc6aa", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights+y14_THT_4_weights+y14_THT_5_weights+y14_THT_6_weights+y14_THT_7_weights+y14_THT_8_weights+y14_THT_9_weights+y14_THT_10_weights+y14_THT_11_weights+y14_THT_12_weights,\
label="$bg\_dip\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#c1bfa8", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights+y14_THT_4_weights+y14_THT_5_weights+y14_THT_6_weights+y14_THT_7_weights+y14_THT_8_weights+y14_THT_9_weights+y14_THT_10_weights+y14_THT_11_weights,\
label="$bg\_dip\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#bab5a3", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights+y14_THT_4_weights+y14_THT_5_weights+y14_THT_6_weights+y14_THT_7_weights+y14_THT_8_weights+y14_THT_9_weights+y14_THT_10_weights,\
label="$bg\_dip\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b2a596", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights+y14_THT_4_weights+y14_THT_5_weights+y14_THT_6_weights+y14_THT_7_weights+y14_THT_8_weights+y14_THT_9_weights,\
label="$bg\_dip\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#b7a39b", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights+y14_THT_4_weights+y14_THT_5_weights+y14_THT_6_weights+y14_THT_7_weights+y14_THT_8_weights,\
label="$bg\_vbf\_1600\_inf$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#ad998c", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights+y14_THT_4_weights+y14_THT_5_weights+y14_THT_6_weights+y14_THT_7_weights,\
label="$bg\_vbf\_1200\_1600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#9b8e82", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights+y14_THT_4_weights+y14_THT_5_weights+y14_THT_6_weights,\
label="$bg\_vbf\_800\_1200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#876656", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights+y14_THT_4_weights+y14_THT_5_weights,\
label="$bg\_vbf\_600\_800$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#afcec6", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights+y14_THT_4_weights,\
label="$bg\_vbf\_400\_600$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#84c1a3", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights,\
label="$bg\_vbf\_200\_400$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#89a8a0", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights,\
label="$bg\_vbf\_100\_200$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#829e8c", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights+y14_THT_1_weights,\
label="$bg\_vbf\_0\_100$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#adbcc6", linewidth=4, linestyle="dashdot",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
pad.hist(x=xData, bins=xBinning, weights=y14_THT_0_weights,\
label="$signal$", histtype="step", rwidth=1.0,\
color=None, edgecolor="#7a8e99", linewidth=3, linestyle="dashed",\
bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical")
# Axis
plt.rc('text',usetex=False)
plt.xlabel(r"THT",\
fontsize=16,color="black")
plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 40.0\ \mathrm{fb}^{-1})$ ",\
fontsize=16,color="black")
# Boundary of y-axis
ymax=(y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights+y14_THT_4_weights+y14_THT_5_weights+y14_THT_6_weights+y14_THT_7_weights+y14_THT_8_weights+y14_THT_9_weights+y14_THT_10_weights+y14_THT_11_weights+y14_THT_12_weights+y14_THT_13_weights+y14_THT_14_weights+y14_THT_15_weights+y14_THT_16_weights).max()*1.1
ymin=0 # linear scale
#ymin=min([x for x in (y14_THT_0_weights+y14_THT_1_weights+y14_THT_2_weights+y14_THT_3_weights+y14_THT_4_weights+y14_THT_5_weights+y14_THT_6_weights+y14_THT_7_weights+y14_THT_8_weights+y14_THT_9_weights+y14_THT_10_weights+y14_THT_11_weights+y14_THT_12_weights+y14_THT_13_weights+y14_THT_14_weights+y14_THT_15_weights+y14_THT_16_weights) if x])/100. # log scale
plt.gca().set_ylim(ymin,ymax)
# Log/Linear scale for X-axis
plt.gca().set_xscale("linear")
#plt.gca().set_xscale("log",nonposx="clip")
# Log/Linear scale for Y-axis
plt.gca().set_yscale("linear")
#plt.gca().set_yscale("log",nonposy="clip")
# Legend
plt.legend(bbox_to_anchor=(1.05,1), loc=2, borderaxespad=0.)
# Saving the image
plt.savefig('../../HTML/MadAnalysis5job_0/selection_13.png')
plt.savefig('../../PDF/MadAnalysis5job_0/selection_13.pdf')
plt.savefig('../../DVI/MadAnalysis5job_0/selection_13.eps')
# Running!
if __name__ == '__main__':
selection_13()
| 101.912371
| 1,115
| 0.697233
| 4,665
| 19,771
| 2.798714
| 0.132905
| 0.341605
| 0.505974
| 0.66636
| 0.656786
| 0.656786
| 0.647901
| 0.641774
| 0.638863
| 0.633578
| 0
| 0.351503
| 0.083253
| 19,771
| 193
| 1,116
| 102.440415
| 0.368828
| 0.068332
| 0
| 0.185841
| 0
| 0.00885
| 0.057224
| 0.011042
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00885
| false
| 0
| 0.035398
| 0
| 0.044248
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b002cef718c2cf111792acb2076c7133bb00039e
| 108
|
py
|
Python
|
pyscrap3/__init__.py
|
Zincr0/pyscrap3
|
2eaf03f3598953eddfd6df9de3ea85ee0b75d441
|
[
"Apache-2.0"
] | 1
|
2015-01-17T13:16:25.000Z
|
2015-01-17T13:16:25.000Z
|
pyscrap3/__init__.py
|
Zincr0/pyscrap3
|
2eaf03f3598953eddfd6df9de3ea85ee0b75d441
|
[
"Apache-2.0"
] | null | null | null |
pyscrap3/__init__.py
|
Zincr0/pyscrap3
|
2eaf03f3598953eddfd6df9de3ea85ee0b75d441
|
[
"Apache-2.0"
] | null | null | null |
from pyscrap3.spiders import Spider
from pyscrap3.spiders import Item
from pyscrap3.spiders import ItemList
| 27
| 37
| 0.861111
| 15
| 108
| 6.2
| 0.466667
| 0.387097
| 0.612903
| 0.806452
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03125
| 0.111111
| 108
| 3
| 38
| 36
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
b03fb660f4f1157eccbe59bbb63c64a12e099150
| 22,601
|
py
|
Python
|
tests/test_base.py
|
nxdevel/nx_tempfile
|
b37f7b84f3832b6337d90479baa2defc8297758f
|
[
"MIT"
] | null | null | null |
tests/test_base.py
|
nxdevel/nx_tempfile
|
b37f7b84f3832b6337d90479baa2defc8297758f
|
[
"MIT"
] | null | null | null |
tests/test_base.py
|
nxdevel/nx_tempfile
|
b37f7b84f3832b6337d90479baa2defc8297758f
|
[
"MIT"
] | null | null | null |
# pylint: disable=missing-docstring, no-self-use, invalid-name
import os
import io
import codecs
import pytest
from nx_tempfile import NamedTemporaryFile, TemporaryFile
def reference(data, encoding, errors):
"Round-trip the data according to the object encoding rules."
with io.TextIOWrapper(io.BytesIO(), encoding=encoding,
errors=errors, newline='\n') as fobj:
fobj.write(data)
fobj.seek(0)
return fobj.read()
class TestTemporaryFile:
def test_pass_through_utf8(self):
data = '12\u00d6\n'
fobj = TemporaryFile('xt+', encoding='utf-8')
assert fobj.closed is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == data
fobj.close()
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
fobj = TemporaryFile('xt+', encoding='utf-8', errors=None)
assert fobj.closed is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == data
fobj.close()
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
fobj = TemporaryFile('xt+', encoding='utf-8', errors='strict')
assert fobj.closed is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == data
fobj.close()
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
def test_pass_through_utf8_context(self):
data = '12\u00d6\n'
with TemporaryFile('xt+', encoding='utf-8') as fobj:
assert fobj.closed is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == data
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
with TemporaryFile('xt+', encoding='utf-8', errors=None) as fobj:
assert fobj.closed is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == data
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
with TemporaryFile('xt+', encoding='utf-8', errors='strict') as fobj:
assert fobj.closed is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == data
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
def test_pass_through_ascii(self):
data = '12\u00d6\n'
fobj = TemporaryFile('xt+', encoding='ascii')
assert fobj.closed is False
with pytest.raises(UnicodeEncodeError):
fobj.write(data)
assert fobj.seek(0) == 0
assert fobj.read() == ''
fobj.close()
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
fobj = TemporaryFile('xt+', encoding='ascii', errors=None)
assert fobj.closed is False
with pytest.raises(UnicodeEncodeError):
fobj.write(data)
assert fobj.seek(0) == 0
assert fobj.read() == ''
fobj.close()
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
fobj = TemporaryFile('xt+', encoding='ascii', errors='strict')
assert fobj.closed is False
with pytest.raises(UnicodeEncodeError):
fobj.write(data)
assert fobj.seek(0) == 0
assert fobj.read() == ''
fobj.close()
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
def test_pass_through_ascii_context(self):
data = '12\u00d6\n'
with TemporaryFile('xt+', encoding='ascii') as fobj:
assert fobj.closed is False
with pytest.raises(UnicodeEncodeError):
fobj.write(data)
assert fobj.seek(0) == 0
assert fobj.read() == ''
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
with TemporaryFile('xt+', encoding='ascii', errors=None) as fobj:
assert fobj.closed is False
with pytest.raises(UnicodeEncodeError):
fobj.write(data)
assert fobj.seek(0) == 0
assert fobj.read() == ''
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
with TemporaryFile('xt+', encoding='ascii', errors='strict') as fobj:
assert fobj.closed is False
with pytest.raises(UnicodeEncodeError):
fobj.write(data)
assert fobj.seek(0) == 0
assert fobj.read() == ''
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
def test_ignore(self):
data = '12\u00d6\n'
ref = reference(data, encoding='ascii', errors='ignore')
fobj = TemporaryFile('xt+', encoding='ascii', errors='ignore')
assert fobj.closed is False
assert fobj.line_buffering is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == ref
fobj.close()
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
def test_ignore_context(self):
data = '12\u00d6\n'
ref = reference(data, encoding='ascii', errors='ignore')
with TemporaryFile('xt+', encoding='ascii', errors='ignore') as fobj:
assert fobj.closed is False
assert fobj.line_buffering is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == ref
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
def test_ignore_buffering(self):
data = '12\u00d6\n'
ref = reference(data, encoding='ascii', errors='ignore')
fobj = TemporaryFile('xt+', encoding='ascii', errors='ignore',
buffering=1)
assert fobj.closed is False
assert fobj.line_buffering is True
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == ref
fobj.close()
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
def test_ignore_buffering_context(self):
data = '12\u00d6\n'
ref = reference(data, encoding='ascii', errors='ignore')
with TemporaryFile('xt+', encoding='ascii', errors='ignore',
buffering=1) as fobj:
assert fobj.closed is False
assert fobj.line_buffering is True
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == ref
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
def test_replace(self):
data = '12\u00d6\n'
ref = reference(data, encoding='ascii', errors='replace')
fobj = TemporaryFile('xt+', encoding='ascii', errors='replace')
assert fobj.closed is False
assert fobj.line_buffering is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == ref
fobj.close()
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
def test_replace_context(self):
data = '12\u00d6\n'
ref = reference(data, encoding='ascii', errors='replace')
with TemporaryFile('xt+', encoding='ascii', errors='replace') as fobj:
assert fobj.closed is False
assert fobj.line_buffering is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == ref
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
def test_replace_buffering(self):
data = '12\u00d6\n'
ref = reference(data, encoding='ascii', errors='replace')
fobj = TemporaryFile('xt+', encoding='ascii', errors='replace',
buffering=1)
assert fobj.closed is False
assert fobj.line_buffering is True
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == ref
fobj.close()
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
def test_replace_buffering_context(self):
data = '12\u00d6\n'
ref = reference(data, encoding='ascii', errors='replace')
with TemporaryFile('xt+', encoding='ascii', errors='replace',
buffering=1) as fobj:
assert fobj.closed is False
assert fobj.line_buffering is True
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == ref
assert fobj.closed is True
assert isinstance(fobj.name, int) or not os.path.exists(fobj.name)
def test_binary_with_errors(self):
with pytest.raises(ValueError):
TemporaryFile('xb+', errors='ignore')
def test_invalid_buffering(self):
# <buffering> is passed to the underlying constructor without checking
# unless <errors> is specified
with pytest.raises(ValueError):
TemporaryFile('xt', errors='ignore', buffering=0)
def test_invalid_iowrap_fail(self):
# A bit of cheating here as we know we can induce an error in the
# text wrapper to ensure the final section of the __init__ gets tested
encoding = 'xxxasciixxx'
with pytest.raises(LookupError):
codecs.lookup(encoding)
with pytest.raises(LookupError):
TemporaryFile('xt', encoding=encoding, errors='ignore')
class TestNamedTemporaryFile:
def test_pass_through_utf8(self):
data = '12\u00d6\n'
fobj = NamedTemporaryFile('xt+', encoding='utf-8')
assert fobj.closed is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == data
fobj.close()
assert fobj.closed is True
assert not os.path.exists(fobj.name)
fobj = NamedTemporaryFile('xt+', encoding='utf-8', errors=None)
assert fobj.closed is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == data
fobj.close()
assert fobj.closed is True
assert not os.path.exists(fobj.name)
fobj = NamedTemporaryFile('xt+', encoding='utf-8', errors='strict')
assert fobj.closed is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == data
fobj.close()
assert fobj.closed is True
assert not os.path.exists(fobj.name)
def test_pass_through_utf8_no_delete(self):
data = '12\u00d6\n'
fobj = NamedTemporaryFile('xt+', encoding='utf-8', delete=False)
assert fobj.closed is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == data
fobj.close()
assert fobj.closed is True
assert os.path.exists(fobj.name)
os.unlink(fobj.name)
assert not os.path.exists(fobj.name)
fobj = NamedTemporaryFile('xt+', encoding='utf-8', errors=None,
delete=False)
assert fobj.closed is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == data
fobj.close()
assert fobj.closed is True
assert os.path.exists(fobj.name)
os.unlink(fobj.name)
assert not os.path.exists(fobj.name)
fobj = NamedTemporaryFile('xt+', encoding='utf-8', errors='strict',
delete=False)
assert fobj.closed is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == data
fobj.close()
assert fobj.closed is True
assert os.path.exists(fobj.name)
os.unlink(fobj.name)
assert not os.path.exists(fobj.name)
def test_pass_through_utf8_context(self):
data = '12\u00d6\n'
with NamedTemporaryFile('xt+', encoding='utf-8') as fobj:
assert fobj.closed is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == data
assert fobj.closed is True
assert not os.path.exists(fobj.name)
with NamedTemporaryFile('xt+', encoding='utf-8', errors=None) as fobj:
assert fobj.closed is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == data
assert fobj.closed is True
assert not os.path.exists(fobj.name)
with NamedTemporaryFile('xt+', encoding='utf-8',
errors='strict') as fobj:
assert fobj.closed is False
assert fobj.write(data) == len(data)
assert fobj.seek(0) == 0
assert fobj.read() == data
assert fobj.closed is True
assert not os.path.exists(fobj.name)

    def test_pass_through_utf8_context_no_delete(self):
        data = '12\u00d6\n'
        with NamedTemporaryFile('xt+', encoding='utf-8', delete=False) as fobj:
            assert fobj.closed is False
            assert fobj.write(data) == len(data)
            assert fobj.seek(0) == 0
            assert fobj.read() == data
        assert fobj.closed is True
        assert os.path.exists(fobj.name)
        os.unlink(fobj.name)
        assert not os.path.exists(fobj.name)
        with NamedTemporaryFile('xt+', encoding='utf-8', errors=None,
                                delete=False) as fobj:
            assert fobj.closed is False
            assert fobj.write(data) == len(data)
            assert fobj.seek(0) == 0
            assert fobj.read() == data
        assert fobj.closed is True
        assert os.path.exists(fobj.name)
        os.unlink(fobj.name)
        assert not os.path.exists(fobj.name)
        with NamedTemporaryFile('xt+', encoding='utf-8', errors='strict',
                                delete=False) as fobj:
            assert fobj.closed is False
            assert fobj.write(data) == len(data)
            assert fobj.seek(0) == 0
            assert fobj.read() == data
        assert fobj.closed is True
        assert os.path.exists(fobj.name)
        os.unlink(fobj.name)
        assert not os.path.exists(fobj.name)

    def test_pass_through_ascii(self):
        data = '12\u00d6\n'
        fobj = NamedTemporaryFile('xt+', encoding='ascii')
        assert fobj.closed is False
        with pytest.raises(UnicodeEncodeError):
            fobj.write(data)
        assert fobj.seek(0) == 0
        assert fobj.read() == ''
        fobj.close()
        assert fobj.closed is True
        assert not os.path.exists(fobj.name)
        fobj = NamedTemporaryFile('xt+', encoding='ascii', errors=None)
        assert fobj.closed is False
        with pytest.raises(UnicodeEncodeError):
            fobj.write(data)
        assert fobj.seek(0) == 0
        assert fobj.read() == ''
        fobj.close()
        assert fobj.closed is True
        assert not os.path.exists(fobj.name)
        fobj = NamedTemporaryFile('xt+', encoding='ascii', errors='strict')
        assert fobj.closed is False
        with pytest.raises(UnicodeEncodeError):
            fobj.write(data)
        assert fobj.seek(0) == 0
        assert fobj.read() == ''
        fobj.close()
        assert fobj.closed is True
        assert not os.path.exists(fobj.name)

    def test_pass_through_ascii_context(self):
        data = '12\u00d6\n'
        with NamedTemporaryFile('xt+', encoding='ascii') as fobj:
            assert fobj.closed is False
            with pytest.raises(UnicodeEncodeError):
                fobj.write(data)
            assert fobj.seek(0) == 0
            assert fobj.read() == ''
        assert fobj.closed is True
        assert not os.path.exists(fobj.name)
        with NamedTemporaryFile('xt+', encoding='ascii', errors=None) as fobj:
            assert fobj.closed is False
            with pytest.raises(UnicodeEncodeError):
                fobj.write(data)
            assert fobj.seek(0) == 0
            assert fobj.read() == ''
        assert fobj.closed is True
        assert not os.path.exists(fobj.name)
        with NamedTemporaryFile('xt+', encoding='ascii',
                                errors='strict') as fobj:
            assert fobj.closed is False
            with pytest.raises(UnicodeEncodeError):
                fobj.write(data)
            assert fobj.seek(0) == 0
            assert fobj.read() == ''
        assert fobj.closed is True
        assert not os.path.exists(fobj.name)

    def test_ignore(self):
        data = '12\u00d6\n'
        ref = reference(data, encoding='ascii', errors='ignore')
        fobj = NamedTemporaryFile('xt+', encoding='ascii', errors='ignore')
        assert fobj.closed is False
        assert fobj.line_buffering is False
        assert fobj.write(data) == len(data)
        assert fobj.seek(0) == 0
        assert fobj.read() == ref
        fobj.close()
        assert fobj.closed is True
        assert not os.path.exists(fobj.name)

    def test_ignore_context(self):
        data = '12\u00d6\n'
        ref = reference(data, encoding='ascii', errors='ignore')
        with NamedTemporaryFile('xt+', encoding='ascii',
                                errors='ignore') as fobj:
            assert fobj.closed is False
            assert fobj.line_buffering is False
            assert fobj.write(data) == len(data)
            assert fobj.seek(0) == 0
            assert fobj.read() == ref
        assert fobj.closed is True
        assert not os.path.exists(fobj.name)

    def test_ignore_buffering(self):
        data = '12\u00d6\n'
        ref = reference(data, encoding='ascii', errors='ignore')
        fobj = NamedTemporaryFile('xt+', encoding='ascii', errors='ignore',
                                  buffering=1)
        assert fobj.closed is False
        assert fobj.line_buffering is True
        assert fobj.write(data) == len(data)
        assert fobj.seek(0) == 0
        assert fobj.read() == ref
        fobj.close()
        assert fobj.closed is True
        assert not os.path.exists(fobj.name)

    def test_ignore_buffering_context(self):
        data = '12\u00d6\n'
        ref = reference(data, encoding='ascii', errors='ignore')
        with NamedTemporaryFile('xt+', encoding='ascii', errors='ignore',
                                buffering=1) as fobj:
            assert fobj.closed is False
            assert fobj.line_buffering is True
            assert fobj.write(data) == len(data)
            assert fobj.seek(0) == 0
            assert fobj.read() == ref
        assert fobj.closed is True
        assert not os.path.exists(fobj.name)

    def test_replace(self):
        data = '12\u00d6\n'
        ref = reference(data, encoding='ascii', errors='replace')
        fobj = NamedTemporaryFile('xt+', encoding='ascii', errors='replace')
        assert fobj.closed is False
        assert fobj.line_buffering is False
        assert fobj.write(data) == len(data)
        assert fobj.seek(0) == 0
        assert fobj.read() == ref
        fobj.close()
        assert fobj.closed is True
        assert not os.path.exists(fobj.name)

    def test_replace_context(self):
        data = '12\u00d6\n'
        ref = reference(data, encoding='ascii', errors='replace')
        with NamedTemporaryFile('xt+', encoding='ascii',
                                errors='replace') as fobj:
            assert fobj.closed is False
            assert fobj.line_buffering is False
            assert fobj.write(data) == len(data)
            assert fobj.seek(0) == 0
            assert fobj.read() == ref
        assert fobj.closed is True
        assert not os.path.exists(fobj.name)

    def test_replace_buffering(self):
        data = '12\u00d6\n'
        ref = reference(data, encoding='ascii', errors='replace')
        fobj = NamedTemporaryFile('xt+', encoding='ascii', errors='replace',
                                  buffering=1)
        assert fobj.closed is False
        assert fobj.line_buffering is True
        assert fobj.write(data) == len(data)
        assert fobj.seek(0) == 0
        assert fobj.read() == ref
        fobj.close()
        assert fobj.closed is True
        assert not os.path.exists(fobj.name)

    def test_replace_buffering_context(self):
        data = '12\u00d6\n'
        ref = reference(data, encoding='ascii', errors='replace')
        with NamedTemporaryFile('xt+', encoding='ascii', errors='replace',
                                buffering=1) as fobj:
            assert fobj.closed is False
            assert fobj.line_buffering is True
            assert fobj.write(data) == len(data)
            assert fobj.seek(0) == 0
            assert fobj.read() == ref
        assert fobj.closed is True
        assert not os.path.exists(fobj.name)
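
    # Editor's sketch, not part of the original suite: the *_buffering tests
    # above assert fobj.line_buffering is True.  The hypothetical helper below
    # (never collected by pytest) shows where that flag comes from: passing
    # buffering=1 to a plain text-mode open() enables line buffering on the
    # resulting TextIOWrapper.
    @staticmethod
    def _demo_line_buffering_flag():
        import io
        import os
        import tempfile

        path = os.path.join(tempfile.gettempdir(), 'demo_line_buffering.txt')
        with io.open(path, 'w+t', encoding='ascii', buffering=1) as handle:
            flag = handle.line_buffering   # True for buffering=1 in text mode
        os.unlink(path)
        return flag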

    def test_binary_with_errors(self):
        with pytest.raises(ValueError):
            NamedTemporaryFile('xb+', errors='ignore')

    def test_invalid_buffering(self):
        # <buffering> is passed to the underlying constructor without checking
        # unless <errors> is specified
        with pytest.raises(ValueError):
            NamedTemporaryFile('xt', errors='ignore', buffering=0)
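
    # Editor's sketch, not part of the original suite: the ValueError expected
    # in test_invalid_buffering above ultimately comes from Python's io layer,
    # which refuses unbuffered text-mode streams.  The hypothetical helper
    # below (never collected by pytest) reproduces that with only the
    # standard library.
    @staticmethod
    def _demo_unbuffered_text_rejected():
        import io
        import os
        import tempfile

        path = os.path.join(tempfile.gettempdir(), 'demo_unbuffered_text.txt')
        try:
            # buffering=0 is only legal for binary streams, so text mode fails
            # with "can't have unbuffered text I/O".
            io.open(path, 'w+t', buffering=0)
        except ValueError as exc:
            return exc
        finally:
            # open() may have created the file before raising; clean it up.
            if os.path.exists(path):
                os.unlink(path)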

    def test_invalid_iowrap_fail(self):
        # A bit of cheating here as we know we can induce an error in the
        # text wrapper to ensure the final section of the __init__ gets tested
        encoding = 'xxxasciixxx'
        with pytest.raises(LookupError):
            codecs.lookup(encoding)
        with pytest.raises(LookupError):
            NamedTemporaryFile('xt', encoding=encoding, errors='ignore',
                               delete=False)
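

# Editor's note, not part of the original module: reference() is a helper
# defined earlier in this file.  The hypothetical sketch below shows the
# round trip it is presumed to perform -- encode with the requested error
# handler, then decode again -- which is what the 'ignore' and 'replace'
# tests above compare the file contents against.
def _presumed_reference(data, encoding, errors):
    # e.g. '12\u00d6\n' -> '12\n'  with errors='ignore'
    #      '12\u00d6\n' -> '12?\n' with errors='replace'
    return data.encode(encoding, errors).decode(encoding)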
| 38.700342 | 79 | 0.587408 | 2,787 | 22,601 | 4.723358 | 0.041981 | 0.177758 | 0.11182 | 0.125798 | 0.968399 | 0.959131 | 0.959131 | 0.957916 | 0.957916 | 0.954573 | 0 | 0.016244 | 0.299987 | 22,601 | 583 | 80 | 38.766724 | 0.815814 | 0.025795 | 0 | 0.870079 | 0 | 0 | 0.050251 | 0 | 0 | 0 | 0 | 0 | 0.562992 | 1 | 0.064961 | false | 0.019685 | 0.009843 | 0 | 0.080709 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |