hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ecd0fde5ae6883995f417b9399eb405c8cd4bccd
| 152
|
py
|
Python
|
gpmmatch/__init__.py
|
vlouf/gpmmatch
|
4b50452b2ca8cc4f7ee89ddce4ae2a65685fa48c
|
[
"MIT"
] | 4
|
2020-06-26T08:52:39.000Z
|
2022-03-10T09:43:28.000Z
|
gpmmatch/__init__.py
|
vlouf/gpmmatch
|
4b50452b2ca8cc4f7ee89ddce4ae2a65685fa48c
|
[
"MIT"
] | null | null | null |
gpmmatch/__init__.py
|
vlouf/gpmmatch
|
4b50452b2ca8cc4f7ee89ddce4ae2a65685fa48c
|
[
"MIT"
] | 3
|
2020-06-26T08:52:43.000Z
|
2022-03-27T17:52:04.000Z
|
# Import functions
from .gpmmatch import volume_matching
from .gpmmatch import vmatch_multi_pass
# Import error class.
from .gpmmatch import NoRainError
| 30.4
| 39
| 0.842105
| 20
| 152
| 6.25
| 0.6
| 0.288
| 0.432
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118421
| 152
| 5
| 40
| 30.4
| 0.932836
| 0.236842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
01faa47a6fb70e9b81d488399a6e19eb15a2ecc0
| 672
|
py
|
Python
|
Ciphers/shift.py
|
jgrove2/Cipher-Project
|
f9f93c8c6b1ffd7205be545f0d96b95f3cfb7cc0
|
[
"MIT"
] | null | null | null |
Ciphers/shift.py
|
jgrove2/Cipher-Project
|
f9f93c8c6b1ffd7205be545f0d96b95f3cfb7cc0
|
[
"MIT"
] | null | null | null |
Ciphers/shift.py
|
jgrove2/Cipher-Project
|
f9f93c8c6b1ffd7205be545f0d96b95f3cfb7cc0
|
[
"MIT"
] | null | null | null |
# Shift cipher
# Used with the shift and ceasar option of ciphers
def encrypt(message, shift):
shift = int(shift)
result = ""
for i in range(len(message)):
char = message[i]
if(char.isupper()):
result += chr((ord(char)+ shift-65) % 26 + 65)
elif(char.islower()):
result += chr((ord(char) + shift-97) % 26 + 97)
else:
result += char
return result
def decrypt(message, shift):
shift = int(shift)
result = ""
for i in range(len(message)):
char = message[i]
if(char.isupper()):
result += chr((ord(char) - shift-65) % 26 + 65)
elif(char.islower()):
result += chr((ord(char) - shift-97) % 26 + 97)
else:
result += char
return result
| 22.4
| 50
| 0.61756
| 99
| 672
| 4.191919
| 0.333333
| 0.086747
| 0.115663
| 0.154217
| 0.828916
| 0.828916
| 0.828916
| 0.828916
| 0.828916
| 0.828916
| 0
| 0.045198
| 0.209821
| 672
| 30
| 51
| 22.4
| 0.736347
| 0.090774
| 0
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bf39820d5ea5439c45caaffe6dacf6ae17ab879a
| 94
|
py
|
Python
|
train_word_embedding.py
|
FrederichRiver/taurus
|
1da240b7723bdc99883d7afe0253608cfdababb5
|
[
"BSD-3-Clause"
] | null | null | null |
train_word_embedding.py
|
FrederichRiver/taurus
|
1da240b7723bdc99883d7afe0253608cfdababb5
|
[
"BSD-3-Clause"
] | null | null | null |
train_word_embedding.py
|
FrederichRiver/taurus
|
1da240b7723bdc99883d7afe0253608cfdababb5
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python3
from word_embedding import get_dict, dict_file
v, f = get_dict(dict_file)
| 18.8
| 46
| 0.776596
| 17
| 94
| 4
| 0.705882
| 0.205882
| 0.323529
| 0.441176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012048
| 0.117021
| 94
| 5
| 47
| 18.8
| 0.807229
| 0.180851
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
17296d3bf5e5c563aa7799923e37d72dd6e60b19
| 2,005
|
py
|
Python
|
timing.py
|
akashlevy/yaklient
|
0b4479fa2b44a38a1127bb58057458d717dde67d
|
[
"MIT"
] | null | null | null |
timing.py
|
akashlevy/yaklient
|
0b4479fa2b44a38a1127bb58057458d717dde67d
|
[
"MIT"
] | null | null | null |
timing.py
|
akashlevy/yaklient
|
0b4479fa2b44a38a1127bb58057458d717dde67d
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Tests the Yaklient package"""
from yaklient import Location
from yaklient import User
import time
# Testing location
TESTING_GROUNDS = Location(45, 75)
def upvote_timeout():
"""Find the approximate time to wait to upvote a Yak after creating user"""
# Initialize
user = User(TESTING_GROUNDS)
user.post_yak("Test yak")
yak = user.get_yaks()[0]
# Wait for upvote to be processed
print "Started timer"
start_time = time.time()
while yak.likes == 0:
user.upvote_yak(yak)
yak = user.get_yaks()[0]
print yak
end_time = time.time()
# Print elapsed time
elapsed_time = end_time - start_time
print "Elapsed time: %f" % elapsed_time
def cupvote_timeout():
"""Find the approximate time to wait to upvote a comment after creating
user"""
# Initialize
user = User(TESTING_GROUNDS)
user.post_yak("Test yak")
yak = user.get_yaks()[0]
while not yak.loaded:
yak = user.get_yaks()[0]
user.post_comment("Test comment", yak)
comment = user.get_comments(yak)[0]
# Wait for upvote to be processed
print "Started timer"
start_time = time.time()
while comment.likes == 0:
user.upvote_comment(yak)
yak = user.get_yaks()[0]
print yak
end_time = time.time()
# Print elapsed time
elapsed_time = end_time - start_time
print "Elapsed time: %f" % elapsed_time
def downvote_timeout():
"""Find the approximate time to wait to downvote a Yak after creating
user"""
# Initialize
user = User(TESTING_GROUNDS)
user.post_yak("Test yak")
yak = user.get_yaks()[0]
# Wait for upvote to be processed
print "Started timer"
start_time = time.time()
while yak.likes == 0:
user.downvote_yak(yak)
yak = user.get_yaks()[0]
print yak
end_time = time.time()
# Print elapsed time
elapsed_time = end_time - start_time
print "Elapsed time: %f" % elapsed_time
| 24.753086
| 79
| 0.642893
| 279
| 2,005
| 4.476703
| 0.182796
| 0.076861
| 0.056045
| 0.078463
| 0.771017
| 0.759007
| 0.759007
| 0.759007
| 0.729384
| 0.729384
| 0
| 0.010688
| 0.253367
| 2,005
| 80
| 80
| 25.0625
| 0.823647
| 0.111721
| 0
| 0.702128
| 0
| 0
| 0.082109
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.06383
| null | null | 0.191489
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bda8d227c90145b9d801e7917962e86ac3ff89af
| 152
|
py
|
Python
|
python/testData/psi/TrailingBlockCommentsAtEndOfFile.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/psi/TrailingBlockCommentsAtEndOfFile.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/psi/TrailingBlockCommentsAtEndOfFile.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
def foo():
def bar():
def baz():
pass
# baz 1
# baz 2
# baz 3
# bar
# foo
| 15.2
| 23
| 0.263158
| 15
| 152
| 2.666667
| 0.533333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 0.644737
| 152
| 9
| 24
| 16.888889
| 0.685185
| 0.164474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.75
| true
| 0.25
| 0
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
da09d2060a16836d0b68eaddeb782a3f98d6aab3
| 91,333
|
py
|
Python
|
tlsmate/mappings.py
|
timb-machine-mirrors/tlsmate
|
1313161b9170311f466a3a43b3d84797cecc0291
|
[
"MIT"
] | null | null | null |
tlsmate/mappings.py
|
timb-machine-mirrors/tlsmate
|
1313161b9170311f466a3a43b3d84797cecc0291
|
[
"MIT"
] | null | null | null |
tlsmate/mappings.py
|
timb-machine-mirrors/tlsmate
|
1313161b9170311f466a3a43b3d84797cecc0291
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Module containing various mapping tables
Attributes:
supported_cipher_suites (dict): maps :obj:`tlsmate.tls.CipherSuite` to
:obj:`tlsmate.structs.CipherSuite` objects
supported_ciphers (dict): maps :obj:`tlsmate.tls.SymmetricCipher` to
:obj:`tlsmate.structs.Cipher` objects
supported_macs (dict): maps :obj:`tlsmate.tls.HashPrimitive` to
:obj:`tlsmate.structs.Mac` objects
key_exchange (dict): maps :obj:`tlsmate.tls.KeyExchangeAlgorithm` to
:obj:`tlsmate.structs.KeyExchange` objects
curve_to_group (dict): maps supported group strings to
:obj:`tlsmate.tls.SupportedGroups` objects
"""
# import basic stuff
from typing import Dict
# import own stuff
import tlsmate.structs as structs
import tlsmate.tls as tls
# import other stuff
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.ciphers import algorithms, aead
# this map contains all cipher suites for which a full handshake is supported,
# i.e., application data can be exchanged encrypted
supported_cipher_suites: Dict[tls.CipherSuite, structs.CipherSuite] = {
tls.CipherSuite.TLS_NULL_WITH_NULL_NULL: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.NULL,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.NULL,
),
tls.CipherSuite.TLS_RSA_WITH_NULL_MD5: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.MD5,
),
tls.CipherSuite.TLS_RSA_WITH_NULL_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_RSA_EXPORT_WITH_RC4_40_MD5: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_EXPORT,
cipher=tls.SymmetricCipher.RC4_40,
mac=tls.HashPrimitive.MD5,
),
tls.CipherSuite.TLS_RSA_WITH_RC4_128_MD5: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.RC4_128,
mac=tls.HashPrimitive.MD5,
),
tls.CipherSuite.TLS_RSA_WITH_RC4_128_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.RC4_128,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_RSA_EXPORT_WITH_RC2_CBC_40_MD5: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_EXPORT,
cipher=tls.SymmetricCipher.RC2_CBC_40,
mac=tls.HashPrimitive.MD5,
),
tls.CipherSuite.TLS_RSA_WITH_IDEA_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.IDEA_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_RSA_EXPORT_WITH_DES40_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_EXPORT,
cipher=tls.SymmetricCipher.DES40_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_RSA_WITH_DES_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.DES_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_RSA_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_DSS_EXPORT_WITH_DES40_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS_EXPORT,
cipher=tls.SymmetricCipher.DES40_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_DSS_WITH_DES_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.DES_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_RSA_EXPORT_WITH_DES40_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA_EXPORT,
cipher=tls.SymmetricCipher.DES40_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_RSA_WITH_DES_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.DES_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS_EXPORT,
cipher=tls.SymmetricCipher.DES40_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_DES_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.DES_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA_EXPORT,
cipher=tls.SymmetricCipher.DES40_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_DES_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.DES_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_ANON_EXPORT_WITH_RC4_40_MD5: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON_EXPORT,
cipher=tls.SymmetricCipher.RC4_40,
mac=tls.HashPrimitive.MD5,
),
tls.CipherSuite.TLS_DH_ANON_WITH_RC4_128_MD5: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.RC4_128,
mac=tls.HashPrimitive.MD5,
),
tls.CipherSuite.TLS_DH_ANON_EXPORT_WITH_DES40_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON_EXPORT,
cipher=tls.SymmetricCipher.DES40_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_ANON_WITH_DES_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.DES_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_ANON_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_KRB5_WITH_DES_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.KRB5,
cipher=tls.SymmetricCipher.DES_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_KRB5_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.KRB5,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_KRB5_WITH_RC4_128_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.KRB5,
cipher=tls.SymmetricCipher.RC4_128,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_KRB5_WITH_IDEA_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.KRB5,
cipher=tls.SymmetricCipher.IDEA_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_KRB5_WITH_DES_CBC_MD5: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.KRB5,
cipher=tls.SymmetricCipher.DES_CBC,
mac=tls.HashPrimitive.MD5,
),
tls.CipherSuite.TLS_KRB5_WITH_3DES_EDE_CBC_MD5: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.KRB5,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.MD5,
),
tls.CipherSuite.TLS_KRB5_WITH_RC4_128_MD5: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.KRB5,
cipher=tls.SymmetricCipher.RC4_128,
mac=tls.HashPrimitive.MD5,
),
tls.CipherSuite.TLS_KRB5_WITH_IDEA_CBC_MD5: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.KRB5,
cipher=tls.SymmetricCipher.IDEA_CBC,
mac=tls.HashPrimitive.MD5,
),
tls.CipherSuite.TLS_KRB5_EXPORT_WITH_DES_CBC_40_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.KRB5_EXPORT,
cipher=tls.SymmetricCipher.DES_CBC_40,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_KRB5_EXPORT_WITH_RC2_CBC_40_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.KRB5_EXPORT,
cipher=tls.SymmetricCipher.RC2_CBC_40,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_KRB5_EXPORT_WITH_RC4_40_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.KRB5_EXPORT,
cipher=tls.SymmetricCipher.RC4_40,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_KRB5_EXPORT_WITH_DES_CBC_40_MD5: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.KRB5_EXPORT,
cipher=tls.SymmetricCipher.DES_CBC_40,
mac=tls.HashPrimitive.MD5,
),
tls.CipherSuite.TLS_KRB5_EXPORT_WITH_RC2_CBC_40_MD5: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.KRB5_EXPORT,
cipher=tls.SymmetricCipher.RC2_CBC_40,
mac=tls.HashPrimitive.MD5,
),
tls.CipherSuite.TLS_KRB5_EXPORT_WITH_RC4_40_MD5: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.KRB5_EXPORT,
cipher=tls.SymmetricCipher.RC4_40,
mac=tls.HashPrimitive.MD5,
),
tls.CipherSuite.TLS_PSK_WITH_NULL_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_NULL_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_NULL_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_RSA_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_DSS_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_RSA_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_ANON_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_RSA_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_DSS_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_RSA_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_ANON_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_RSA_WITH_NULL_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_WITH_AES_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_WITH_AES_256_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_DSS_WITH_AES_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_RSA_WITH_AES_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_AES_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_WITH_CAMELLIA_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_ANON_WITH_CAMELLIA_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_DSS_WITH_AES_256_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_RSA_WITH_AES_256_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_AES_256_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_ANON_WITH_AES_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_ANON_WITH_AES_256_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_WITH_CAMELLIA_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_ANON_WITH_CAMELLIA_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_PSK_WITH_RC4_128_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.RC4_128,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_PSK_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_PSK_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_PSK_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_RC4_128_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.RC4_128,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_RC4_128_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.RC4_128,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_RSA_WITH_SEED_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.SEED_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_DSS_WITH_SEED_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.SEED_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_RSA_WITH_SEED_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.SEED_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_SEED_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.SEED_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_SEED_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.SEED_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_DH_ANON_WITH_SEED_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.SEED_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_RSA_WITH_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_WITH_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DH_RSA_WITH_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_RSA_WITH_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DH_DSS_WITH_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_DSS_WITH_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DH_ANON_WITH_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_ANON_WITH_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_PSK_WITH_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_PSK_WITH_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_PSK_WITH_AES_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_PSK_WITH_AES_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_PSK_WITH_NULL_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_PSK_WITH_NULL_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_AES_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_AES_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_NULL_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_NULL_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_AES_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_AES_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_NULL_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_NULL_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_RSA_WITH_CAMELLIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_ANON_WITH_CAMELLIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_WITH_CAMELLIA_256_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_ANON_WITH_CAMELLIA_256_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_NULL_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_RC4_128_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.RC4_128,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_NULL_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_RC4_128_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.RC4_128,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_NULL_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_RC4_128_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.RC4_128,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_NULL_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_RC4_128_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.RC4_128,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDH_ANON_WITH_NULL_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ANON,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDH_ANON_WITH_RC4_128_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ANON,
cipher=tls.SymmetricCipher.RC4_128,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDH_ANON_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ANON,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDH_ANON_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ANON,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDH_ANON_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ANON,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_SRP_SHA_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.SRP_SHA,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.SRP_SHA_RSA,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_SRP_SHA_DSS_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.SRP_SHA_DSS,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_SRP_SHA_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.SRP_SHA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_SRP_SHA_RSA_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.SRP_SHA_RSA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_SRP_SHA_DSS_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.SRP_SHA_DSS,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_SRP_SHA_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.SRP_SHA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_SRP_SHA_RSA_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.SRP_SHA_RSA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_SRP_SHA_DSS_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.SRP_SHA_DSS,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_RC4_128_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.RC4_128,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_3DES_EDE_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_AES_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.AES_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_AES_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.AES_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_NULL_SHA: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA1,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_NULL_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_NULL_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.NULL,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_RSA_WITH_ARIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.ARIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_WITH_ARIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.ARIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DH_DSS_WITH_ARIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.ARIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_DSS_WITH_ARIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.ARIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DH_RSA_WITH_ARIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.ARIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_RSA_WITH_ARIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.ARIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_ARIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.ARIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_ARIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.ARIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_ARIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.ARIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_ARIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.ARIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DH_ANON_WITH_ARIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.ARIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_ANON_WITH_ARIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.ARIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_ARIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.ARIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_ARIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.ARIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_ARIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.ARIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_ARIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.ARIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_ARIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.ARIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_ARIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.ARIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_ARIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.ARIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_ARIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.ARIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_RSA_WITH_ARIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.ARIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_WITH_ARIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.ARIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_ARIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.ARIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_ARIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.ARIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DH_RSA_WITH_ARIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.ARIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_RSA_WITH_ARIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.ARIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_ARIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.ARIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_ARIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.ARIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DH_DSS_WITH_ARIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.ARIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_DSS_WITH_ARIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.ARIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DH_ANON_WITH_ARIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.ARIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_ANON_WITH_ARIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.ARIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_ARIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.ARIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_ARIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.ARIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_ARIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.ARIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_ARIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.ARIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_ARIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.ARIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_ARIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.ARIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_ARIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.ARIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_ARIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.ARIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_PSK_WITH_ARIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.ARIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_PSK_WITH_ARIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.ARIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_ARIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.ARIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_ARIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.ARIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_ARIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.ARIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_ARIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.ARIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_PSK_WITH_ARIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.ARIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_PSK_WITH_ARIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.ARIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_ARIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.ARIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_ARIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.ARIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_ARIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.ARIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_ARIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.ARIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_ARIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.ARIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_ARIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.ARIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_CAMELLIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_CAMELLIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_CAMELLIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_CAMELLIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_CAMELLIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_CAMELLIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_RSA_WITH_CAMELLIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_WITH_CAMELLIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_CAMELLIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_CAMELLIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DH_RSA_WITH_CAMELLIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_RSA_WITH_CAMELLIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_CAMELLIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.CAMELLIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_DSS_WITH_CAMELLIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_DSS,
cipher=tls.SymmetricCipher.CAMELLIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DH_DSS_WITH_CAMELLIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.CAMELLIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_DSS_WITH_CAMELLIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_DSS,
cipher=tls.SymmetricCipher.CAMELLIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DH_ANON_WITH_CAMELLIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.CAMELLIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DH_ANON_WITH_CAMELLIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DH_ANON,
cipher=tls.SymmetricCipher.CAMELLIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_CAMELLIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDH_ECDSA_WITH_CAMELLIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_ECDSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_CAMELLIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_CAMELLIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_CAMELLIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDH_RSA_WITH_CAMELLIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDH_RSA,
cipher=tls.SymmetricCipher.CAMELLIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_PSK_WITH_CAMELLIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.CAMELLIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_PSK_WITH_CAMELLIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.CAMELLIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_CAMELLIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.CAMELLIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_CAMELLIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.CAMELLIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_CAMELLIA_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.CAMELLIA_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_CAMELLIA_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.CAMELLIA_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_PSK_WITH_CAMELLIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_PSK_WITH_CAMELLIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_CAMELLIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_CAMELLIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_CAMELLIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_CAMELLIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_CAMELLIA_128_CBC_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.CAMELLIA_128_CBC,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_CAMELLIA_256_CBC_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.CAMELLIA_256_CBC,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_RSA_WITH_AES_128_CCM: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.AES_128_CCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_WITH_AES_256_CCM: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.AES_256_CCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_AES_128_CCM: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.AES_128_CCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_AES_256_CCM: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.AES_256_CCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_WITH_AES_128_CCM_8: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.AES_128_CCM_8,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_WITH_AES_256_CCM_8: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA,
cipher=tls.SymmetricCipher.AES_256_CCM_8,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_AES_128_CCM_8: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.AES_128_CCM_8,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_AES_256_CCM_8: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.AES_256_CCM_8,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_PSK_WITH_AES_128_CCM: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.AES_128_CCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_PSK_WITH_AES_256_CCM: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.AES_256_CCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_AES_128_CCM: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.AES_128_CCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_AES_256_CCM: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.AES_256_CCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_PSK_WITH_AES_128_CCM_8: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.AES_128_CCM_8,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_PSK_WITH_AES_256_CCM_8: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.AES_256_CCM_8,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_PSK_DHE_WITH_AES_128_CCM_8: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK_DHE,
cipher=tls.SymmetricCipher.AES_128_CCM_8,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_PSK_DHE_WITH_AES_256_CCM_8: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK_DHE,
cipher=tls.SymmetricCipher.AES_256_CCM_8,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_AES_128_CCM: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.AES_128_CCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_AES_256_CCM: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.AES_256_CCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_AES_128_CCM_8: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.AES_128_CCM_8,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_AES_256_CCM_8: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.AES_256_CCM_8,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECCPWD_WITH_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECCPWD,
cipher=tls.SymmetricCipher.AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECCPWD_WITH_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECCPWD,
cipher=tls.SymmetricCipher.AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECCPWD_WITH_AES_128_CCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECCPWD,
cipher=tls.SymmetricCipher.AES_128_CCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECCPWD_WITH_AES_256_CCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECCPWD,
cipher=tls.SymmetricCipher.AES_256_CCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_RSA,
cipher=tls.SymmetricCipher.CHACHA20_POLY1305,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_ECDSA,
cipher=tls.SymmetricCipher.CHACHA20_POLY1305,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_RSA_WITH_CHACHA20_POLY1305_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_RSA,
cipher=tls.SymmetricCipher.CHACHA20_POLY1305,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_PSK_WITH_CHACHA20_POLY1305_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.PSK,
cipher=tls.SymmetricCipher.CHACHA20_POLY1305,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.CHACHA20_POLY1305,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_DHE_PSK_WITH_CHACHA20_POLY1305_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.DHE_PSK,
cipher=tls.SymmetricCipher.CHACHA20_POLY1305,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_RSA_PSK_WITH_CHACHA20_POLY1305_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.RSA_PSK,
cipher=tls.SymmetricCipher.CHACHA20_POLY1305,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_AES_128_CCM_8_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.AES_128_CCM_8,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_ECDHE_PSK_WITH_AES_128_CCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.ECDHE_PSK,
cipher=tls.SymmetricCipher.AES_128_CCM,
mac=tls.HashPrimitive.SHA256,
),
# ********************
# TLS1.3 cipher suites
# ********************
tls.CipherSuite.TLS_AES_128_GCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.TLS13_KEY_SHARE,
cipher=tls.SymmetricCipher.TLS13_AES_128_GCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_AES_256_GCM_SHA384: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.TLS13_KEY_SHARE,
cipher=tls.SymmetricCipher.TLS13_AES_256_GCM,
mac=tls.HashPrimitive.SHA384,
),
tls.CipherSuite.TLS_CHACHA20_POLY1305_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.TLS13_KEY_SHARE,
cipher=tls.SymmetricCipher.CHACHA20_POLY1305,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_AES_128_CCM_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.TLS13_KEY_SHARE,
cipher=tls.SymmetricCipher.TLS13_AES_128_CCM,
mac=tls.HashPrimitive.SHA256,
),
tls.CipherSuite.TLS_AES_128_CCM_8_SHA256: structs.CipherSuite(
key_ex=tls.KeyExchangeAlgorithm.TLS13_KEY_SHARE,
cipher=tls.SymmetricCipher.TLS13_AES_128_CCM_8,
mac=tls.HashPrimitive.SHA256,
),
}
# map cipher to various parameters relevant for the record layer
supported_ciphers: Dict[tls.SymmetricCipher, structs.Cipher] = {
    # Record-layer parameters per symmetric cipher: the implementation class
    # from the "cryptography" package ("algo"), the cipher type
    # (block/stream/AEAD) and the key/block/IV/tag sizes in bytes.
    # NOTE(review): for pre-TLS1.3 AEAD ciphers iv_len=4 appears to be the
    # implicit (fixed) IV part only, while the TLS13_* entries carry the full
    # 12-byte nonce length — confirm against the record-layer code.
    tls.SymmetricCipher.AES_128_CBC: structs.Cipher(
        primitive=tls.CipherPrimitive.AES,
        algo=algorithms.AES,
        c_type=tls.CipherType.BLOCK,
        key_len=16,
        block_size=16,
        iv_len=16,
        tag_length=None,  # non-AEAD: no authentication tag
        cipher_supported=True,
    ),
    tls.SymmetricCipher.AES_256_CBC: structs.Cipher(
        primitive=tls.CipherPrimitive.AES,
        algo=algorithms.AES,
        c_type=tls.CipherType.BLOCK,
        key_len=32,
        block_size=16,
        iv_len=16,
        tag_length=None,
        cipher_supported=True,
    ),
    tls.SymmetricCipher.AES_128_GCM: structs.Cipher(
        primitive=tls.CipherPrimitive.AES,
        algo=aead.AESGCM,
        c_type=tls.CipherType.AEAD,
        key_len=16,
        block_size=16,
        iv_len=4,
        tag_length=16,
        cipher_supported=True,
    ),
    tls.SymmetricCipher.AES_256_GCM: structs.Cipher(
        primitive=tls.CipherPrimitive.AES,
        algo=aead.AESGCM,
        c_type=tls.CipherType.AEAD,
        key_len=32,
        block_size=16,
        iv_len=4,
        tag_length=16,
        cipher_supported=True,
    ),
    tls.SymmetricCipher.AES_128_CCM: structs.Cipher(
        primitive=tls.CipherPrimitive.AES,
        algo=aead.AESCCM,
        c_type=tls.CipherType.AEAD,
        key_len=16,
        block_size=16,
        iv_len=4,
        tag_length=16,
        cipher_supported=True,
    ),
    tls.SymmetricCipher.AES_128_CCM_8: structs.Cipher(
        primitive=tls.CipherPrimitive.AES,
        algo=aead.AESCCM,
        c_type=tls.CipherType.AEAD,
        key_len=16,
        block_size=16,
        iv_len=4,
        tag_length=8,  # CCM_8 variants truncate the tag to 8 bytes
        cipher_supported=True,
    ),
    tls.SymmetricCipher.AES_256_CCM: structs.Cipher(
        primitive=tls.CipherPrimitive.AES,
        algo=aead.AESCCM,
        c_type=tls.CipherType.AEAD,
        key_len=32,
        block_size=16,
        iv_len=4,
        tag_length=16,
        cipher_supported=True,
    ),
    tls.SymmetricCipher.AES_256_CCM_8: structs.Cipher(
        primitive=tls.CipherPrimitive.AES,
        algo=aead.AESCCM,
        c_type=tls.CipherType.AEAD,
        key_len=32,
        block_size=16,
        iv_len=4,
        tag_length=8,
        cipher_supported=True,
    ),
    tls.SymmetricCipher.CHACHA20_POLY1305: structs.Cipher(
        primitive=tls.CipherPrimitive.CHACHA,
        algo=aead.ChaCha20Poly1305,
        c_type=tls.CipherType.AEAD,
        key_len=32,
        block_size=16,
        iv_len=12,  # ChaCha20-Poly1305 always uses the full 12-byte nonce
        tag_length=16,
        cipher_supported=True,
    ),
    tls.SymmetricCipher.TRIPPLE_DES_EDE_CBC: structs.Cipher(
        primitive=tls.CipherPrimitive.TRIPPLE_DES,
        algo=algorithms.TripleDES,
        c_type=tls.CipherType.BLOCK,
        key_len=24,
        block_size=8,
        iv_len=8,
        tag_length=None,
        cipher_supported=True,
    ),
    tls.SymmetricCipher.CAMELLIA_128_CBC: structs.Cipher(
        primitive=tls.CipherPrimitive.CAMELLIA,
        algo=algorithms.Camellia,
        c_type=tls.CipherType.BLOCK,
        key_len=16,
        block_size=16,
        iv_len=16,
        tag_length=None,
        cipher_supported=True,
    ),
    tls.SymmetricCipher.CAMELLIA_256_CBC: structs.Cipher(
        primitive=tls.CipherPrimitive.CAMELLIA,
        algo=algorithms.Camellia,
        c_type=tls.CipherType.BLOCK,
        key_len=32,
        block_size=16,
        iv_len=16,
        tag_length=None,
        cipher_supported=True,
    ),
    tls.SymmetricCipher.IDEA_CBC: structs.Cipher(
        primitive=tls.CipherPrimitive.IDEA,
        algo=algorithms.IDEA,
        c_type=tls.CipherType.BLOCK,
        key_len=16,
        block_size=8,
        iv_len=8,
        tag_length=None,
        cipher_supported=True,
    ),
    tls.SymmetricCipher.RC4_128: structs.Cipher(
        primitive=tls.CipherPrimitive.RC4,
        algo=algorithms.ARC4,
        c_type=tls.CipherType.STREAM,
        key_len=16,
        block_size=None,  # stream cipher: no block size, no IV
        iv_len=0,
        tag_length=None,
        cipher_supported=True,
    ),
    tls.SymmetricCipher.SEED_CBC: structs.Cipher(
        primitive=tls.CipherPrimitive.SEED,
        algo=algorithms.SEED,
        c_type=tls.CipherType.BLOCK,
        key_len=16,
        block_size=16,
        iv_len=16,
        tag_length=None,
        cipher_supported=True,
    ),
    # TLS1.3 variants: identical primitives, but the full 12-byte nonce
    # length is stored in iv_len.
    tls.SymmetricCipher.TLS13_AES_128_GCM: structs.Cipher(
        primitive=tls.CipherPrimitive.AES,
        algo=aead.AESGCM,
        c_type=tls.CipherType.AEAD,
        key_len=16,
        block_size=16,
        iv_len=12,
        tag_length=16,
        cipher_supported=True,
    ),
    tls.SymmetricCipher.TLS13_AES_256_GCM: structs.Cipher(
        primitive=tls.CipherPrimitive.AES,
        algo=aead.AESGCM,
        c_type=tls.CipherType.AEAD,
        key_len=32,
        block_size=16,
        iv_len=12,
        tag_length=16,
        cipher_supported=True,
    ),
    tls.SymmetricCipher.TLS13_AES_128_CCM: structs.Cipher(
        primitive=tls.CipherPrimitive.AES,
        algo=aead.AESCCM,
        c_type=tls.CipherType.AEAD,
        key_len=16,
        block_size=16,
        iv_len=12,
        tag_length=16,
        cipher_supported=True,
    ),
    tls.SymmetricCipher.TLS13_AES_128_CCM_8: structs.Cipher(
        primitive=tls.CipherPrimitive.AES,
        algo=aead.AESCCM,
        c_type=tls.CipherType.AEAD,
        key_len=16,
        block_size=16,
        iv_len=12,
        tag_length=8,
        cipher_supported=True,
    ),
    # ***************************
    # List of unsupported ciphers
    # ***************************
    # Only primitive and type are recorded; the remaining structs.Cipher
    # fields keep their defaults (no implementation class is available).
    tls.SymmetricCipher.ARIA_128_CBC: structs.Cipher(
        primitive=tls.CipherPrimitive.ARIA, c_type=tls.CipherType.BLOCK
    ),
    tls.SymmetricCipher.ARIA_128_GCM: structs.Cipher(
        primitive=tls.CipherPrimitive.ARIA, c_type=tls.CipherType.AEAD
    ),
    tls.SymmetricCipher.ARIA_256_CBC: structs.Cipher(
        primitive=tls.CipherPrimitive.ARIA, c_type=tls.CipherType.BLOCK
    ),
    tls.SymmetricCipher.ARIA_256_GCM: structs.Cipher(
        primitive=tls.CipherPrimitive.ARIA, c_type=tls.CipherType.AEAD
    ),
    tls.SymmetricCipher.CAMELLIA_128_GCM: structs.Cipher(
        primitive=tls.CipherPrimitive.CAMELLIA, c_type=tls.CipherType.AEAD
    ),
    tls.SymmetricCipher.CAMELLIA_256_GCM: structs.Cipher(
        primitive=tls.CipherPrimitive.CAMELLIA, c_type=tls.CipherType.AEAD
    ),
    tls.SymmetricCipher.DES40_CBC: structs.Cipher(
        primitive=tls.CipherPrimitive.DES, c_type=tls.CipherType.BLOCK
    ),
    tls.SymmetricCipher.DES_CBC: structs.Cipher(
        primitive=tls.CipherPrimitive.DES, c_type=tls.CipherType.BLOCK
    ),
    tls.SymmetricCipher.DES_CBC_40: structs.Cipher(
        primitive=tls.CipherPrimitive.DES, c_type=tls.CipherType.BLOCK
    ),
    tls.SymmetricCipher.NULL: structs.Cipher(
        primitive=tls.CipherPrimitive.NULL, c_type=tls.CipherType.NULL
    ),
    tls.SymmetricCipher.RC2_CBC_40: structs.Cipher(
        primitive=tls.CipherPrimitive.RC2, c_type=tls.CipherType.BLOCK
    ),
    tls.SymmetricCipher.RC4_40: structs.Cipher(
        primitive=tls.CipherPrimitive.RC4, c_type=tls.CipherType.STREAM
    ),
}
# map hash algorithms to mac parameters
supported_macs: Dict[tls.HashPrimitive, structs.Mac] = {
    # hash_algo: hash backing the HMAC record MAC; mac_len/key_len: MAC
    # output and MAC-key sizes in bytes.
    # NOTE(review): SHA1 and MD5 entries map hmac_algo to SHA256 —
    # presumably hmac_algo is the TLS1.2 PRF hash (SHA256 minimum per
    # RFC 5246), not the record-MAC hash; confirm against key derivation.
    tls.HashPrimitive.SHA1: structs.Mac(
        hash_algo=hashes.SHA1, mac_len=20, key_len=20, hmac_algo=hashes.SHA256
    ),
    tls.HashPrimitive.SHA256: structs.Mac(
        hash_algo=hashes.SHA256, mac_len=32, key_len=32, hmac_algo=hashes.SHA256
    ),
    tls.HashPrimitive.SHA384: structs.Mac(
        hash_algo=hashes.SHA384, mac_len=48, key_len=48, hmac_algo=hashes.SHA384
    ),
    # None values: record protection with SHA512-based MACs is not supported.
    tls.HashPrimitive.SHA512: structs.Mac(
        hash_algo=hashes.SHA512, mac_len=None, key_len=None, hmac_algo=None
    ),
    tls.HashPrimitive.MD5: structs.Mac(
        hash_algo=hashes.MD5, mac_len=16, key_len=16, hmac_algo=hashes.SHA256
    ),
}
# Map each key-exchange algorithm to its transport type (DH/ECDH/RSA),
# the authentication mechanism, whether this implementation supports it,
# and the signature scheme assumed when the peer does not negotiate one.
key_exchange: Dict[tls.KeyExchangeAlgorithm, structs.KeyExchange] = {
    tls.KeyExchangeAlgorithm.DHE_DSS: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.DH,
        key_auth=tls.KeyAuthentication.DSS,
        key_ex_supported=True,
        default_sig_scheme=tls.SignatureScheme.DSA_SHA1,
    ),
    tls.KeyExchangeAlgorithm.DHE_RSA: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.DH,
        key_auth=tls.KeyAuthentication.RSA,
        key_ex_supported=True,
        default_sig_scheme=tls.SignatureScheme.RSA_PKCS1_SHA1,
    ),
    tls.KeyExchangeAlgorithm.DH_ANON: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.DH,
        key_auth=tls.KeyAuthentication.NONE,
        key_ex_supported=True,
        default_sig_scheme=None,  # anonymous: nothing is signed
    ),
    tls.KeyExchangeAlgorithm.RSA: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.RSA,
        key_auth=tls.KeyAuthentication.NONE,
        key_ex_supported=True,
        default_sig_scheme=tls.SignatureScheme.RSA_PKCS1_SHA1,
    ),
    # Static (non-ephemeral) DH variants are defined but not supported.
    tls.KeyExchangeAlgorithm.DH_DSS: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.DH,
        key_auth=tls.KeyAuthentication.DSS,
        key_ex_supported=False,
        default_sig_scheme=tls.SignatureScheme.DSA_SHA1,
    ),
    tls.KeyExchangeAlgorithm.DH_RSA: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.DH,
        key_auth=tls.KeyAuthentication.RSA,
        key_ex_supported=False,
        default_sig_scheme=tls.SignatureScheme.RSA_PKCS1_SHA1,
    ),
    tls.KeyExchangeAlgorithm.ECDH_ECDSA: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.ECDH,
        key_auth=tls.KeyAuthentication.ECDSA,
        key_ex_supported=True,
        default_sig_scheme=tls.SignatureScheme.ECDSA_SHA1,
    ),
    tls.KeyExchangeAlgorithm.ECDHE_ECDSA: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.ECDH,
        key_auth=tls.KeyAuthentication.ECDSA,
        key_ex_supported=True,
        default_sig_scheme=tls.SignatureScheme.ECDSA_SHA1,
    ),
    tls.KeyExchangeAlgorithm.ECDH_RSA: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.ECDH,
        key_auth=tls.KeyAuthentication.RSA,
        key_ex_supported=True,
        default_sig_scheme=tls.SignatureScheme.RSA_PKCS1_SHA1,
    ),
    tls.KeyExchangeAlgorithm.ECDHE_RSA: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.ECDH,
        key_auth=tls.KeyAuthentication.RSA,
        key_ex_supported=True,
        default_sig_scheme=tls.SignatureScheme.RSA_PKCS1_SHA1,
    ),
    # NOTE(review): this EXPORT variant is flagged key_ex_supported=True
    # while all other EXPORT variants below are not — verify intentional.
    tls.KeyExchangeAlgorithm.DHE_RSA_EXPORT: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.DH,
        key_auth=tls.KeyAuthentication.RSA,
        key_ex_supported=True,
        default_sig_scheme=tls.SignatureScheme.RSA_PKCS1_SHA1,
    ),
    # TLS1.3: key exchange/authentication are handled via extensions
    # (key_share, signature_algorithms), hence no classic classification.
    tls.KeyExchangeAlgorithm.TLS13_KEY_SHARE: structs.KeyExchange(
        key_ex_type=None, key_auth=None, key_ex_supported=True, default_sig_scheme=None
    ),
    # **********************************
    # Algorithms currently not supported
    # **********************************
    # Only the classification is recorded; the remaining structs.KeyExchange
    # fields keep their defaults.
    tls.KeyExchangeAlgorithm.DHE_DSS_EXPORT: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.DH, key_auth=tls.KeyAuthentication.DSS
    ),
    tls.KeyExchangeAlgorithm.DHE_PSK: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.DH, key_auth=tls.KeyAuthentication.NONE
    ),
    tls.KeyExchangeAlgorithm.DH_ANON_EXPORT: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.DH, key_auth=tls.KeyAuthentication.NONE
    ),
    tls.KeyExchangeAlgorithm.DH_DSS_EXPORT: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.DH, key_auth=tls.KeyAuthentication.DSS
    ),
    tls.KeyExchangeAlgorithm.DH_RSA_EXPORT: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.DH, key_auth=tls.KeyAuthentication.RSA
    ),
    tls.KeyExchangeAlgorithm.ECCPWD: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.NONE, key_auth=tls.KeyAuthentication.NONE
    ),
    tls.KeyExchangeAlgorithm.ECDHE_PSK: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.ECDH, key_auth=tls.KeyAuthentication.NONE
    ),
    tls.KeyExchangeAlgorithm.ECDH_ANON: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.ECDH, key_auth=tls.KeyAuthentication.NONE
    ),
    tls.KeyExchangeAlgorithm.KRB5: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.NONE, key_auth=tls.KeyAuthentication.NONE
    ),
    tls.KeyExchangeAlgorithm.KRB5_EXPORT: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.NONE, key_auth=tls.KeyAuthentication.NONE
    ),
    tls.KeyExchangeAlgorithm.NULL: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.NONE, key_auth=tls.KeyAuthentication.NONE
    ),
    tls.KeyExchangeAlgorithm.PSK: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.NONE, key_auth=tls.KeyAuthentication.NONE
    ),
    tls.KeyExchangeAlgorithm.PSK_DHE: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.DH, key_auth=tls.KeyAuthentication.NONE
    ),
    tls.KeyExchangeAlgorithm.RSA_EXPORT: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.RSA, key_auth=tls.KeyAuthentication.NONE
    ),
    tls.KeyExchangeAlgorithm.RSA_PSK: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.RSA, key_auth=tls.KeyAuthentication.NONE
    ),
    tls.KeyExchangeAlgorithm.SRP_SHA: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.NONE, key_auth=tls.KeyAuthentication.NONE
    ),
    tls.KeyExchangeAlgorithm.SRP_SHA_DSS: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.NONE, key_auth=tls.KeyAuthentication.DSS
    ),
    tls.KeyExchangeAlgorithm.SRP_SHA_RSA: structs.KeyExchange(
        key_ex_type=tls.KeyExchangeType.NONE, key_auth=tls.KeyAuthentication.RSA
    ),
}
# Map curve-name strings to the TLS supported_groups enum.
# NOTE(review): the keys look like the `name` attribute of the
# "cryptography" package's EllipticCurve classes — confirm at the call
# sites before relying on that.
curve_to_group: Dict[str, tls.SupportedGroups] = {
    "brainpoolP256r1": tls.SupportedGroups.BRAINPOOLP256R1,
    "brainpoolP384r1": tls.SupportedGroups.BRAINPOOLP384R1,
    "brainpoolP512r1": tls.SupportedGroups.BRAINPOOLP512R1,
    "secp192r1": tls.SupportedGroups.SECP192R1,
    "secp224r1": tls.SupportedGroups.SECP224R1,
    "secp256k1": tls.SupportedGroups.SECP256K1,
    "secp256r1": tls.SupportedGroups.SECP256R1,
    "secp384r1": tls.SupportedGroups.SECP384R1,
    "secp521r1": tls.SupportedGroups.SECP521R1,
    "sect163k1": tls.SupportedGroups.SECT163K1,
    "sect163r2": tls.SupportedGroups.SECT163R2,
    "sect233k1": tls.SupportedGroups.SECT233K1,
    "sect233r1": tls.SupportedGroups.SECT233R1,
    "sect283k1": tls.SupportedGroups.SECT283K1,
    "sect283r1": tls.SupportedGroups.SECT283R1,
    "sect409k1": tls.SupportedGroups.SECT409K1,
    "sect409r1": tls.SupportedGroups.SECT409R1,
    "sect571k1": tls.SupportedGroups.SECT571K1,
    "sect571r1": tls.SupportedGroups.SECT571R1,
}
# Map internally detected server misbehavior to the TLS alert description
# that should be sent to the peer in response.
# NOTE(review): EXTENTION_LENGHT_ERROR is misspelled, but the enum member
# is declared elsewhere — fix it at the declaration site, not here.
issue_to_alert_description: Dict[tls.ServerIssue, tls.AlertDescription] = {
    tls.ServerIssue.PSK_OUT_OF_RANGE: tls.AlertDescription.ILLEGAL_PARAMETER,
    tls.ServerIssue.KEY_SHARE_NOT_PRESENT: tls.AlertDescription.HANDSHAKE_FAILURE,
    tls.ServerIssue.SECURE_RENEG_FAILED: tls.AlertDescription.ILLEGAL_PARAMETER,
    tls.ServerIssue.VERIFY_DATA_INVALID: tls.AlertDescription.ILLEGAL_PARAMETER,
    tls.ServerIssue.CERT_REQ_NO_SIG_ALGO: tls.AlertDescription.MISSING_EXTENSION,
    tls.ServerIssue.EXTENTION_LENGHT_ERROR: tls.AlertDescription.DECODE_ERROR,
    tls.ServerIssue.SNI_NO_HOSTNAME: tls.AlertDescription.HANDSHAKE_FAILURE,
    tls.ServerIssue.FFDH_GROUP_UNKNOWN: tls.AlertDescription.ILLEGAL_PARAMETER,
    tls.ServerIssue.MESSAGE_LENGTH_ERROR: tls.AlertDescription.DECODE_ERROR,
    tls.ServerIssue.INCOMPATIBLE_KEY_EXCHANGE: tls.AlertDescription.HANDSHAKE_FAILURE,
    tls.ServerIssue.PARAMETER_LENGTH_ERROR: tls.AlertDescription.DECODE_ERROR,
    tls.ServerIssue.RECORD_TOO_SHORT: tls.AlertDescription.BAD_RECORD_MAC,
    tls.ServerIssue.RECORD_MAC_INVALID: tls.AlertDescription.BAD_RECORD_MAC,
    tls.ServerIssue.RECORD_WRONG_PADDING_LENGTH: tls.AlertDescription.BAD_RECORD_MAC,
    tls.ServerIssue.RECORD_WRONG_PADDING_BYTES: tls.AlertDescription.BAD_RECORD_MAC,
    tls.ServerIssue.ILLEGAL_PARAMETER_VALUE: tls.AlertDescription.ILLEGAL_PARAMETER,
}
| 42.520019
| 87
| 0.732955
| 11,178
| 91,333
| 5.63947
| 0.019771
| 0.030061
| 0.128684
| 0.122958
| 0.955297
| 0.947365
| 0.935214
| 0.930074
| 0.925045
| 0.920334
| 0
| 0.048085
| 0.177088
| 91,333
| 2,147
| 88
| 42.539823
| 0.790644
| 0.013193
| 0
| 0.76535
| 0
| 0
| 0.002098
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.00238
| 0
| 0.00238
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
da1cccb8011a75655b0e7c2f8262d0d69cb18047
| 30
|
py
|
Python
|
orion_core/backend/base_backend.py
|
nightred/orion-core
|
ad80eb9c18559f32e165245b1aa2e3b0927c5f9e
|
[
"MIT"
] | null | null | null |
orion_core/backend/base_backend.py
|
nightred/orion-core
|
ad80eb9c18559f32e165245b1aa2e3b0927c5f9e
|
[
"MIT"
] | null | null | null |
orion_core/backend/base_backend.py
|
nightred/orion-core
|
ad80eb9c18559f32e165245b1aa2e3b0927c5f9e
|
[
"MIT"
] | null | null | null |
def test():
    """Trivial smoke-test hook; unconditionally signals success."""
    return True
| 6
| 15
| 0.566667
| 4
| 30
| 4.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 30
| 4
| 16
| 7.5
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
da8bb5adb5897112bee0c41b83b9dc534aa9c4d1
| 1,200
|
py
|
Python
|
.venv/lib/python3.8/site-packages/vectorbt/utils/widgets.py
|
eo1989/VectorBTanalysis
|
bea3deaf2ee3fc114b308146f2af3e4f35f70197
|
[
"MIT"
] | null | null | null |
.venv/lib/python3.8/site-packages/vectorbt/utils/widgets.py
|
eo1989/VectorBTanalysis
|
bea3deaf2ee3fc114b308146f2af3e4f35f70197
|
[
"MIT"
] | null | null | null |
.venv/lib/python3.8/site-packages/vectorbt/utils/widgets.py
|
eo1989/VectorBTanalysis
|
bea3deaf2ee3fc114b308146f2af3e4f35f70197
|
[
"MIT"
] | null | null | null |
"""Utilities for displaying widgets."""
import plotly.graph_objects as go
from vectorbt import defaults
class CustomFigure(go.Figure):
    """Subclass of the `plotly.graph_objects.Figure` class initialized
    with default parameters from `vectorbt.defaults.layout`."""
    def __init__(self, *args, **kwargs):
        """Create the figure, then apply `defaults.layout` followed by any
        user-provided `layout` kwarg (user settings win on overlap)."""
        # Pop `layout` so the base constructor does not apply it before
        # the defaults; it is re-applied last, below.
        layout = kwargs.pop('layout', {})
        super().__init__(*args, **kwargs)
        self.update_layout(**defaults.layout)
        self.update_layout(**layout)
    def show_png(self):
        """Display the widget in PNG format at the figure's own
        layout width/height."""
        self.show(renderer="png", width=self.layout.width, height=self.layout.height)
class CustomFigureWidget(go.FigureWidget):
    """Subclass of the `plotly.graph_objects.FigureWidget` class initialized
    with default parameters from `vectorbt.defaults.layout`."""
    def __init__(self, *args, **kwargs):
        """Create the figure widget, then apply `defaults.layout` followed
        by any user-provided `layout` kwarg (user settings win on overlap)."""
        # Pop `layout` so the base constructor does not apply it before
        # the defaults; it is re-applied last, below.
        layout = kwargs.pop('layout', {})
        super().__init__(*args, **kwargs)
        self.update_layout(**defaults.layout)
        self.update_layout(**layout)
    def show_png(self):
        """Display the widget in PNG format at the figure's own
        layout width/height."""
        self.show(renderer="png", width=self.layout.width, height=self.layout.height)
| 33.333333
| 85
| 0.671667
| 142
| 1,200
| 5.5
| 0.302817
| 0.071703
| 0.081946
| 0.048656
| 0.791293
| 0.791293
| 0.711908
| 0.711908
| 0.711908
| 0.711908
| 0
| 0
| 0.188333
| 1,200
| 35
| 86
| 34.285714
| 0.801848
| 0.290833
| 0
| 0.777778
| 0
| 0
| 0.02214
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0
| 0.444444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e51abedd55c304596a5c3619a1b51177daf726e0
| 37,267
|
py
|
Python
|
codegen/funcs1_testgen.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | 2
|
2017-08-28T08:41:16.000Z
|
2018-05-29T03:49:36.000Z
|
codegen/funcs1_testgen.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | null | null | null |
codegen/funcs1_testgen.py
|
m1griffin/arrayfunc
|
df57097699c25d3e949e1ade307ed61eaa5728c2
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
##############################################################################
# Project: arrayfunc
# Purpose: Generate the unit tests for math functions which use one input parameter.
# Language: Python 3.5
# Date: 08-Dec-2017
#
###############################################################################
#
# Copyright 2014 - 2017 Michael Griffin <m12.griffin@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
# ==============================================================================
import itertools
import codegen_common
# ==============================================================================
# This template is for operators which do not use a second parameter.
# The template text is Python source for a generated unittest class; the
# class/method bodies are tab-indented so that the generated file compiles.
# Substitution keys: funclabel, funcname, arrayevenodd, simdpresent,
# typelabel, typecode, test_op_x, pyoperator, nosimd.
test_template_noparams = '''

##############################################################################
class %(funclabel)s_general_%(arrayevenodd)s_arraysize_%(simdpresent)s_simd_%(typelabel)s(unittest.TestCase):
	"""Test for basic general function operation.
	test_template_noparams
	"""

	##############################################################################
	def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
		"""This function is patched into assertEqual to allow testing for
		the floating point special values NaN, Inf, and -Inf.
		"""
		# NaN cannot be compared using normal means.
		if math.isnan(dataoutitem) and math.isnan(expecteditem):
			pass
		# Anything else can be compared normally.
		else:
			if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
				raise self.failureException('%%0.3f != %%0.3f' %% (expecteditem, dataoutitem))


	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.addTypeEqualityFunc(float, self.FloatassertEqual)

		if '%(arrayevenodd)s' == 'even':
			testdatasize = 160
		if '%(arrayevenodd)s' == 'odd':
			testdatasize = 159
		paramitersize = 5

		xdata = [x for x,y in zip(itertools.cycle([%(test_op_x)s]), range(testdatasize))]
		self.data = array.array('%(typecode)s', xdata)
		self.dataout = array.array('%(typecode)s', [0]*len(self.data))

		self.limited = len(self.data) // 2

		# The expected results.
		self.expected = [%(pyoperator)s(x) for x in self.data]

		# The expected results when the maxlen parameter is used.
		self.expectedlimiteddata = self.expected[0:self.limited] + list(self.data)[self.limited:]

		# The same, but where dataout is used as one of the sources.
		self.expectedlimiteddataout = self.expected[0:self.limited] + list(self.dataout)[self.limited:]


	########################################################
	def test_%(funclabel)s_basic_array_none_a1(self):
		"""Test %(funclabel)s as *array-none* for basic function - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.data %(nosimd)s)

		for dataoutitem, expecteditem in zip(list(self.data), self.expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_basic_array_none_a2(self):
		"""Test %(funclabel)s as *array-none* for basic function with matherrors=True - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.data, matherrors=True %(nosimd)s)

		for dataoutitem, expecteditem in zip(list(self.data), self.expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_basic_array_none_a3(self):
		"""Test %(funclabel)s as *array-none* for basic function with maxlen - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.data, maxlen=self.limited %(nosimd)s)

		for dataoutitem, expecteditem in zip(list(self.data), self.expectedlimiteddata):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_basic_array_none_a4(self):
		"""Test %(funclabel)s as *array-none* for basic function with maxlen and matherrors=True - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.data, maxlen=self.limited, matherrors=True %(nosimd)s)

		for dataoutitem, expecteditem in zip(list(self.data), self.expectedlimiteddata):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_basic_array_array_b1(self):
		"""Test %(funclabel)s as *array-array* for basic function - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.data, self.dataout %(nosimd)s)

		for dataoutitem, expecteditem in zip(list(self.dataout), self.expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_basic_array_array_b2(self):
		"""Test %(funclabel)s as *array-array* for basic function with matherrors=True - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.data, self.dataout, matherrors=True %(nosimd)s)

		for dataoutitem, expecteditem in zip(list(self.dataout), self.expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_basic_array_array_b3(self):
		"""Test %(funclabel)s as *array-array* for basic function with maxlen - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.data, self.dataout, maxlen=self.limited %(nosimd)s)

		for dataoutitem, expecteditem in zip(list(self.dataout), self.expectedlimiteddataout):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_basic_array_array_b4(self):
		"""Test %(funclabel)s as *array-array* for basic function with maxlen and matherrors=True - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.data, self.dataout, maxlen=self.limited, matherrors=True %(nosimd)s)

		for dataoutitem, expecteditem in zip(list(self.dataout), self.expectedlimiteddataout):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


##############################################################################
'''
# ==============================================================================
# The template used to generate the tests for testing invalid parameter types.
param_invalid_template = '''
##############################################################################
class %(funclabel)s_param_errors_%(typelabel)s(unittest.TestCase):
"""Test for invalid parameters.
param_invalid_template
"""
########################################################
def setUp(self):
"""Initialise.
"""
self.floatarray = array.array('%(typecode)s', [%(test_op_x)s])
arraysize = len(self.floatarray)
self.dataout = array.array('%(typecode)s', itertools.repeat(0.0, arraysize))
# Create some integer array equivalents.
self.intarray = array.array('i', [int(x) for x in self.floatarray])
self.intdataout = array.array('i', [int(x) for x in self.dataout])
########################################################
def test_%(funclabel)s_array_none_a1(self):
"""Test %(funclabel)s as *array-none* for integer array - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.floatarray)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(self.intarray)
########################################################
def test_%(funclabel)s_array_none_b1(self):
"""Test %(funclabel)s as *array-none* for matherrors='a' - Array code %(typelabel)s.
"""
# Copy the array so we don't change the original data.
floatarray = copy.copy(self.floatarray)
# This version is expected to pass.
arrayfunc.%(funcname)s(floatarray, matherrors=True)
floatarray = copy.copy(self.floatarray)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(floatarray, matherrors='a')
########################################################
def test_%(funclabel)s_array_none_b2(self):
"""Test %(funclabel)s as *array-none* for maxlen='a' - Array code %(typelabel)s.
"""
# Copy the array so we don't change the original data.
floatarray = copy.copy(self.floatarray)
testmaxlen = len(floatarray) // 2
# This version is expected to pass.
arrayfunc.%(funcname)s(floatarray, maxlen=testmaxlen)
floatarray = copy.copy(self.floatarray)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(floatarray, maxlen='a')
########################################################
def test_%(funclabel)s_array_array_c1(self):
"""Test %(funclabel)s as *array-array* for integer array - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.floatarray, self.dataout)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(self.intarray, self.dataout)
########################################################
def test_%(funclabel)s_array_array_c2(self):
"""Test %(funclabel)s as *array-array* for integer output array - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.floatarray, self.dataout)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(self.floatarray, self.intdataout)
########################################################
def test_%(funclabel)s_array_array_c3(self):
"""Test %(funclabel)s as *array-array* for integer input and output array - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.floatarray, self.dataout)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(self.intarray, self.intdataout)
########################################################
def test_%(funclabel)s_array_num_array_d1(self):
"""Test %(funclabel)s as *array-num-array* for matherrors='a' - Array code %(typelabel)s.
"""
# This version is expected to pass.
arrayfunc.%(funcname)s(self.floatarray, self.dataout, matherrors=True)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(self.floatarray, self.dataout, matherrors='a')
########################################################
def test_%(funclabel)s_array_array_d2(self):
"""Test %(funclabel)s as *array-array* for maxlen='a' - Array code %(typelabel)s.
"""
testmaxlen = len(self.floatarray) // 2
# This version is expected to pass.
arrayfunc.%(funcname)s(self.floatarray, self.dataout, maxlen=testmaxlen)
floatarray = copy.copy(self.floatarray)
# This is the actual test.
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s(self.floatarray, self.dataout, maxlen='a')
########################################################
def test_%(funclabel)s_no_params_e1(self):
"""Test %(funclabel)s with no parameters - Array code %(typelabel)s.
"""
with self.assertRaises(TypeError):
arrayfunc.%(funcname)s()
##############################################################################
'''
# ==============================================================================

# The template used to generate the tests for testing invalid parameter types
# for "nosimd". This is used only for those functions which support SIMD.
# The template text is Python source for a generated unittest class; the
# class/method bodies are tab-indented so that the generated file compiles.
# Substitution keys: funclabel, funcname, typelabel, typecode, test_op_x.
param_nosimd_invalid_template = '''

##############################################################################
class %(funclabel)s_param_nosimd_errors_%(typelabel)s(unittest.TestCase):
	"""Test for invalid nosimd parameters.
	param_nosimd_invalid_template
	"""

	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.floatarray = array.array('%(typecode)s', [%(test_op_x)s])

		arraysize = len(self.floatarray)

		self.dataout = array.array('%(typecode)s', itertools.repeat(0.0, arraysize))

		# Create some integer array equivalents.
		self.intarray = array.array('i', [int(x) for x in self.floatarray])
		self.intdataout = array.array('i', [int(x) for x in self.dataout])


	########################################################
	def test_%(funclabel)s_array_none_b1(self):
		"""Test %(funclabel)s as *array-none* for nosimd='a' - Array code %(typelabel)s.
		"""
		# Copy the array so we don't change the original data.
		floatarray = copy.copy(self.floatarray)

		# This version is expected to pass.
		arrayfunc.%(funcname)s(floatarray, nosimd=True)

		floatarray = copy.copy(self.floatarray)

		# This is the actual test.
		with self.assertRaises(TypeError):
			arrayfunc.%(funcname)s(floatarray, nosimd='a')


	########################################################
	def test_%(funclabel)s_array_num_array_d1(self):
		"""Test %(funclabel)s as *array-num-array* for nosimd='a' - Array code %(typelabel)s.
		"""
		# This version is expected to pass.
		arrayfunc.%(funcname)s(self.floatarray, self.dataout, nosimd=True)

		# This is the actual test.
		with self.assertRaises(TypeError):
			arrayfunc.%(funcname)s(self.floatarray, self.dataout, nosimd='a')


##############################################################################
'''
# ==============================================================================

# The template used to generate the tests for nan, inf, -inf in data arrays
# when exceptions are expected and no second parameter is present.
# The template text is Python source for a generated unittest class; the
# class/method bodies are tab-indented so that the generated file compiles.
# Substitution keys: funclabel, funcname, testarray, testlabel, typelabel,
# typecode.
nan_data_error_noparam_template = '''

##############################################################################
class %(funclabel)s_nandata_exceptions_%(testarray)s_%(typelabel)s(unittest.TestCase):
	"""Test for basic general function operation.
	nan_data_error_noparam_template
	"""

	##############################################################################
	def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
		"""This function is patched into assertEqual to allow testing for
		the floating point special values NaN, Inf, and -Inf.
		"""
		# NaN cannot be compared using normal means.
		if math.isnan(dataoutitem) and math.isnan(expecteditem):
			pass
		# Anything else can be compared normally.
		else:
			if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
				raise self.failureException('%%0.3f != %%0.3f' %% (expecteditem, dataoutitem))


	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.addTypeEqualityFunc(float, self.FloatassertEqual)

		self.dataout = array.array('%(typecode)s', itertools.repeat(0.0, 10))

		self.datainf = array.array('%(typecode)s', [math.inf] * 10)
		self.datanan = array.array('%(typecode)s', [math.nan] * 10)
		self.dataninf = array.array('%(typecode)s', [-math.inf] * 10)


	########################################################
	def test_%(funclabel)s_outputarray(self):
		"""Test %(funclabel)s for data of %(testlabel)s with matherrors checking on and single parameter functions - Array code %(typelabel)s.
		"""
		with self.assertRaises(ArithmeticError):
			arrayfunc.%(funcname)s(self.data%(testarray)s, self.dataout)


	########################################################
	def test_%(funclabel)s_inplace(self):
		"""Test %(funclabel)s in place for data of %(testlabel)s with matherrors checking on and single parameter functions - Array code %(typelabel)s.
		"""
		with self.assertRaises(ArithmeticError):
			arrayfunc.%(funcname)s(self.data%(testarray)s)


	########################################################
	def test_%(funclabel)s_ov_outputarray(self):
		"""Test %(funclabel)s for data of %(testlabel)s with matherrors checking off and single parameter functions - Array code %(typelabel)s.
		"""
		# This is the actual test.
		arrayfunc.%(funcname)s(self.data%(testarray)s, self.dataout, matherrors=True)


	########################################################
	def test_%(funclabel)s_ov_inplace(self):
		"""Test %(funclabel)s in place for data of %(testlabel)s with matherrors checking off and single parameter functions - Array code %(typelabel)s.
		"""
		# This is the actual test.
		arrayfunc.%(funcname)s(self.data%(testarray)s, matherrors=True)


##############################################################################
'''
# The template used to generate the tests for nan, inf, -inf in data arrays
# when exceptions are expected and no second parameter is present. When
# matherrors checking is turned off, the results are checked.
# The template text is Python source for a generated unittest class; the
# class/method bodies are tab-indented so that the generated file compiles.
# Substitution keys: funclabel, funcname, testarray, testlabel, typelabel,
# typecode, pyoperator.
nan_data_errorchecked_noparam_template = '''

##############################################################################
class %(funclabel)s_nandata_exceptions_%(testarray)s_%(typelabel)s(unittest.TestCase):
	"""Test for basic general function operation.
	nan_data_errorchecked_noparam_template
	"""

	##############################################################################
	def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
		"""This function is patched into assertEqual to allow testing for
		the floating point special values NaN, Inf, and -Inf.
		"""
		# NaN cannot be compared using normal means.
		if math.isnan(dataoutitem) and math.isnan(expecteditem):
			pass
		# Anything else can be compared normally.
		else:
			if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
				raise self.failureException('%%0.3f != %%0.3f' %% (expecteditem, dataoutitem))


	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.addTypeEqualityFunc(float, self.FloatassertEqual)

		self.dataout = array.array('%(typecode)s', itertools.repeat(0.0, 10))

		self.datainf = array.array('%(typecode)s', [math.inf] * 10)
		self.datanan = array.array('%(typecode)s', [math.nan] * 10)
		self.dataninf = array.array('%(typecode)s', [-math.inf] * 10)


	########################################################
	def test_%(funclabel)s_outputarray(self):
		"""Test %(funclabel)s for data of %(testlabel)s with matherrors checking on and single parameter functions - Array code %(typelabel)s.
		"""
		with self.assertRaises(ArithmeticError):
			arrayfunc.%(funcname)s(self.data%(testarray)s, self.dataout)


	########################################################
	def test_%(funclabel)s_inplace(self):
		"""Test %(funclabel)s in place for data of %(testlabel)s with matherrors checking on and single parameter functions - Array code %(typelabel)s.
		"""
		with self.assertRaises(ArithmeticError):
			arrayfunc.%(funcname)s(self.data%(testarray)s)


	########################################################
	def test_%(funclabel)s_ov_outputarray(self):
		"""Test %(funclabel)s for data of %(testlabel)s with matherrors checking off and single parameter functions - Array code %(typelabel)s.
		"""
		# Calculate the expected result.
		expected = [%(pyoperator)s(x) for x in self.data%(testarray)s]

		# This is the actual test.
		arrayfunc.%(funcname)s(self.data%(testarray)s, self.dataout, matherrors=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_ov_inplace(self):
		"""Test %(funclabel)s in place for data of %(testlabel)s with matherrors checking off and single parameter functions - Array code %(typelabel)s.
		"""
		# Calculate the expected result.
		expected = [%(pyoperator)s(x) for x in self.data%(testarray)s]

		# This is the actual test.
		arrayfunc.%(funcname)s(self.data%(testarray)s, matherrors=True)

		for dataoutitem, expecteditem in zip(list(self.data%(testarray)s), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


##############################################################################
'''
# The template used to generate the tests for nan, inf, -inf in data arrays
# when exceptions are not expected.
# The template text is Python source for a generated unittest class; the
# class/method bodies are tab-indented so that the generated file compiles.
# Substitution keys: funclabel, funcname, testarray, testlabel, typelabel,
# typecode, pyoperator.
nan_data_noerror_noparam_template = '''

##############################################################################
class %(funclabel)s_nandata_errorchecked_%(testarray)s_%(typelabel)s(unittest.TestCase):
	"""Test for basic general function operation.
	nan_data_noerror_noparam_template
	"""

	##############################################################################
	def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
		"""This function is patched into assertEqual to allow testing for
		the floating point special values NaN, Inf, and -Inf.
		"""
		# NaN cannot be compared using normal means.
		if math.isnan(dataoutitem) and math.isnan(expecteditem):
			pass
		# Anything else can be compared normally.
		else:
			if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
				raise self.failureException('%%0.3f != %%0.3f' %% (expecteditem, dataoutitem))


	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.addTypeEqualityFunc(float, self.FloatassertEqual)

		self.dataout = array.array('%(typecode)s', itertools.repeat(0.0, 10))

		self.datainf = array.array('%(typecode)s', [math.inf] * 10)
		self.datanan = array.array('%(typecode)s', [math.nan] * 10)
		self.dataninf = array.array('%(typecode)s', [-math.inf] * 10)


	########################################################
	def test_%(funclabel)s_outputarray(self):
		"""Test %(funclabel)s for data of %(testlabel)s with matherrors checking on and single parameter functions - Array code %(typelabel)s.
		"""
		# Calculate the expected result.
		expected = [%(pyoperator)s(x) for x in self.data%(testarray)s]

		# This is the actual test.
		arrayfunc.%(funcname)s(self.data%(testarray)s, self.dataout)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_inplace(self):
		"""Test %(funclabel)s in place for data of %(testlabel)s with matherrors checking on and single parameter functions - Array code %(typelabel)s.
		"""
		# Calculate the expected result.
		expected = [%(pyoperator)s(x) for x in self.data%(testarray)s]

		# This is the actual test.
		arrayfunc.%(funcname)s(self.data%(testarray)s)

		for dataoutitem, expecteditem in zip(list(self.data%(testarray)s), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_ov_outputarray(self):
		"""Test %(funclabel)s for data of %(testlabel)s with matherrors checking off and single parameter functions - Array code %(typelabel)s.
		"""
		# Calculate the expected result.
		expected = [%(pyoperator)s(x) for x in self.data%(testarray)s]

		# This is the actual test.
		arrayfunc.%(funcname)s(self.data%(testarray)s, self.dataout, matherrors=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_ov_inplace(self):
		"""Test %(funclabel)s in place for data of %(testlabel)s with matherrors checking off and single parameter functions - Array code %(typelabel)s.
		"""
		# Calculate the expected result.
		expected = [%(pyoperator)s(x) for x in self.data%(testarray)s]

		# This is the actual test.
		arrayfunc.%(funcname)s(self.data%(testarray)s, matherrors=True)

		for dataoutitem, expecteditem in zip(list(self.data%(testarray)s), expected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


##############################################################################
'''
# ==============================================================================

# The template used to generate the tests for isnan and isinf only.
# The template text is Python source for a generated unittest class; the
# class/method bodies are tab-indented so that the generated file compiles.
# Substitution keys: funclabel, funcname, typelabel, typecode, test_op_x,
# pyoperator.
nan_data_isnanisinftest_template = '''

##############################################################################
class %(funclabel)s_isnanisinftest_%(typelabel)s(unittest.TestCase):
	"""Test for invalid parameters.
	nan_data_isnanisinftest_template
	"""

	##############################################################################
	def FloatassertEqual(self, expecteditem, dataoutitem, msg=None):
		"""This function is patched into assertEqual to allow testing for
		the floating point special values NaN, Inf, and -Inf.
		"""
		# NaN cannot be compared using normal means.
		if math.isnan(dataoutitem) and math.isnan(expecteditem):
			pass
		# Anything else can be compared normally.
		else:
			if not math.isclose(expecteditem, dataoutitem, rel_tol=0.01, abs_tol=0.0):
				raise self.failureException('%%0.3f != %%0.3f' %% (expecteditem, dataoutitem))


	########################################################
	def setUp(self):
		"""Initialise.
		"""
		self.addTypeEqualityFunc(float, self.FloatassertEqual)

		self.floatarray = array.array('%(typecode)s', [%(test_op_x)s])

		floatarraysize = len(self.floatarray)

		self.dataout = array.array('%(typecode)s', itertools.repeat(0.0, floatarraysize))

		# This interleaves ordinaray float data with nan, inf, and -inf.
		self.testarray = array.array('%(typecode)s', itertools.chain.from_iterable([(x,y) for (x,y) in zip(itertools.cycle([math.nan, math.inf, -math.inf]), self.floatarray)]))

		# These are the expected results from tests.
		self.floatexpected = [float(%(pyoperator)s(x)) for x in self.floatarray]
		self.testexpected = [float(%(pyoperator)s(x)) for x in self.testarray]

		testarraysize = len(self.testexpected)

		self.testdataout = array.array('%(typecode)s', itertools.repeat(0.0, testarraysize))


	########################################################
	def test_%(funclabel)s_array_none_a1(self):
		"""Test %(funclabel)s as *array-none* for float data - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.floatarray)

		for dataoutitem, expecteditem in zip(list(self.floatarray), self.floatexpected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_array_none_a2(self):
		"""Test %(funclabel)s as *array-none* for mixed float, nan, inf array - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.testarray)

		for dataoutitem, expecteditem in zip(list(self.testarray), self.testexpected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_array_none_b1(self):
		"""Test %(funclabel)s as *array-none* for float data with matherrors=True - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.floatarray, matherrors=True)

		for dataoutitem, expecteditem in zip(list(self.floatarray), self.floatexpected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_array_none_b2(self):
		"""Test %(funclabel)s as *array-none* for mixed float, nan, inf data with matherrors=True - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.testarray, matherrors=True)

		for dataoutitem, expecteditem in zip(list(self.testarray), self.testexpected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_array_none_c1(self):
		"""Test %(funclabel)s as *array-none* for float data with maxlen=length//2 - Array code %(typelabel)s.
		"""
		testmaxlen = len(self.floatarray) // 2

		floathalfexpected = copy.copy(self.floatexpected[0: testmaxlen] + list(self.floatarray)[testmaxlen :])

		arrayfunc.%(funcname)s(self.floatarray, maxlen=testmaxlen)

		for dataoutitem, expecteditem in zip(list(self.floatarray), floathalfexpected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_array_none_c2(self):
		"""Test %(funclabel)s as *array-none* for mixed float, nan, inf with maxlen=length//2 - Array code %(typelabel)s.
		"""
		testmaxlen = len(self.testarray) // 2

		testhalfexpected = self.testexpected[0: testmaxlen] + list(self.testarray)[testmaxlen :]

		arrayfunc.%(funcname)s(self.testarray, maxlen=testmaxlen)

		for dataoutitem, expecteditem in zip(list(self.testarray), testhalfexpected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_array_array_d1(self):
		"""Test %(funclabel)s as *array-array* for float data with output array - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.floatarray, self.dataout)

		for dataoutitem, expecteditem in zip(list(self.dataout), self.floatexpected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_array_array_d2(self):
		"""Test %(funclabel)s as *array-array* for mixed float, nan, inf with output array - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.testarray, self.testdataout)

		for dataoutitem, expecteditem in zip(list(self.testdataout), self.testexpected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_array_array_e1(self):
		"""Test %(funclabel)s as *array-array* for float data with output array with matherrors=True - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.floatarray, self.dataout, matherrors=True)

		for dataoutitem, expecteditem in zip(list(self.dataout), self.floatexpected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_array_array_e2(self):
		"""Test %(funclabel)s as *array-array* for mixed float, nan, inf with output array with matherrors=True - Array code %(typelabel)s.
		"""
		arrayfunc.%(funcname)s(self.testarray, self.testdataout, matherrors=True)

		for dataoutitem, expecteditem in zip(list(self.testdataout), self.testexpected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_array_array_f1(self):
		"""Test %(funclabel)s as *array-array* for float data with output array with maxlen=length//2 - Array code %(typelabel)s.
		"""
		testmaxlen = len(self.floatarray) // 2

		floathalfexpected = self.floatexpected[0: testmaxlen] + list(self.dataout)[testmaxlen :]

		arrayfunc.%(funcname)s(self.floatarray, self.dataout, maxlen=testmaxlen)

		for dataoutitem, expecteditem in zip(list(self.dataout), floathalfexpected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


	########################################################
	def test_%(funclabel)s_array_array_f2(self):
		"""Test %(funclabel)s as *array-array* for mixed float, nan, inf with output array with maxlen=length//2 - Array code %(typelabel)s.
		"""
		testmaxlen = len(self.testarray) // 2

		testhalfexpected = self.testexpected[0: testmaxlen] + list(self.testdataout)[testmaxlen :]

		arrayfunc.%(funcname)s(self.testarray, self.testdataout, maxlen=testmaxlen)

		for dataoutitem, expecteditem in zip(list(self.testdataout), testhalfexpected):
			# The behavour of assertEqual is modified by addTypeEqualityFunc.
			self.assertEqual(dataoutitem, expecteditem)


##############################################################################
'''
# ==============================================================================
# ==============================================================================

# These are all the test code templates, keyed by the template names used in
# the configuration file so each function can select its own NaN/inf tests.
test_templates = {'nan_data_error_noparam_template' : nan_data_error_noparam_template,
        'nan_data_errorchecked_noparam_template' : nan_data_errorchecked_noparam_template,
        'nan_data_noerror_noparam_template' : nan_data_noerror_noparam_template,
        'nan_data_isnanisinftest_template' : nan_data_isnanisinftest_template,
        }

# ==============================================================================

# Read in the op codes.
opdata = codegen_common.ReadINI('affuncdata.ini')

# Filter out the desired math functions (only the no-parameter styles).
funclist = [(x,dict(y)) for x,y in opdata.items() if y.get('test_op_templ') in ('test_template_noparams', 'test_template_noparams_1simd')]

# Create a list of names which support SIMD.
havesimd = [x for x,y in funclist if y.get('test_op_templ') == 'test_template_noparams_1simd']

# ==============================================================================

# This defines the module name.
modulename = 'arrayfunc'
# Import the array module for testing.
arrayimport = 'import array'

# Template names, function labels, and test labels driving the NaN / inf /
# -inf test generation; the three lists are iterated in lockstep below.
nantemplates = ['test_nan_data_template', 'test_inf_data_template', 'test_ninf_data_template']
nanfunclabel = ['nan', 'inf', 'ninf']
nantestlabel = ['nan', 'inf', '-inf']

# Generate one complete unit-test source file per selected function.
for funcname, func in funclist:
    filenamebase = 'test_' + funcname
    filename = filenamebase + '.py'
    headerdate = codegen_common.FormatHeaderData(filenamebase, '09-Dec-2017', funcname)

    # Add additional header data.
    headerdate['modulename'] = modulename
    headerdate['arrayimport'] = arrayimport

    with open(filename, 'w') as f:
        # The copyright header.
        f.write(codegen_common.HeaderTemplate % headerdate)

        # Basic operation tests, once per floating point array type code.
        for functype in codegen_common.floatarrays:
            funcdata = {'funclabel' : funcname,
                'funcname' : funcname,
                'pyoperator' : func['pyoperator'],
                'typelabel' : functype,
                'typecode' : functype,
                'test_op_x' : func['test_op_x']
                }

            # Test for basic operation.
            # Not all functions support SIMD operations.
            if funcname in havesimd:
                # With SIMD, even data array size.
                funcdata['simdpresent'] = 'with'
                funcdata['nosimd'] = ''
                funcdata['arrayevenodd'] = 'even'
                f.write(test_template_noparams % funcdata)

                # With SIMD, odd data array size.
                funcdata['simdpresent'] = 'with'
                funcdata['nosimd'] = ''
                funcdata['arrayevenodd'] = 'odd'
                f.write(test_template_noparams % funcdata)

                # Without SIMD.
                funcdata['simdpresent'] = 'without'
                funcdata['nosimd'] = ', nosimd=True'
                funcdata['arrayevenodd'] = 'even'
                f.write(test_template_noparams % funcdata)
            else:
                # Without SIMD.
                funcdata['simdpresent'] = 'without'
                funcdata['nosimd'] = ''
                funcdata['arrayevenodd'] = 'even'
                f.write(test_template_noparams % funcdata)

            #####

            # Test for invalid parameters. One template should work for all
            # functions of this style.
            f.write(param_invalid_template % funcdata)

            # This one is used only with functions that support SIMD.
            if funcname in havesimd:
                f.write(param_nosimd_invalid_template % funcdata)

        #####

        # Tests involving NaN, inf, and -inf, one block per (template, label)
        # triplet and per floating point array type code.
        for templatename, testarray, testlabel in zip(nantemplates, nanfunclabel, nantestlabel) :
            testtemplate = test_templates[func[templatename]]
            for functype in codegen_common.floatarrays:
                funcdata = {'funclabel' : funcname, 'funcname' : funcname,
                    'pyoperator' : func['pyoperator'], 'typelabel' : functype,
                    'typecode' : functype, 'test_op_x' : func['test_op_x'],
                    'testarray' : testarray, 'testlabel' : testlabel}
                f.write(testtemplate % funcdata)

        # Closing boilerplate which runs the tests when executed directly.
        f.write(codegen_common.testendtemplate % {'funcname' : funcname, 'testprefix' : 'af'})

# ==============================================================================
| 37.267
| 170
| 0.6213
| 4,198
| 37,267
| 5.435445
| 0.082658
| 0.040757
| 0.052765
| 0.044351
| 0.858883
| 0.84968
| 0.831493
| 0.80717
| 0.789771
| 0.756815
| 0
| 0.00523
| 0.127754
| 37,267
| 999
| 171
| 37.304304
| 0.696733
| 0.08211
| 0
| 0.722135
| 1
| 0.11303
| 0.925455
| 0.396678
| 0
| 0
| 0
| 0
| 0.128728
| 1
| 0
| false
| 0.023548
| 0.006279
| 0
| 0.006279
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e51c9f992a6cd138774fe8586512ab66123b62e4
| 24,654
|
py
|
Python
|
data/migrations/0001_initial.py
|
NicolaDiLeva/CapecListWebApp
|
18b43b9b09df5a09ddcd7ece654f8f098456cb72
|
[
"Apache-2.0"
] | null | null | null |
data/migrations/0001_initial.py
|
NicolaDiLeva/CapecListWebApp
|
18b43b9b09df5a09ddcd7ece654f8f098456cb72
|
[
"Apache-2.0"
] | null | null | null |
data/migrations/0001_initial.py
|
NicolaDiLeva/CapecListWebApp
|
18b43b9b09df5a09ddcd7ece654f8f098456cb72
|
[
"Apache-2.0"
] | null | null | null |
# Generated by Django 3.0.8 on 2020-07-31 17:32
from django.db import migrations, models
def _auto_id():
    """Return a fresh standard auto-created integer primary key field."""
    return models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')


def _capec_fields():
    """Return a fresh copy of the field list shared by all CAPEC models.

    Every CAPEC table/view in this schema carries exactly the same twenty
    columns; a fresh list (with fresh field instances) is built per call so
    no two CreateModel operations share field objects.
    """
    def char(dbcol, length):
        # Nullable/blank CharField mapped onto an existing column name.
        return models.CharField(blank=True, db_column=dbcol, max_length=length, null=True)

    def text(dbcol):
        # Nullable/blank TextField mapped onto an existing column name.
        return models.TextField(blank=True, db_column=dbcol, null=True)

    return [
        ('id', models.AutoField(db_column='ID', primary_key=True, serialize=False)),
        ('name', char('Name', 200)),
        ('abstraction', char('Abstraction', 10)),
        ('status', char('Status', 10)),
        ('description', text('Description')),
        ('alternateterms', text('AlternateTerms')),
        ('likelihoodofattack', char('LikelihoodOfAttack', 10)),
        ('typicalseverity', char('TypicalSeverity', 10)),
        ('relatedattackpatterns', text('RelatedAttackPatterns')),
        ('executionflow', text('ExecutionFlow')),
        ('prerequisites', text('Prerequisites')),
        ('skillsrequired', text('SkillsRequired')),
        ('resourcesrequired', text('ResourcesRequired')),
        ('indicators', text('Indicators')),
        ('consequences', text('Consequences')),
        ('mitigations', text('Mitigations')),
        ('exampleinstances', text('ExampleInstances')),
        ('relatedweaknesses', text('RelatedWeaknesses')),
        ('taxonomymappings', text('TaxonomyMappings')),
        ('notes', text('Notes')),
    ]


def _capec_model(model_name, table_name):
    """CreateModel for one unmanaged CAPEC model.

    *table_name* doubles as the verbose plural name, matching the original
    hand-written operations where both values were always identical.
    """
    return migrations.CreateModel(
        name=model_name,
        fields=_capec_fields(),
        options={
            'verbose_name_plural': table_name,
            'db_table': table_name,
            'managed': False,
        },
    )


class Migration(migrations.Migration):
    """Initial schema snapshot.

    Every model is declared with ``'managed': False`` so Django records the
    existing tables without attempting to create or alter them.  The eight
    CAPEC models share one identical field list, built by :func:`_capec_fields`
    instead of being repeated verbatim per model.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='AuthGroup',
            fields=[
                ('id', _auto_id()),
                ('name', models.CharField(max_length=150, unique=True)),
            ],
            options={
                'db_table': 'auth_group',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AuthGroupPermissions',
            fields=[
                ('id', _auto_id()),
            ],
            options={
                'db_table': 'auth_group_permissions',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AuthPermission',
            fields=[
                ('id', _auto_id()),
                ('codename', models.CharField(max_length=100)),
                ('name', models.CharField(max_length=255)),
            ],
            options={
                'db_table': 'auth_permission',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AuthUser',
            fields=[
                ('id', _auto_id()),
                ('password', models.CharField(max_length=128)),
                ('last_login', models.DateTimeField(blank=True, null=True)),
                ('is_superuser', models.BooleanField()),
                ('username', models.CharField(max_length=150, unique=True)),
                ('first_name', models.CharField(max_length=30)),
                ('email', models.CharField(max_length=254)),
                ('is_staff', models.BooleanField()),
                ('is_active', models.BooleanField()),
                ('date_joined', models.DateTimeField()),
                ('last_name', models.CharField(max_length=150)),
            ],
            options={
                'db_table': 'auth_user',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AuthUserGroups',
            fields=[
                ('id', _auto_id()),
            ],
            options={
                'db_table': 'auth_user_groups',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AuthUserUserPermissions',
            fields=[
                ('id', _auto_id()),
            ],
            options={
                'db_table': 'auth_user_user_permissions',
                'managed': False,
            },
        ),
        _capec_model('ComprehensiveCapecDictionary', 'Comprehensive CAPEC Dictionary'),
        _capec_model('DeprecatedEntries', 'Deprecated Entries'),
        _capec_model('DetailedAbstractions', 'Detailed Abstractions'),
        migrations.CreateModel(
            name='DjangoAdminLog',
            fields=[
                ('id', _auto_id()),
                ('action_time', models.DateTimeField()),
                ('object_id', models.TextField(blank=True, null=True)),
                ('object_repr', models.CharField(max_length=200)),
                ('change_message', models.TextField()),
                ('action_flag', models.PositiveSmallIntegerField()),
            ],
            options={
                'db_table': 'django_admin_log',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='DjangoContentType',
            fields=[
                ('id', _auto_id()),
                ('app_label', models.CharField(max_length=100)),
                ('model', models.CharField(max_length=100)),
            ],
            options={
                'db_table': 'django_content_type',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='DjangoMigrations',
            fields=[
                ('id', _auto_id()),
                ('app', models.CharField(max_length=255)),
                ('name', models.CharField(max_length=255)),
                ('applied', models.DateTimeField()),
            ],
            options={
                'db_table': 'django_migrations',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='DjangoSession',
            fields=[
                ('session_key', models.CharField(max_length=40, primary_key=True, serialize=False)),
                ('session_data', models.TextField()),
                ('expire_date', models.DateTimeField()),
            ],
            options={
                'db_table': 'django_session',
                'managed': False,
            },
        ),
        _capec_model('DomainsOfAttack', 'Domains of Attack'),
        _capec_model('MechanismsOfAttack', 'Mechanisms of Attack'),
        _capec_model('MetaAbstractions', 'Meta Abstractions'),
        _capec_model('MobileDevicePatterns', 'Mobile Device Patterns'),
        _capec_model('StandardAbstractions', 'Standard Abstractions'),
    ]
| 64.878947
| 127
| 0.599903
| 2,303
| 24,654
| 6.284846
| 0.064264
| 0.088434
| 0.115517
| 0.178527
| 0.901271
| 0.858367
| 0.832873
| 0.821542
| 0.821542
| 0.821542
| 0
| 0.007776
| 0.254117
| 24,654
| 379
| 128
| 65.050132
| 0.779325
| 0.001825
| 0
| 0.75
| 1
| 0
| 0.2201
| 0.017678
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.002688
| 0.002688
| 0
| 0.013441
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e529fd51d33907effbea27f0b7fc7de63bcc3992
| 242
|
py
|
Python
|
src/latte/metrics/keras/interpolatability.py
|
SoftwareImpacts/SIMPAC-2021-192
|
92c6eb8bb8b1f45b0b86d222b87b2f1e4e949d04
|
[
"MIT"
] | 1
|
2021-12-21T00:38:21.000Z
|
2021-12-21T00:38:21.000Z
|
src/latte/metrics/keras/interpolatability.py
|
SoftwareImpacts/SIMPAC-2021-192
|
92c6eb8bb8b1f45b0b86d222b87b2f1e4e949d04
|
[
"MIT"
] | null | null | null |
src/latte/metrics/keras/interpolatability.py
|
SoftwareImpacts/SIMPAC-2021-192
|
92c6eb8bb8b1f45b0b86d222b87b2f1e4e949d04
|
[
"MIT"
] | null | null | null |
"""Keras bindings for the core interpolatability metrics."""
from .wrapper import KerasMetricWrapper
from ..core import interpolatability as C
from functools import partial

# Each alias is a functools.partial that pre-binds the corresponding core
# metric class to KerasMetricWrapper, so calling e.g. Smoothness(...) builds
# a KerasMetricWrapper around C.Smoothness with the given arguments.
Smoothness = partial(KerasMetricWrapper, metric=C.Smoothness)
Monotonicity = partial(KerasMetricWrapper, metric=C.Monotonicity)
| 30.25
| 65
| 0.838843
| 26
| 242
| 7.807692
| 0.5
| 0.246305
| 0.305419
| 0.315271
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099174
| 242
| 7
| 66
| 34.571429
| 0.931193
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.6
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
e52a0495eff32c2512b2db9ff0142f5140b928b8
| 111
|
py
|
Python
|
NodeDefender/mqtt/message/respond/icpe/sys/sys.py
|
CTSNE/NodeDefender
|
24e19f53a27d3b53e599cba8b1448f8f16c0bd5e
|
[
"MIT"
] | 4
|
2016-09-23T17:51:05.000Z
|
2017-03-14T02:52:26.000Z
|
NodeDefender/mqtt/message/respond/icpe/sys/sys.py
|
CTSNE/NodeDefender
|
24e19f53a27d3b53e599cba8b1448f8f16c0bd5e
|
[
"MIT"
] | 1
|
2016-09-22T11:32:33.000Z
|
2017-11-14T10:00:24.000Z
|
NodeDefender/mqtt/message/respond/icpe/sys/sys.py
|
CTSNE/NodeDefender
|
24e19f53a27d3b53e599cba8b1448f8f16c0bd5e
|
[
"MIT"
] | 4
|
2016-10-09T19:05:16.000Z
|
2020-05-14T04:00:30.000Z
|
import NodeDefender
def reboot(topic, payload):
    """Handler stub for iCPE 'reboot' messages.

    Ignores *topic* and *payload* entirely and reports success.
    """
    return True
def battery(topic, payload):
    """Handler stub for iCPE 'battery' messages.

    Ignores *topic* and *payload* entirely and reports success.
    """
    return True
| 13.875
| 28
| 0.72973
| 14
| 111
| 5.785714
| 0.642857
| 0.296296
| 0.444444
| 0.54321
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.198198
| 111
| 7
| 29
| 15.857143
| 0.910112
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
e54c46aa1d058a32bc19375a4df2eead6bdba36d
| 43
|
py
|
Python
|
python/testData/refactoring/move/importForMovedElementWithPreferredQualifiedImportStyle/after/src/a.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/refactoring/move/importForMovedElementWithPreferredQualifiedImportStyle/after/src/a.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/refactoring/move/importForMovedElementWithPreferredQualifiedImportStyle/after/src/a.py
|
truthiswill/intellij-community
|
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
from b import bar
def foo():
    # Calls bar() (imported from module b above); the return value is discarded.
    bar()
| 5.375
| 17
| 0.534884
| 7
| 43
| 3.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.348837
| 43
| 7
| 18
| 6.142857
| 0.821429
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
006c62fb1fa2946f9ec4427dda13b903db0df6d4
| 43,542
|
py
|
Python
|
tests/model_execution/test_bash.py
|
OasisLMF/OasisLMF
|
141a1bf4b6cacb3b71c4216bf0997b6b9b85e1d2
|
[
"BSD-3-Clause"
] | 88
|
2018-03-24T11:57:10.000Z
|
2022-03-21T13:04:41.000Z
|
tests/model_execution/test_bash.py
|
OasisLMF/OasisLMF
|
141a1bf4b6cacb3b71c4216bf0997b6b9b85e1d2
|
[
"BSD-3-Clause"
] | 558
|
2018-03-14T14:16:30.000Z
|
2022-03-29T12:48:14.000Z
|
tests/model_execution/test_bash.py
|
OasisLMF/OasisLMF
|
141a1bf4b6cacb3b71c4216bf0997b6b9b85e1d2
|
[
"BSD-3-Clause"
] | 41
|
2018-04-09T11:13:12.000Z
|
2021-10-05T14:43:11.000Z
|
import hashlib
import io
import json
import os
import shutil
from tempfile import NamedTemporaryFile
from unittest import TestCase
from oasislmf.model_execution.bash import genbash, create_bash_outputs, create_bash_analysis, bash_wrapper, bash_params
from oasislmf.utils import diff
# Directory containing this test module; the kparse fixture folders below
# are resolved relative to it.
TEST_DIRECTORY = os.path.dirname(__file__)
class Genbash(TestCase):
@classmethod
def setUpClass(cls):
    """One-time fixture set-up: resolve fixture folders, set the default
    genbash arguments used by the helper methods, and create an empty
    output directory."""
    # test dirs
    cls.KPARSE_INPUT_FOLDER = os.path.join(TEST_DIRECTORY, "kparse_input")
    cls.KPARSE_OUTPUT_FOLDER = os.path.join(TEST_DIRECTORY, "cov_kparse_output")
    cls.KPARSE_REFERENCE_FOLDER = os.path.join(TEST_DIRECTORY, "cov_kparse_reference")

    # defaults: the genbash()/gen_chunked_bash() helpers fall back to these
    # class attributes for any argument a test does not supply explicitly
    cls.ri_iterations = 0
    cls.gul_alloc_rule = 0
    cls.il_alloc_rule = 2
    cls.ri_alloc_rule = 2
    cls.num_gul_per_lb = 0
    cls.num_fm_per_lb = 0
    cls.event_shuffle = 1
    cls.fifo_tmp_dir = False
    cls.bash_trace = False
    cls.stderr_guard = False
    cls.gul_legacy_stream = True
    cls.fmpy = False

    # start every run with a clean output folder
    if os.path.exists(cls.KPARSE_OUTPUT_FOLDER):
        shutil.rmtree(cls.KPARSE_OUTPUT_FOLDER)
    os.makedirs(cls.KPARSE_OUTPUT_FOLDER)
def setUp(self):
    # No temporary reference file yet; tearDown only cleans one up when a
    # test has assigned it.
    self.temp_reference_file = None
def tearDown(self):
if self.temp_reference_file is not None:
# If already closed, no exception is raised
self.temp_reference_file.close()
os.remove(self.temp_reference_file.name)
def md5(self, fname):
hash_md5 = hashlib.md5()
with io.open(fname, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_md5.update(chunk)
return hash_md5.hexdigest()
def genbash(self, name, num_partitions,
            num_reinsurance_iterations=None,
            fifo_tmp_dir=None,
            stderr_guard=None,
            gul_alloc_rule=None,
            il_alloc_rule=None,
            ri_alloc_rule=None,
            bash_trace=None,
            gul_legacy_stream=None,
            fmpy=None):
    """Run the module-level genbash() for the named analysis-settings fixture.

    Reads '<name>.json' from KPARSE_INPUT_FOLDER and writes the generated
    script into KPARSE_OUTPUT_FOLDER (the filename encodes the partition
    count and, when set, the reinsurance iteration count).  Any keyword
    left as None falls back to the class-level default from setUpClass.
    """
    def _or_default(value, fallback):
        # The previous '(value or fallback)' pattern silently discarded
        # explicit falsy overrides such as gul_legacy_stream=False or
        # gul_alloc_rule=0; only None now means "use the class default".
        return fallback if value is None else value

    input_filename = os.path.join(self.KPARSE_INPUT_FOLDER, "{}.json".format(name))
    if not num_reinsurance_iterations:
        output_filename = os.path.join(
            self.KPARSE_OUTPUT_FOLDER,
            "{}_{}_partition.sh".format(name, num_partitions))
    else:
        output_filename = os.path.join(
            self.KPARSE_OUTPUT_FOLDER,
            "{}_{}_reins_layer_{}_partition.sh".format(name, num_reinsurance_iterations, num_partitions))

    with io.open(input_filename, encoding='utf-8') as file:
        analysis_settings = json.load(file)['analysis_settings']

    # Inside the method body, 'genbash' resolves to the module-level
    # function imported from oasislmf, not to this method.
    genbash(
        num_partitions,
        analysis_settings,
        filename=output_filename,
        num_reinsurance_iterations=_or_default(num_reinsurance_iterations, self.ri_iterations),
        fifo_tmp_dir=_or_default(fifo_tmp_dir, self.fifo_tmp_dir),
        stderr_guard=_or_default(stderr_guard, self.stderr_guard),
        gul_alloc_rule=_or_default(gul_alloc_rule, self.gul_alloc_rule),
        il_alloc_rule=_or_default(il_alloc_rule, self.il_alloc_rule),
        ri_alloc_rule=_or_default(ri_alloc_rule, self.ri_alloc_rule),
        num_gul_per_lb=self.num_gul_per_lb,
        num_fm_per_lb=self.num_fm_per_lb,
        event_shuffle=self.event_shuffle,
        bash_trace=_or_default(bash_trace, self.bash_trace),
        gul_legacy_stream=_or_default(gul_legacy_stream, self.gul_legacy_stream),
        fmpy=_or_default(fmpy, self.fmpy),
    )
def gen_chunked_bash(self, name,
num_partitions,
num_reinsurance_iterations=None,
fifo_tmp_dir=None,
stderr_guard=None,
gul_alloc_rule=None,
il_alloc_rule=None,
ri_alloc_rule=None,
bash_trace=None,
gul_legacy_stream=None,
fmpy=None):
input_filename = os.path.join(self.KPARSE_INPUT_FOLDER, "{}.json".format(name))
if not num_reinsurance_iterations:
output_filename = os.path.join(self.KPARSE_OUTPUT_FOLDER, "{}_{}_partition".format(name, num_partitions))
else:
output_filename = os.path.join(
self.KPARSE_OUTPUT_FOLDER,
"{}_{}_reins_layer_{}_partition".format(name, num_reinsurance_iterations, num_partitions))
with io.open(input_filename, encoding='utf-8') as file:
analysis_settings = json.load(file)['analysis_settings']
params = bash_params(
max_process_id=num_partitions,
analysis_settings=analysis_settings,
num_reinsurance_iterations=(num_reinsurance_iterations or self.ri_iterations),
fifo_tmp_dir=(fifo_tmp_dir or self.fifo_tmp_dir),
stderr_guard=(stderr_guard or self.stderr_guard),
gul_alloc_rule=(gul_alloc_rule or self.gul_alloc_rule),
il_alloc_rule=(il_alloc_rule or self.il_alloc_rule),
ri_alloc_rule=(ri_alloc_rule or self.ri_alloc_rule),
num_gul_per_lb=self.num_gul_per_lb,
num_fm_per_lb=self.num_fm_per_lb,
event_shuffle=self.event_shuffle,
bash_trace=(bash_trace or self.bash_trace),
gul_legacy_stream=(gul_legacy_stream or self.gul_legacy_stream),
fmpy=(fmpy or self.fmpy),
)
## debug
#print(json.dumps(params, indent=4))
fifo_tmp_dir = params['fifo_tmp_dir']
for process_id in range(num_partitions):
params['filename'] = f'{output_filename}.{process_id}.sh'
# remove the file if it already exists
if os.path.exists(params['filename']):
os.remove(params['filename'])
with bash_wrapper(params['filename'], bash_trace or self.bash_trace, stderr_guard or self.stderr_guard):
create_bash_analysis(
**{
**params,
'process_number': process_id + 1,
'fifo_tmp_dir': fifo_tmp_dir,
}
)
fifo_tmp_dir = False
# remove the file if it already exists
params['filename'] = f'{output_filename}.output.sh'
if os.path.exists(params['filename']):
os.remove(params['filename'])
with bash_wrapper(params['filename'], bash_trace or self.bash_trace, stderr_guard or self.stderr_guard):
create_bash_outputs(**params)
def check_chunks(self, name, num_partitions):
for i in range(num_partitions):
self.check(f'{name}.{i}')
self.check(f'{name}.output')
def check(self, name, reference_filename=None):
pass
output_filename = os.path.join(self.KPARSE_OUTPUT_FOLDER, "{}.sh".format(name))
if not reference_filename:
reference_filename = os.path.join(self.KPARSE_REFERENCE_FOLDER, "{}.sh".format(name))
if self.fifo_tmp_dir:
# Create temp Ref file
ref_template = reference_filename
ref_tmp_file = NamedTemporaryFile("w+", delete=False)
with io.open(output_filename, 'r') as f:
for line in f:
if '/tmp/' in line:
tmp_fifo_dir = line.split('/')[2]
break
# Replace placeholder '%FIFO_DIR%' with '<RandomDirName>'
with io.open(ref_template, 'r') as f:
ktools_script = f.read()
ktools_script = ktools_script.replace('%FIFO_DIR%', tmp_fifo_dir)
ref_tmp_file.write(ktools_script)
ref_tmp_file.close()
reference_filename = ref_tmp_file.name
d = diff.unified_diff(reference_filename, output_filename, as_string=True)
if d:
self.fail(d)
    # ------------------------------------------------------------------
    # Monolithic script checks: each test generates a single bash script
    # via `genbash` and diffs it against the stored reference via `check`.
    # ------------------------------------------------------------------
    def test_gul_summarycalc_1_partition(self):
        self.genbash("gul_summarycalc_1_output", 1)
        self.check("gul_summarycalc_1_output_1_partition")
    def test_gul_summarycalc_20_partition(self):
        self.genbash("gul_summarycalc_1_output", 20)
        self.check("gul_summarycalc_1_output_20_partition")
    def test_gul_eltcalc_1_partition(self):
        self.genbash("gul_eltcalc_1_output", 1)
        self.check("gul_eltcalc_1_output_1_partition")
    def test_gul_eltcalc_20_partition(self):
        self.genbash("gul_eltcalc_1_output", 20)
        self.check("gul_eltcalc_1_output_20_partition")
    def test_gul_aalcalc_1_partition(self):
        self.genbash("gul_aalcalc_1_output", 1)
        self.check("gul_aalcalc_1_output_1_partition")
    def test_gul_aalcalc_20_partition(self):
        self.genbash("gul_aalcalc_1_output", 20)
        self.check("gul_aalcalc_1_output_20_partition")
    def test_gul_pltcalc_1_partition(self):
        self.genbash("gul_pltcalc_1_output", 1)
        self.check("gul_pltcalc_1_output_1_partition")
    def test_gul_pltcalc_20_partition(self):
        self.genbash("gul_pltcalc_1_output", 20)
        self.check("gul_pltcalc_1_output_20_partition")
    def test_gul_agg_fu_lec_1_partition(self):
        self.genbash("gul_agg_fu_lec_1_output", 1)
        self.check("gul_agg_fu_lec_1_output_1_partition")
    def test_gul_agg_fu_lec_20_partition(self):
        self.genbash("gul_agg_fu_lec_1_output", 20)
        self.check("gul_agg_fu_lec_1_output_20_partition")
    def test_gul_occ_fu_lec_1_output_1_partition(self):
        self.genbash("gul_occ_fu_lec_1_output", 1)
        self.check("gul_occ_fu_lec_1_output_1_partition")
    def test_gul_occ_fu_lec_1_output_20_partition(self):
        self.genbash("gul_occ_fu_lec_1_output", 20)
        self.check("gul_occ_fu_lec_1_output_20_partition")
    def test_gul_agg_ws_lec_1_partition(self):
        self.genbash("gul_agg_ws_lec_1_output", 1)
        self.check("gul_agg_ws_lec_1_output_1_partition")
    def test_gul_agg_ws_lec_20_partition(self):
        self.genbash("gul_agg_ws_lec_1_output", 20)
        self.check("gul_agg_ws_lec_1_output_20_partition")
    def test_gul_occ_ws_lec_1_partition(self):
        self.genbash("gul_occ_ws_lec_1_output", 1)
        self.check("gul_occ_ws_lec_1_output_1_partition")
    def test_gul_occ_ws_lec_20_partition(self):
        self.genbash("gul_occ_ws_lec_1_output", 20)
        self.check("gul_occ_ws_lec_1_output_20_partition")
    def test_gul_agg_ws_mean_lec_1_partition(self):
        self.genbash("gul_agg_ws_mean_lec_1_output", 1)
        self.check("gul_agg_ws_mean_lec_1_output_1_partition")
    def test_gul_agg_ws_mean_lec_20_partition(self):
        self.genbash("gul_agg_ws_mean_lec_1_output", 20)
        self.check("gul_agg_ws_mean_lec_1_output_20_partition")
    def test_gul_occ_ws_mean_lec_1_partition(self):
        self.genbash("gul_occ_ws_mean_lec_1_output", 1)
        self.check("gul_occ_ws_mean_lec_1_output_1_partition")
    def test_gul_occ_ws_mean_lec_20_partition(self):
        self.genbash("gul_occ_ws_mean_lec_1_output", 20)
        self.check("gul_occ_ws_mean_lec_1_output_20_partition")
    def test_il_agg_sample_mean_lec_1_partition(self):
        self.genbash("il_agg_sample_mean_lec_1_output", 1)
        self.check("il_agg_sample_mean_lec_1_output_1_partition")
    def test_il_agg_sample_mean_lec_20_partition(self):
        self.genbash("il_agg_sample_mean_lec_1_output", 20)
        self.check("il_agg_sample_mean_lec_1_output_20_partition")
    def test_il_occ_sample_mean_lec_1_partition(self):
        self.genbash("il_occ_sample_mean_lec_1_output", 1)
        self.check("il_occ_sample_mean_lec_1_output_1_partition")
    def test_il_occ_sample_mean_lec_20_partition(self):
        self.genbash("il_occ_sample_mean_lec_1_output", 20)
        self.check("il_occ_sample_mean_lec_1_output_20_partition")
    def test_il_summarycalc_1_partition(self):
        self.genbash("il_summarycalc_1_output", 1)
        self.check("il_summarycalc_1_output_1_partition")
    def test_il_summarycalc_20_partition(self):
        self.genbash("il_summarycalc_1_output", 20)
        self.check("il_summarycalc_1_output_20_partition")
    def test_il_eltcalc_1_partition(self):
        self.genbash("il_eltcalc_1_output", 1)
        self.check("il_eltcalc_1_output_1_partition")
    def test_il_eltcalc_20_partition(self):
        self.genbash("il_eltcalc_1_output", 20)
        self.check("il_eltcalc_1_output_20_partition")
    def test_il_aalcalc_1_partition(self):
        self.genbash("il_aalcalc_1_output", 1)
        self.check("il_aalcalc_1_output_1_partition")
    def test_il_aalcalc_20_partition(self):
        self.genbash("il_aalcalc_1_output", 20)
        self.check("il_aalcalc_1_output_20_partition")
    def test_il_pltcalc_1_partition(self):
        self.genbash("il_pltcalc_1_output", 1)
        self.check("il_pltcalc_1_output_1_partition")
    def test_il_pltcalc_20_partition(self):
        self.genbash("il_pltcalc_1_output", 20)
        self.check("il_pltcalc_1_output_20_partition")
    def test_il_agg_fu_lec_1_partition(self):
        self.genbash("il_agg_fu_lec_1_output", 1)
        self.check("il_agg_fu_lec_1_output_1_partition")
    def test_il_agg_fu_lec_20_partition(self):
        self.genbash("il_agg_fu_lec_1_output", 20)
        self.check("il_agg_fu_lec_1_output_20_partition")
    def test_il_occ_fu_lec_1_output_1_partition(self):
        self.genbash("il_occ_fu_lec_1_output", 1)
        self.check("il_occ_fu_lec_1_output_1_partition")
    def test_il_occ_fu_lec_1_output_20_partition(self):
        self.genbash("il_occ_fu_lec_1_output", 20)
        self.check("il_occ_fu_lec_1_output_20_partition")
    def test_il_agg_ws_lec_1_partition(self):
        self.genbash("il_agg_ws_lec_1_output", 1)
        self.check("il_agg_ws_lec_1_output_1_partition")
    def test_il_agg_ws_lec_20_partition(self):
        self.genbash("il_agg_ws_lec_1_output", 20)
        self.check("il_agg_ws_lec_1_output_20_partition")
    def test_il_occ_ws_lec_1_partition(self):
        self.genbash("il_occ_ws_lec_1_output", 1)
        self.check("il_occ_ws_lec_1_output_1_partition")
    def test_il_occ_ws_lec_20_partition(self):
        self.genbash("il_occ_ws_lec_1_output", 20)
        self.check("il_occ_ws_lec_1_output_20_partition")
    def test_il_agg_ws_mean_lec_1_partition(self):
        self.genbash("il_agg_ws_mean_lec_1_output", 1)
        self.check("il_agg_ws_mean_lec_1_output_1_partition")
    def test_il_agg_ws_mean_lec_20_partition(self):
        self.genbash("il_agg_ws_mean_lec_1_output", 20)
        self.check("il_agg_ws_mean_lec_1_output_20_partition")
    def test_il_occ_ws_mean_lec_1_partition(self):
        self.genbash("il_occ_ws_mean_lec_1_output", 1)
        self.check("il_occ_ws_mean_lec_1_output_1_partition")
    def test_il_occ_ws_mean_lec_20_partition(self):
        self.genbash("il_occ_ws_mean_lec_1_output", 20)
        self.check("il_occ_ws_mean_lec_1_output_20_partition")
    # NOTE(review): the four tests below repeat the il_*_sample_mean_lec
    # coverage above under longer names; kept as-is for test discovery.
    def test_il_agg_sample_mean_lec_1_output_1_partition(self):
        self.genbash("il_agg_sample_mean_lec_1_output", 1)
        self.check("il_agg_sample_mean_lec_1_output_1_partition")
    def test_il_agg_sample_mean_lec_1_output_20_partition(self):
        self.genbash("il_agg_sample_mean_lec_1_output", 20)
        self.check("il_agg_sample_mean_lec_1_output_20_partition")
    def test_il_occ_sample_mean_lec_1_output_1_partition(self):
        self.genbash("il_occ_sample_mean_lec_1_output", 1)
        self.check("il_occ_sample_mean_lec_1_output_1_partition")
    def test_il_occ_sample_mean_lec_1_output_20_partition(self):
        self.genbash("il_occ_sample_mean_lec_1_output", 20)
        self.check("il_occ_sample_mean_lec_1_output_20_partition")
    def test_all_calcs_1_partition(self):
        self.genbash("all_calcs_1_output", 1)
        self.check("all_calcs_1_output_1_partition")
    def test_all_calcs_20_partition(self):
        self.genbash("all_calcs_1_output", 20)
        self.check("all_calcs_1_output_20_partition")
    def test_all_calcs_40_partition(self):
        self.genbash("all_calcs_1_output", 40)
        self.check("all_calcs_1_output_40_partition")
    def test_gul_no_lec_1_output_1_partition(self):
        self.genbash("gul_no_lec_1_output", 1)
        self.check("gul_no_lec_1_output_1_partition")
    def test_gul_no_lec_1_output_2_partition(self):
        self.genbash("gul_no_lec_1_output", 2)
        self.check("gul_no_lec_1_output_2_partition")
    def test_gul_no_lec_2_output_1_partition(self):
        self.genbash("gul_no_lec_2_output", 1)
        self.check("gul_no_lec_2_output_1_partition")
    def test_gul_no_lec_2_output_2_partitions(self):
        self.genbash("gul_no_lec_2_output", 2)
        self.check("gul_no_lec_2_output_2_partition")
    def test_gul_lec_1_output_1_partition(self):
        self.genbash("gul_lec_1_output", 1)
        self.check("gul_lec_1_output_1_partition")
    def test_gul_lec_1_output_2_partitions(self):
        self.genbash("gul_lec_1_output", 2)
        self.check("gul_lec_1_output_2_partition")
    def test_gul_lec_2_output_1_partition(self):
        self.genbash("gul_lec_2_output", 1)
        self.check("gul_lec_2_output_1_partition")
    def test_gul_lec_2_output_2_partitions(self):
        self.genbash("gul_lec_2_output", 2)
        self.check("gul_lec_2_output_2_partition")
    def test_il_no_lec_1_output_1_partition(self):
        self.genbash("il_no_lec_1_output", 1)
        self.check("il_no_lec_1_output_1_partition")
    def test_il_no_lec_1_output_2_partition(self):
        self.genbash("il_no_lec_1_output", 2)
        self.check("il_no_lec_1_output_2_partition")
    def test_il_no_lec_2_output_1_partition(self):
        self.genbash("il_no_lec_2_output", 1)
        self.check("il_no_lec_2_output_1_partition")
    def test_il_no_lec_2_output_2_partitions(self):
        self.genbash("il_no_lec_2_output", 2)
        self.check("il_no_lec_2_output_2_partition")
    def test_il_lec_1_output_1_partition(self):
        self.genbash("il_lec_1_output", 1)
        self.check("il_lec_1_output_1_partition")
    def test_il_lec_1_output_2_partitions(self):
        self.genbash("il_lec_1_output", 2)
        self.check("il_lec_1_output_2_partition")
    def test_il_lec_2_output_1_partition(self):
        self.genbash("il_lec_2_output", 1)
        self.check("il_lec_2_output_1_partition")
    def test_il_lec_2_output_2_partitions(self):
        self.genbash("il_lec_2_output", 2)
        self.check("il_lec_2_output_2_partition")
    def test_gul_il_no_lec_1_output_1_partition(self):
        self.genbash("gul_il_no_lec_1_output", 1)
        self.check("gul_il_no_lec_1_output_1_partition")
    def test_gul_il_no_lec_1_output_2_partition(self):
        self.genbash("gul_il_no_lec_1_output", 2)
        self.check("gul_il_no_lec_1_output_2_partition")
    def test_gul_il_no_lec_2_output_1_partition(self):
        self.genbash("gul_il_no_lec_2_output", 1)
        self.check("gul_il_no_lec_2_output_1_partition")
    def test_gul_il_no_lec_2_output_2_partitions(self):
        self.genbash("gul_il_no_lec_2_output", 2)
        self.check("gul_il_no_lec_2_output_2_partition")
    def test_gul_il_lec_1_output_1_partition(self):
        self.genbash("gul_il_lec_1_output", 1)
        self.check("gul_il_lec_1_output_1_partition")
    def test_gul_il_lec_1_output_2_partitions(self):
        self.genbash("gul_il_lec_1_output", 2)
        self.check("gul_il_lec_1_output_2_partition")
    def test_gul_il_lec_2_output_1_partition(self):
        self.genbash("gul_il_lec_2_output", 1)
        self.check("gul_il_lec_2_output_1_partition")
    def test_gul_il_lec_2_output_2_partitions(self):
        self.genbash("gul_il_lec_2_output", 2)
        self.check("gul_il_lec_2_output_2_partition")
    def test_gul_il_lec_2_output_10_partitions(self):
        self.genbash("gul_il_lec_2_output", 10)
        self.check("gul_il_lec_2_output_10_partition")
    # RI checks
    def test_analysis_settings_1(self):
        self.genbash("analysis_settings_1", 1)
        self.check("analysis_settings_1_1_partition")
    def test_analysis_settings_2(self):
        self.genbash("analysis_settings_2", 1)
        self.check("analysis_settings_2_1_partition")
    # NOTE(review): the "_0_reins_iters" suffixes below do not match the
    # single reinsurance iteration actually passed; the names look stale
    # but are kept to preserve test discovery.
    def test_analysis_settings_3_0_reins_iters(self):
        self.genbash("analysis_settings_3", 1, 1)
        self.check("analysis_settings_3_1_reins_layer_1_partition")
    def test_analysis_settings_4_0_reins_iters(self):
        self.genbash("analysis_settings_4", 1, 1)
        self.check("analysis_settings_4_1_reins_layer_1_partition")
    def test_analysis_settings_5_0_reins_iters(self):
        self.genbash("analysis_settings_5", 1, 1)
        self.check("analysis_settings_5_1_reins_layer_1_partition")
    # ORD checks
    def test_gul_ord_ept_1_output_1_partitions(self):
        self.genbash("gul_ord_ept_1_output", 1)
        self.check("gul_ord_ept_1_output_1_partition")
    def test_gul_ord_ept_1_output_20_partitions(self):
        self.genbash("gul_ord_ept_1_output", 20)
        self.check("gul_ord_ept_1_output_20_partition")
    def test_gul_ord_psept_2_output_10_partitions(self):
        self.genbash("gul_ord_psept_2_output", 10)
        self.check("gul_ord_psept_2_output_10_partition")
    def test_gul_ord_ept_psept_lec_2_output_10_partitions(self):
        self.genbash("gul_ord_ept_psept_lec_2_output", 10)
        self.check("gul_ord_ept_psept_lec_2_output_10_partition")
    def test_gul_il_ord_ept_psept_2_output_10_partitions(self):
        self.genbash("gul_il_ord_ept_psept_2_output", 10)
        self.check("gul_il_ord_ept_psept_2_output_10_partition")
    def test_gul_il_ord_psept_lec_1_output_10_partitions(self):
        self.genbash("gul_il_ord_psept_lec_1_output", 10)
        self.check("gul_il_ord_psept_lec_1_output_10_partition")
    def test_gul_ord_palt_output_10_partitions(self):
        self.genbash("gul_ord_palt_output", 10)
        self.check("gul_ord_palt_output_10_partition")
    def test_gul_il_ord_palt_output_10_partitions(self):
        self.genbash("gul_il_ord_palt_output", 10)
        self.check("gul_il_ord_palt_output_10_partition")
    # =============================================================================
    # chunked analysis checks: each test generates one script per partition
    # plus an output script via `gen_chunked_bash`, then diffs each chunk
    # against its reference via `check_chunks`.
    # =============================================================================
    def test_gul_summarycalc_1_partition_chunk(self):
        self.gen_chunked_bash("gul_summarycalc_1_output", 1)
        self.check_chunks("gul_summarycalc_1_output_1_partition", 1)
    def test_gul_summarycalc_20_partition_chunk(self):
        self.gen_chunked_bash("gul_summarycalc_1_output", 20)
        self.check_chunks("gul_summarycalc_1_output_20_partition", 20)
    def test_gul_eltcalc_1_partition_chunk(self):
        self.gen_chunked_bash("gul_eltcalc_1_output", 1)
        self.check_chunks("gul_eltcalc_1_output_1_partition", 1)
    def test_gul_eltcalc_20_partition_chunk(self):
        self.gen_chunked_bash("gul_eltcalc_1_output", 20)
        self.check_chunks("gul_eltcalc_1_output_20_partition", 20)
    def test_gul_aalcalc_1_partition_chunk(self):
        self.gen_chunked_bash("gul_aalcalc_1_output", 1)
        self.check_chunks("gul_aalcalc_1_output_1_partition", 1)
    def test_gul_aalcalc_20_partition_chunk(self):
        self.gen_chunked_bash("gul_aalcalc_1_output", 20)
        self.check_chunks("gul_aalcalc_1_output_20_partition", 20)
    def test_gul_pltcalc_1_partition_chunk(self):
        self.gen_chunked_bash("gul_pltcalc_1_output", 1)
        self.check_chunks("gul_pltcalc_1_output_1_partition", 1)
    def test_gul_pltcalc_20_partition_chunk(self):
        self.gen_chunked_bash("gul_pltcalc_1_output", 20)
        self.check_chunks("gul_pltcalc_1_output_20_partition", 20)
    def test_gul_agg_fu_lec_1_partition_chunk(self):
        self.gen_chunked_bash("gul_agg_fu_lec_1_output", 1)
        self.check_chunks("gul_agg_fu_lec_1_output_1_partition", 1)
    def test_gul_agg_fu_lec_20_partition_chunk(self):
        self.gen_chunked_bash("gul_agg_fu_lec_1_output", 20)
        self.check_chunks("gul_agg_fu_lec_1_output_20_partition", 20)
    def test_gul_occ_fu_lec_1_output_1_partition_chunk(self):
        self.gen_chunked_bash("gul_occ_fu_lec_1_output", 1)
        self.check_chunks("gul_occ_fu_lec_1_output_1_partition", 1)
    def test_gul_occ_fu_lec_1_output_20_partition_chunk(self):
        self.gen_chunked_bash("gul_occ_fu_lec_1_output", 20)
        self.check_chunks("gul_occ_fu_lec_1_output_20_partition", 20)
    def test_gul_agg_ws_lec_1_partition_chunk(self):
        self.gen_chunked_bash("gul_agg_ws_lec_1_output", 1)
        self.check_chunks("gul_agg_ws_lec_1_output_1_partition", 1)
    def test_gul_agg_ws_lec_20_partition_chunk(self):
        self.gen_chunked_bash("gul_agg_ws_lec_1_output", 20)
        self.check_chunks("gul_agg_ws_lec_1_output_20_partition", 20)
    def test_gul_occ_ws_lec_1_partition_chunk(self):
        self.gen_chunked_bash("gul_occ_ws_lec_1_output", 1)
        self.check_chunks("gul_occ_ws_lec_1_output_1_partition", 1)
    def test_gul_occ_ws_lec_20_partition_chunk(self):
        self.gen_chunked_bash("gul_occ_ws_lec_1_output", 20)
        self.check_chunks("gul_occ_ws_lec_1_output_20_partition", 20)
    def test_gul_agg_ws_mean_lec_1_partition_chunk(self):
        self.gen_chunked_bash("gul_agg_ws_mean_lec_1_output", 1)
        self.check_chunks("gul_agg_ws_mean_lec_1_output_1_partition", 1)
    def test_gul_agg_ws_mean_lec_20_partition_chunk(self):
        self.gen_chunked_bash("gul_agg_ws_mean_lec_1_output", 20)
        self.check_chunks("gul_agg_ws_mean_lec_1_output_20_partition", 20)
    def test_gul_occ_ws_mean_lec_1_partition_chunk(self):
        self.gen_chunked_bash("gul_occ_ws_mean_lec_1_output", 1)
        self.check_chunks("gul_occ_ws_mean_lec_1_output_1_partition", 1)
    def test_gul_occ_ws_mean_lec_20_partition_chunk(self):
        self.gen_chunked_bash("gul_occ_ws_mean_lec_1_output", 20)
        self.check_chunks("gul_occ_ws_mean_lec_1_output_20_partition", 20)
    def test_il_agg_sample_mean_lec_1_partition_chunk(self):
        self.gen_chunked_bash("il_agg_sample_mean_lec_1_output", 1)
        self.check_chunks("il_agg_sample_mean_lec_1_output_1_partition", 1)
    def test_il_agg_sample_mean_lec_20_partition_chunk(self):
        self.gen_chunked_bash("il_agg_sample_mean_lec_1_output", 20)
        self.check_chunks("il_agg_sample_mean_lec_1_output_20_partition", 20)
    def test_il_occ_sample_mean_lec_1_partition_chunk(self):
        self.gen_chunked_bash("il_occ_sample_mean_lec_1_output", 1)
        self.check_chunks("il_occ_sample_mean_lec_1_output_1_partition", 1)
    def test_il_occ_sample_mean_lec_20_partition_chunk(self):
        self.gen_chunked_bash("il_occ_sample_mean_lec_1_output", 20)
        self.check_chunks("il_occ_sample_mean_lec_1_output_20_partition", 20)
    def test_il_summarycalc_1_partition_chunk(self):
        self.gen_chunked_bash("il_summarycalc_1_output", 1)
        self.check_chunks("il_summarycalc_1_output_1_partition", 1)
    def test_il_summarycalc_20_partition_chunk(self):
        self.gen_chunked_bash("il_summarycalc_1_output", 20)
        self.check_chunks("il_summarycalc_1_output_20_partition", 20)
    def test_il_eltcalc_1_partition_chunk(self):
        self.gen_chunked_bash("il_eltcalc_1_output", 1)
        self.check_chunks("il_eltcalc_1_output_1_partition", 1)
    def test_il_eltcalc_20_partition_chunk(self):
        self.gen_chunked_bash("il_eltcalc_1_output", 20)
        self.check_chunks("il_eltcalc_1_output_20_partition", 20)
    def test_il_aalcalc_1_partition_chunk(self):
        self.gen_chunked_bash("il_aalcalc_1_output", 1)
        self.check_chunks("il_aalcalc_1_output_1_partition", 1)
    def test_il_aalcalc_20_partition_chunk(self):
        self.gen_chunked_bash("il_aalcalc_1_output", 20)
        self.check_chunks("il_aalcalc_1_output_20_partition", 20)
    def test_il_pltcalc_1_partition_chunk(self):
        self.gen_chunked_bash("il_pltcalc_1_output", 1)
        self.check_chunks("il_pltcalc_1_output_1_partition", 1)
    def test_il_pltcalc_20_partition_chunk(self):
        self.gen_chunked_bash("il_pltcalc_1_output", 20)
        self.check_chunks("il_pltcalc_1_output_20_partition", 20)
    def test_il_agg_fu_lec_1_partition_chunk(self):
        self.gen_chunked_bash("il_agg_fu_lec_1_output", 1)
        self.check_chunks("il_agg_fu_lec_1_output_1_partition", 1)
    def test_il_agg_fu_lec_20_partition_chunk(self):
        self.gen_chunked_bash("il_agg_fu_lec_1_output", 20)
        self.check_chunks("il_agg_fu_lec_1_output_20_partition", 20)
    def test_il_occ_fu_lec_1_output_1_partition_chunk(self):
        self.gen_chunked_bash("il_occ_fu_lec_1_output", 1)
        self.check_chunks("il_occ_fu_lec_1_output_1_partition", 1)
    def test_il_occ_fu_lec_1_output_20_partition_chunk(self):
        self.gen_chunked_bash("il_occ_fu_lec_1_output", 20)
        self.check_chunks("il_occ_fu_lec_1_output_20_partition", 20)
    def test_il_agg_ws_lec_1_partition_chunk(self):
        self.gen_chunked_bash("il_agg_ws_lec_1_output", 1)
        self.check_chunks("il_agg_ws_lec_1_output_1_partition", 1)
    def test_il_agg_ws_lec_20_partition_chunk(self):
        self.gen_chunked_bash("il_agg_ws_lec_1_output", 20)
        self.check_chunks("il_agg_ws_lec_1_output_20_partition", 20)
    def test_il_occ_ws_lec_1_partition_chunk(self):
        self.gen_chunked_bash("il_occ_ws_lec_1_output", 1)
        self.check_chunks("il_occ_ws_lec_1_output_1_partition", 1)
    def test_il_occ_ws_lec_20_partition_chunk(self):
        self.gen_chunked_bash("il_occ_ws_lec_1_output", 20)
        self.check_chunks("il_occ_ws_lec_1_output_20_partition", 20)
    def test_il_agg_ws_mean_lec_1_partition_chunk(self):
        self.gen_chunked_bash("il_agg_ws_mean_lec_1_output", 1)
        self.check_chunks("il_agg_ws_mean_lec_1_output_1_partition", 1)
    def test_il_agg_ws_mean_lec_20_partition_chunk(self):
        self.gen_chunked_bash("il_agg_ws_mean_lec_1_output", 20)
        self.check_chunks("il_agg_ws_mean_lec_1_output_20_partition", 20)
    def test_il_occ_ws_mean_lec_1_partition_chunk(self):
        self.gen_chunked_bash("il_occ_ws_mean_lec_1_output", 1)
        self.check_chunks("il_occ_ws_mean_lec_1_output_1_partition", 1)
    def test_il_occ_ws_mean_lec_20_partition_chunk(self):
        self.gen_chunked_bash("il_occ_ws_mean_lec_1_output", 20)
        self.check_chunks("il_occ_ws_mean_lec_1_output_20_partition", 20)
    def test_il_agg_sample_mean_lec_1_output_1_partition_chunk(self):
        self.gen_chunked_bash("il_agg_sample_mean_lec_1_output", 1)
        self.check_chunks("il_agg_sample_mean_lec_1_output_1_partition", 1)
    def test_il_agg_sample_mean_lec_1_output_20_partition_chunk(self):
        self.gen_chunked_bash("il_agg_sample_mean_lec_1_output", 20)
        self.check_chunks("il_agg_sample_mean_lec_1_output_20_partition", 20)
    def test_il_occ_sample_mean_lec_1_output_1_partition_chunk(self):
        self.gen_chunked_bash("il_occ_sample_mean_lec_1_output", 1)
        self.check_chunks("il_occ_sample_mean_lec_1_output_1_partition", 1)
    def test_il_occ_sample_mean_lec_1_output_20_partition_chunk(self):
        self.gen_chunked_bash("il_occ_sample_mean_lec_1_output", 20)
        self.check_chunks("il_occ_sample_mean_lec_1_output_20_partition", 20)
    def test_all_calcs_1_partition_chunk(self):
        self.gen_chunked_bash("all_calcs_1_output", 1)
        self.check_chunks("all_calcs_1_output_1_partition", 1)
    def test_all_calcs_20_partition_chunk(self):
        self.gen_chunked_bash("all_calcs_1_output", 20)
        self.check_chunks("all_calcs_1_output_20_partition", 20)
    def test_all_calcs_40_partition_chunk(self):
        self.gen_chunked_bash("all_calcs_1_output", 40)
        self.check_chunks("all_calcs_1_output_40_partition", 40)
    def test_gul_no_lec_1_output_1_partition_chunk(self):
        self.gen_chunked_bash("gul_no_lec_1_output", 1)
        self.check_chunks("gul_no_lec_1_output_1_partition", 1)
    def test_gul_no_lec_1_output_2_partition_chunk(self):
        self.gen_chunked_bash("gul_no_lec_1_output", 2)
        self.check_chunks("gul_no_lec_1_output_2_partition", 2)
    def test_gul_no_lec_2_output_1_partition_chunk(self):
        self.gen_chunked_bash("gul_no_lec_2_output", 1)
        self.check_chunks("gul_no_lec_2_output_1_partition", 1)
    def test_gul_no_lec_2_output_2_partitions_chunk(self):
        self.gen_chunked_bash("gul_no_lec_2_output", 2)
        self.check_chunks("gul_no_lec_2_output_2_partition", 2)
    def test_gul_lec_1_output_1_partition_chunk(self):
        self.gen_chunked_bash("gul_lec_1_output", 1)
        self.check_chunks("gul_lec_1_output_1_partition", 1)
    def test_gul_lec_1_output_2_partitions_chunk(self):
        self.gen_chunked_bash("gul_lec_1_output", 2)
        self.check_chunks("gul_lec_1_output_2_partition", 2)
    def test_gul_lec_2_output_1_partition_chunk(self):
        self.gen_chunked_bash("gul_lec_2_output", 1)
        self.check_chunks("gul_lec_2_output_1_partition", 1)
    def test_gul_lec_2_output_2_partitions_chunk(self):
        self.gen_chunked_bash("gul_lec_2_output", 2)
        self.check_chunks("gul_lec_2_output_2_partition", 2)
    def test_il_no_lec_1_output_1_partition_chunk(self):
        self.gen_chunked_bash("il_no_lec_1_output", 1)
        self.check_chunks("il_no_lec_1_output_1_partition", 1)
    def test_il_no_lec_1_output_2_partition_chunk(self):
        self.gen_chunked_bash("il_no_lec_1_output", 2)
        self.check_chunks("il_no_lec_1_output_2_partition", 2)
    def test_il_no_lec_2_output_1_partition_chunk(self):
        self.gen_chunked_bash("il_no_lec_2_output", 1)
        self.check_chunks("il_no_lec_2_output_1_partition", 1)
    def test_il_no_lec_2_output_2_partitions_chunk(self):
        self.gen_chunked_bash("il_no_lec_2_output", 2)
        self.check_chunks("il_no_lec_2_output_2_partition", 2)
    def test_il_lec_1_output_1_partition_chunk(self):
        self.gen_chunked_bash("il_lec_1_output", 1)
        self.check_chunks("il_lec_1_output_1_partition", 1)
    def test_il_lec_1_output_2_partitions_chunk(self):
        self.gen_chunked_bash("il_lec_1_output", 2)
        self.check_chunks("il_lec_1_output_2_partition", 2)
    def test_il_lec_2_output_1_partition_chunk(self):
        self.gen_chunked_bash("il_lec_2_output", 1)
        self.check_chunks("il_lec_2_output_1_partition", 1)
    def test_il_lec_2_output_2_partitions_chunk(self):
        self.gen_chunked_bash("il_lec_2_output", 2)
        self.check_chunks("il_lec_2_output_2_partition", 2)
    def test_gul_il_no_lec_1_output_1_partition_chunk(self):
        self.gen_chunked_bash("gul_il_no_lec_1_output", 1)
        self.check_chunks("gul_il_no_lec_1_output_1_partition", 1)
    def test_gul_il_no_lec_1_output_2_partition_chunk(self):
        self.gen_chunked_bash("gul_il_no_lec_1_output", 2)
        self.check_chunks("gul_il_no_lec_1_output_2_partition", 2)
    def test_gul_il_no_lec_2_output_1_partition_chunk(self):
        self.gen_chunked_bash("gul_il_no_lec_2_output", 1)
        self.check_chunks("gul_il_no_lec_2_output_1_partition", 1)
    def test_gul_il_no_lec_2_output_2_partitions_chunk(self):
        self.gen_chunked_bash("gul_il_no_lec_2_output", 2)
        self.check_chunks("gul_il_no_lec_2_output_2_partition", 2)
    def test_gul_il_lec_1_output_1_partition_chunk(self):
        self.gen_chunked_bash("gul_il_lec_1_output", 1)
        self.check_chunks("gul_il_lec_1_output_1_partition", 1)
    def test_gul_il_lec_1_output_2_partitions_chunk(self):
        self.gen_chunked_bash("gul_il_lec_1_output", 2)
        self.check_chunks("gul_il_lec_1_output_2_partition", 2)
    def test_gul_il_lec_2_output_1_partition_chunk(self):
        self.gen_chunked_bash("gul_il_lec_2_output", 1)
        self.check_chunks("gul_il_lec_2_output_1_partition", 1)
    def test_gul_il_lec_2_output_2_partitions_chunk(self):
        self.gen_chunked_bash("gul_il_lec_2_output", 2)
        self.check_chunks("gul_il_lec_2_output_2_partition", 2)
    def test_gul_il_lec_2_output_10_partitions_chunk(self):
        self.gen_chunked_bash("gul_il_lec_2_output", 10)
        self.check_chunks("gul_il_lec_2_output_10_partition", 10)
    def test_analysis_settings_1_chunk(self):
        self.gen_chunked_bash("analysis_settings_1", 1)
        self.check_chunks("analysis_settings_1_1_partition", 1)
    def test_analysis_settings_2_chunk(self):
        self.gen_chunked_bash("analysis_settings_2", 1)
        self.check_chunks("analysis_settings_2_1_partition", 1)
    # NOTE(review): unlike the monolithic RI section there is no chunked
    # analysis_settings_5 test here — possibly an omission; confirm before adding.
    def test_analysis_settings_3_0_reins_iters_chunk(self):
        self.gen_chunked_bash("analysis_settings_3", 1, 1)
        self.check_chunks("analysis_settings_3_1_reins_layer_1_partition", 1)
    def test_analysis_settings_4_0_reins_iters_chunk(self):
        self.gen_chunked_bash("analysis_settings_4", 1, 1)
        self.check_chunks("analysis_settings_4_1_reins_layer_1_partition", 1)
class Genbash_GulItemStream(Genbash):
    """Genbash suite re-run with the (non-legacy) GUL item-stream settings."""

    @classmethod
    def setUpClass(cls):
        # Directories for inputs, generated scripts and reference scripts.
        cls.KPARSE_INPUT_FOLDER = os.path.join(TEST_DIRECTORY, "kparse_input")
        cls.KPARSE_OUTPUT_FOLDER = os.path.join(TEST_DIRECTORY, "itm_kparse_output")
        cls.KPARSE_REFERENCE_FOLDER = os.path.join(TEST_DIRECTORY, "itm_kparse_reference")
        # Generation options for this variant.
        for option, value in (
            ('ri_iterations', 0),
            ('gul_alloc_rule', 1),
            ('il_alloc_rule', 2),
            ('ri_alloc_rule', 3),
            ('fifo_tmp_dir', False),
            ('bash_trace', False),
            ('stderr_guard', False),
            ('gul_legacy_stream', False),
        ):
            setattr(cls, option, value)
        # Start each run from an empty output folder.
        if os.path.exists(cls.KPARSE_OUTPUT_FOLDER):
            shutil.rmtree(cls.KPARSE_OUTPUT_FOLDER)
        os.makedirs(cls.KPARSE_OUTPUT_FOLDER)
class Genbash_ErrorGuard(Genbash):
    """Genbash suite re-run with stderr guarding switched on."""

    @classmethod
    def setUpClass(cls):
        # Directories for inputs, generated scripts and reference scripts.
        cls.KPARSE_INPUT_FOLDER = os.path.join(TEST_DIRECTORY, "kparse_input")
        cls.KPARSE_OUTPUT_FOLDER = os.path.join(TEST_DIRECTORY, "err_kparse_output")
        cls.KPARSE_REFERENCE_FOLDER = os.path.join(TEST_DIRECTORY, "err_kparse_reference")
        # Generation options for this variant (stderr_guard enabled).
        for option, value in (
            ('ri_iterations', 0),
            ('gul_alloc_rule', 1),
            ('il_alloc_rule', 2),
            ('ri_alloc_rule', 3),
            ('fifo_tmp_dir', False),
            ('bash_trace', False),
            ('stderr_guard', True),
            ('gul_legacy_stream', False),
        ):
            setattr(cls, option, value)
        # Start each run from an empty output folder.
        if os.path.exists(cls.KPARSE_OUTPUT_FOLDER):
            shutil.rmtree(cls.KPARSE_OUTPUT_FOLDER)
        os.makedirs(cls.KPARSE_OUTPUT_FOLDER)
class Genbash_TempDir(Genbash):
    """Genbash suite re-run with FIFOs placed in a temporary directory."""

    @classmethod
    def setUpClass(cls):
        # Directories for inputs, generated scripts and reference scripts.
        cls.KPARSE_INPUT_FOLDER = os.path.join(TEST_DIRECTORY, "kparse_input")
        cls.KPARSE_OUTPUT_FOLDER = os.path.join(TEST_DIRECTORY, "tmp_kparse_output")
        cls.KPARSE_REFERENCE_FOLDER = os.path.join(TEST_DIRECTORY, "tmp_kparse_reference")
        # Generation options for this variant (fifo_tmp_dir enabled).
        for option, value in (
            ('ri_iterations', 0),
            ('gul_alloc_rule', 1),
            ('il_alloc_rule', 2),
            ('ri_alloc_rule', 3),
            ('fifo_tmp_dir', True),
            ('bash_trace', False),
            ('stderr_guard', False),
            ('gul_legacy_stream', False),
        ):
            setattr(cls, option, value)
        # Start each run from an empty output folder.
        if os.path.exists(cls.KPARSE_OUTPUT_FOLDER):
            shutil.rmtree(cls.KPARSE_OUTPUT_FOLDER)
        os.makedirs(cls.KPARSE_OUTPUT_FOLDER)
class Genbash_FullCorrItemStream(Genbash):
    """Genbash suite re-run on the full-correlation inputs (item stream)."""

    @classmethod
    def setUpClass(cls):
        # Directories for the full-correlation inputs and this variant's outputs.
        cls.KPARSE_INPUT_FOLDER = os.path.join(TEST_DIRECTORY, "fc_kparse_input")
        cls.KPARSE_OUTPUT_FOLDER = os.path.join(TEST_DIRECTORY, "itm_fc_kparse_output")
        cls.KPARSE_REFERENCE_FOLDER = os.path.join(TEST_DIRECTORY, "itm_fc_kparse_reference")
        # Generation options for this variant.
        for option, value in (
            ('ri_iterations', 0),
            ('gul_alloc_rule', 1),
            ('il_alloc_rule', 2),
            ('ri_alloc_rule', 3),
            ('fifo_tmp_dir', False),
            ('bash_trace', False),
            ('stderr_guard', False),
            ('gul_legacy_stream', False),
        ):
            setattr(cls, option, value)
        # Start each run from an empty output folder.
        if os.path.exists(cls.KPARSE_OUTPUT_FOLDER):
            shutil.rmtree(cls.KPARSE_OUTPUT_FOLDER)
        os.makedirs(cls.KPARSE_OUTPUT_FOLDER)
class Genbash_FullCorrErrorGuard(Genbash):
    """Genbash suite re-run on full-correlation inputs with stderr guarding."""

    @classmethod
    def setUpClass(cls):
        # Directories for the full-correlation inputs and this variant's outputs.
        cls.KPARSE_INPUT_FOLDER = os.path.join(TEST_DIRECTORY, "fc_kparse_input")
        cls.KPARSE_OUTPUT_FOLDER = os.path.join(TEST_DIRECTORY, "err_fc_kparse_output")
        cls.KPARSE_REFERENCE_FOLDER = os.path.join(TEST_DIRECTORY, "err_fc_kparse_reference")
        # Generation options for this variant (stderr_guard enabled).
        for option, value in (
            ('ri_iterations', 0),
            ('gul_alloc_rule', 1),
            ('il_alloc_rule', 2),
            ('ri_alloc_rule', 3),
            ('fifo_tmp_dir', False),
            ('bash_trace', False),
            ('stderr_guard', True),
            ('gul_legacy_stream', False),
        ):
            setattr(cls, option, value)
        # Start each run from an empty output folder.
        if os.path.exists(cls.KPARSE_OUTPUT_FOLDER):
            shutil.rmtree(cls.KPARSE_OUTPUT_FOLDER)
        os.makedirs(cls.KPARSE_OUTPUT_FOLDER)
class Genbash_FullCorrTempDir(Genbash):
    """Genbash suite variant using the ``fc`` fixtures with ``fifo_tmp_dir`` enabled."""

    @classmethod
    def setUpClass(cls):
        # Fixture directories for this variant.
        cls.KPARSE_INPUT_FOLDER = os.path.join(TEST_DIRECTORY, "fc_kparse_input")
        cls.KPARSE_OUTPUT_FOLDER = os.path.join(TEST_DIRECTORY, "tmp_fc_kparse_output")
        cls.KPARSE_REFERENCE_FOLDER = os.path.join(TEST_DIRECTORY, "tmp_fc_kparse_reference")
        # Run options for this variant, applied in one place.
        options = {
            "ri_iterations": 0,
            "gul_alloc_rule": 1,
            "il_alloc_rule": 2,
            "ri_alloc_rule": 3,
            "fifo_tmp_dir": True,
            "bash_trace": False,
            "stderr_guard": False,
            "gul_legacy_stream": False,
        }
        for option, value in options.items():
            setattr(cls, option, value)
        # Start every run from an empty output folder.
        if os.path.exists(cls.KPARSE_OUTPUT_FOLDER):
            shutil.rmtree(cls.KPARSE_OUTPUT_FOLDER)
        os.makedirs(cls.KPARSE_OUTPUT_FOLDER)
class Genbash_LoadBanlancerFmpy(Genbash):
    """Genbash suite variant exercising the load balancer with ``fmpy`` enabled.

    NOTE(review): "Banlancer" is a typo, but the class name is public API for
    the test runner, so it is kept as-is.
    """

    @classmethod
    def setUpClass(cls):
        # Fixture directories for this variant.
        cls.KPARSE_INPUT_FOLDER = os.path.join(TEST_DIRECTORY, "kparse_input")
        cls.KPARSE_OUTPUT_FOLDER = os.path.join(TEST_DIRECTORY, "lb_kparse_output")
        cls.KPARSE_REFERENCE_FOLDER = os.path.join(TEST_DIRECTORY, "lb_kparse_reference")
        # Run options for this variant, applied in one place.
        options = {
            "ri_iterations": 0,
            "gul_alloc_rule": 0,
            "il_alloc_rule": 2,
            "ri_alloc_rule": 2,
            "num_gul_per_lb": 2,
            "num_fm_per_lb": 2,
            "fifo_tmp_dir": False,
            "bash_trace": False,
            "stderr_guard": False,
            "gul_legacy_stream": False,
            "fmpy": True,
        }
        for option, value in options.items():
            setattr(cls, option, value)
        # Start every run from an empty output folder.
        if os.path.exists(cls.KPARSE_OUTPUT_FOLDER):
            shutil.rmtree(cls.KPARSE_OUTPUT_FOLDER)
        os.makedirs(cls.KPARSE_OUTPUT_FOLDER)
class Genbash_EventShuffle(Genbash):
    """Genbash suite variant run with ``event_shuffle`` set to rule 3."""

    @classmethod
    def setUpClass(cls):
        # Fixture directories for this variant.
        cls.KPARSE_INPUT_FOLDER = os.path.join(TEST_DIRECTORY, "kparse_input")
        cls.KPARSE_OUTPUT_FOLDER = os.path.join(TEST_DIRECTORY, "eve_kparse_output")
        cls.KPARSE_REFERENCE_FOLDER = os.path.join(TEST_DIRECTORY, "eve_kparse_reference")
        # Run options for this variant, applied in one place.
        options = {
            "ri_iterations": 0,
            "gul_alloc_rule": 0,
            "il_alloc_rule": 2,
            "ri_alloc_rule": 2,
            "num_gul_per_lb": 2,
            "num_fm_per_lb": 2,
            "event_shuffle": 3,
            "fifo_tmp_dir": False,
            "bash_trace": False,
            "stderr_guard": False,
            "gul_legacy_stream": False,
        }
        for option, value in options.items():
            setattr(cls, option, value)
        # Start every run from an empty output folder.
        if os.path.exists(cls.KPARSE_OUTPUT_FOLDER):
            shutil.rmtree(cls.KPARSE_OUTPUT_FOLDER)
        os.makedirs(cls.KPARSE_OUTPUT_FOLDER)
| 40.884507
| 120
| 0.717261
| 6,764
| 43,542
| 4.059432
| 0.028533
| 0.076735
| 0.079758
| 0.043266
| 0.938197
| 0.92738
| 0.907022
| 0.843142
| 0.744665
| 0.636681
| 0
| 0.036364
| 0.194111
| 43,542
| 1,064
| 121
| 40.922932
| 0.746139
| 0.0127
| 0
| 0.278121
| 0
| 0
| 0.241609
| 0.19075
| 0
| 0
| 0
| 0
| 0
| 1
| 0.228677
| false
| 0.001236
| 0.011125
| 0
| 0.252163
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
da9ad3270b34c42e9aec4e3dd748608563410452
| 11,044
|
py
|
Python
|
ui/send_json.py
|
globocom/gsenha
|
ba057d03fa68cdc608a0ea31000de817f5d88098
|
[
"MIT"
] | 22
|
2016-07-08T19:31:54.000Z
|
2022-03-21T18:45:34.000Z
|
ui/send_json.py
|
globocom/gsenha
|
ba057d03fa68cdc608a0ea31000de817f5d88098
|
[
"MIT"
] | 8
|
2021-02-22T14:53:48.000Z
|
2022-03-29T22:27:50.000Z
|
ui/send_json.py
|
globocom/gsenha
|
ba057d03fa68cdc608a0ea31000de817f5d88098
|
[
"MIT"
] | 6
|
2016-09-12T07:40:16.000Z
|
2021-09-19T18:35:34.000Z
|
# -*- coding: utf-8 -*-
import json, requests, sys, settings
class SendJson:
    """Thin HTTP client for the gsenha API.

    Every ``send_*`` method issues exactly one ``requests`` call and returns
    the raw ``requests.Response``; callers are responsible for status/error
    checking.  All requests use TLS verification (``verify=True``).
    """

    # Headers used whenever a JSON body is POSTed.
    _JSON_HEADERS = {'Content-type': 'application/json', 'Accept': 'text/plain'}

    # Sentinel so ``token=None`` still produces an "Authorization: Bearer None"
    # header, exactly as the original code did.
    _NO_TOKEN = object()

    def _auth_headers(self, token, json_content=False):
        """Build request headers carrying the Bearer *token*.

        When *json_content* is true, the JSON content-type headers are merged in.
        """
        headers = {'Authorization': "Bearer " + str(token)}
        if json_content:
            headers.update(self._JSON_HEADERS)
        return headers

    def _get(self, url, token):
        """Authenticated GET; returns the raw response."""
        return requests.get(url, headers=self._auth_headers(token), verify=True)

    def _post_json(self, url, data, token=_NO_TOKEN):
        """POST *data* serialized as JSON; adds Bearer auth unless no token given."""
        if token is self._NO_TOKEN:
            headers = dict(self._JSON_HEADERS)
        else:
            headers = self._auth_headers(token, json_content=True)
        return requests.post(url, data=json.dumps(data), headers=headers, verify=True)

    def send_get_passwords(self, token):
        """Fetch the password list for the authenticated user."""
        return self._get(settings.URL_GSENHA_PASSWORDS, token)

    def send_login(self, user, passwd):
        """Authenticate and obtain a session token (unauthenticated POST)."""
        return self._post_json(settings.URL_GSENHA_LOGIN,
                               {"username": user, "password": passwd})

    def send_add_user(self, user, passwd, pk):
        """Register a new user together with their public key (unauthenticated POST)."""
        return self._post_json(settings.URL_GSENHA_USER,
                               {"user": user, "password": passwd, "pubkey": pk})

    def send_add_password_personal(self, token, name, passwd, folder, login, url, description):
        """Create a personal password entry that includes a URL."""
        data = {"name": name, "passwd": passwd, "folder": folder,
                "login": login, "url": url, "description": description}
        return self._post_json(settings.URL_GSENHA_ADDPERSONAL, data, token)

    def send_add_password_personal_url(self, token, name, passwd, folder, login, description):
        """Create a personal password entry without a URL field."""
        data = {"name": name, "passwd": passwd, "folder": folder,
                "login": login, "description": description}
        return self._post_json(settings.URL_GSENHA_ADDPERSONAL, data, token)

    def send_add_password_group(self, token, name, passwd, folder, group, login, url, description):
        """Create a shared (group) password entry that includes a URL."""
        data = {"name": name, "passwd": passwd, "group": group, "folder": folder,
                "login": login, "url": url, "description": description}
        return self._post_json(settings.URL_GSENHA_ADDSHARED, data, token)

    def send_add_password_group_url(self, token, name, passwd, folder, group, login, description):
        """Create a shared (group) password entry without a URL field."""
        data = {"name": name, "passwd": passwd, "group": group, "folder": folder,
                "login": login, "description": description}
        return self._post_json(settings.URL_GSENHA_ADDSHARED, data, token)

    def send_add_password_personal_ext(self, token, name, passwd, username, login, url, description):
        """Create a personal password entry for an external user, with URL."""
        data = {"name": name, "passwd": passwd, "username": username,
                "login": login, "url": url, "description": description}
        return self._post_json(settings.URL_GSENHA_ADDPERSONALEXTERNAL, data, token)

    def send_add_password_personal_ext_url(self, token, name, passwd, username, login, description):
        """Create a personal password entry for an external user, without URL."""
        data = {"name": name, "passwd": passwd, "username": username,
                "login": login, "description": description}
        return self._post_json(settings.URL_GSENHA_ADDPERSONALEXTERNAL, data, token)

    def send_add_password_group_ext(self, token, name, passwd, group, login, url, description):
        """Create a shared password entry for an external group, with URL."""
        data = {"name": name, "passwd": passwd, "group": group,
                "login": login, "url": url, "description": description}
        return self._post_json(settings.URL_GSENHA_ADDSHAREDEXTERNAL, data, token)

    def send_add_password_group_ext_url(self, token, name, passwd, group, login, description):
        """Create a shared password entry for an external group, without URL."""
        data = {"name": name, "passwd": passwd, "group": group,
                "login": login, "description": description}
        return self._post_json(settings.URL_GSENHA_ADDSHAREDEXTERNAL, data, token)

    def send_add_folder(self, token, path, name):
        """Create a folder under *path*.

        NOTE(review): URL_GSENHA_ADDFODLER is misspelled in ``settings``;
        kept as-is to match the settings module.
        """
        return self._post_json(settings.URL_GSENHA_ADDFODLER,
                               {"path": path, "name": name}, token)

    def send_del_folder(self, token, folder):
        """Delete *folder*."""
        return self._post_json(settings.URL_GSENHA_DELFOLDER, {"folder": folder}, token)

    def send_get_folders(self, token):
        """Fetch the folder list."""
        return self._get(settings.URL_GSENHA_GETFOLDERS, token)

    def send_get_groups(self, token):
        """Fetch all groups."""
        return self._get(settings.URL_GSENHA_GETGROUPS, token)

    def send_get_mygroups(self, token):
        """Fetch the groups the authenticated user belongs to."""
        return self._get(settings.URL_GSENHA_GETMYGROUPS, token)

    def send_get_tree(self, token):
        """Fetch the folder tree."""
        return self._get(settings.URL_GSENHA_GETTREE, token)

    def send_unlock(self, token, group, usertounlock):
        """Unlock *usertounlock* within *group*."""
        return self._post_json(settings.URL_GSENHA_UNLOCK,
                               {"group": group, "usertounlock": usertounlock}, token)

    def send_unlock2(self, token, data):
        """Unlock endpoint variant taking a caller-built payload dict."""
        return self._post_json(settings.URL_GSENHA_UNLOCK2, data, token)

    def send_update(self, token, id_passwd, passwd, url, login, name, description):
        """Update a password entry; ``None`` arguments are omitted from the payload."""
        data = {"id": id_passwd}
        # Only include the fields the caller actually wants to change.
        for key, value in (("passwd", passwd), ("url", url), ("login", login),
                           ("name", name), ("description", description)):
            if value is not None:
                data[key] = value
        return self._post_json(settings.URL_GSENHA_UPDATEPASSWD, data, token)

    def send_update_url(self, token, id_passwd, passwd, login, name, description):
        """Update a password entry, always sending every field (no URL)."""
        data = {"id": id_passwd, "passwd": passwd, "login": login,
                "name": name, "description": description}
        return self._post_json(settings.URL_GSENHA_UPDATEPASSWD, data, token)

    def send_update_pubkey(self, token, pubkey, privkey):
        """Replace the user's public/private key pair."""
        return self._post_json(settings.URL_GSENHA_UPDATEPUBKEY,
                               {"pubkey": pubkey, "privkey": privkey}, token)

    def send_delete_password(self, token, idPassword):
        """Delete the password entry identified by *idPassword*."""
        return requests.delete(settings.URL_GSENHA_DELPASSWORD + "/" + idPassword,
                               headers=self._auth_headers(token, json_content=True),
                               verify=True)

    def send_import(self, token, data):
        """Bulk-import entries via the personal-password endpoint."""
        return self._post_json(settings.URL_GSENHA_ADDPERSONAL, data, token)
| 42.476923
| 109
| 0.626766
| 1,247
| 11,044
| 5.423416
| 0.06255
| 0.095815
| 0.060328
| 0.070974
| 0.882153
| 0.84844
| 0.824043
| 0.789295
| 0.766524
| 0.762236
| 0
| 0.000357
| 0.239678
| 11,044
| 260
| 110
| 42.476923
| 0.805049
| 0.001901
| 0
| 0.777778
| 0
| 0
| 0.153693
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.102564
| false
| 0.145299
| 0.008547
| 0
| 0.217949
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
daa19541908533d3ddd3980fad5fe05b9b70a767
| 234
|
py
|
Python
|
intervul/datFiles/problem_data/contact/__init__.py
|
mpacheco62/intervul
|
c0eaadf54580de4b3c2dea46e8f196eab52280e1
|
[
"MIT"
] | 1
|
2021-04-13T13:28:16.000Z
|
2021-04-13T13:28:16.000Z
|
intervul/datFiles/problem_data/contact/__init__.py
|
andresutrera/intervul
|
75c5f824067549b3ddcbe9fe667964fb85a05ce3
|
[
"MIT"
] | null | null | null |
intervul/datFiles/problem_data/contact/__init__.py
|
andresutrera/intervul
|
75c5f824067549b3ddcbe9fe667964fb85a05ce3
|
[
"MIT"
] | 1
|
2021-05-06T20:29:42.000Z
|
2021-05-06T20:29:42.000Z
|
from ._augmented_lagrange import Augmented_lagrange
from ._non_coincident_mesh import Non_coincident_mesh
from ._both_coincident_and_non_coincident_mesh import (
Both_coincident_and_non_coincident_mesh)
| 46.8
| 71
| 0.790598
| 28
| 234
| 5.928571
| 0.321429
| 0.313253
| 0.409639
| 0.277108
| 0.409639
| 0.409639
| 0
| 0
| 0
| 0
| 0
| 0
| 0.188034
| 234
| 4
| 72
| 58.5
| 0.873684
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
daaec7eb6bffff27ce5d379340bcd6211628cdde
| 160
|
py
|
Python
|
utils/box/__init__.py
|
Yang-Zhaowei/SSD.300-512
|
0d6766038bd3ee37036e4255713d5c06e81a83ed
|
[
"MIT"
] | 3
|
2020-05-23T02:32:08.000Z
|
2021-04-26T12:29:40.000Z
|
utils/box/__init__.py
|
Yang-Zhaowei/PowerBank
|
0d6766038bd3ee37036e4255713d5c06e81a83ed
|
[
"MIT"
] | null | null | null |
utils/box/__init__.py
|
Yang-Zhaowei/PowerBank
|
0d6766038bd3ee37036e4255713d5c06e81a83ed
|
[
"MIT"
] | null | null | null |
from .prior_box import PriorBox
from .box_utils import decode,nms
from .box_utils import match, log_sum_exp,match_ious,bbox_overlaps_iou, bbox_overlaps_giou
| 22.857143
| 90
| 0.84375
| 27
| 160
| 4.62963
| 0.62963
| 0.112
| 0.192
| 0.288
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10625
| 160
| 6
| 91
| 26.666667
| 0.874126
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
dacdbf3b19b92bcc73fa0c141bde710edc3a2626
| 1,986
|
py
|
Python
|
solver/test.py
|
BavoGoosens/Capita3
|
ec884b6acaf5fc1b22a58b02c65888d5b8b2029a
|
[
"MIT"
] | null | null | null |
solver/test.py
|
BavoGoosens/Capita3
|
ec884b6acaf5fc1b22a58b02c65888d5b8b2029a
|
[
"MIT"
] | null | null | null |
solver/test.py
|
BavoGoosens/Capita3
|
ec884b6acaf5fc1b22a58b02c65888d5b8b2029a
|
[
"MIT"
] | null | null | null |
from generator import *
g = Generator()
counter = 1


def _instance_line(counter, candidate_id, time_span, omin, omax, dmin, dmax, demand):
    """Format one tuner instance line, matching the original output exactly."""
    demand_str = str(demand).replace('[', '').replace(']', '').replace(' ', '')
    return (str(counter) + " " + str(candidate_id) + " -t=" + str(time_span)
            + " --offdaymin=" + str(omin) + " --offdaymax=" + str(omax)
            + " --ondaymin=" + str(dmin) + " --ondaymax=" + str(dmax)
            + " -d=" + demand_str + "\n")


# Each batch: (instance count, timespan, cleanup applied to str(demand) when
# building the candidate id).  The cleanups differed between the original
# three loops and are preserved verbatim here.
_BATCHES = (
    (50, 5, lambda s: s.replace(', ', '').replace('[', '').replace(']', '')),
    (30, 7, lambda s: s.replace(', ', '').replace(' ', '')),
    (20, 14, lambda s: s.replace(', ', '')),
)

for n_instances, timespan, clean_demand in _BATCHES:
    for i in range(n_instances):
        time_span, omin, omax, dmin, dmax, demand = g.generate_parameters(timespan=timespan)
        candidate_id = (str(time_span) + str(omin) + str(omax) + str(dmin)
                        + str(dmax) + clean_demand(str(demand)))
        # ``with`` guarantees the file is closed even if the write raises
        # (the original leaked the handle on failure).
        with open('../tuning/instances/instance' + str(time_span) + '_' + str(i + 1), "w") as f:
            f.write(_instance_line(counter, candidate_id, time_span,
                                   omin, omax, dmin, dmax, demand))
        counter += 1
| 58.411765
| 136
| 0.556898
| 256
| 1,986
| 4.226563
| 0.183594
| 0.088725
| 0.091497
| 0.077634
| 0.963956
| 0.963956
| 0.963956
| 0.945471
| 0.945471
| 0.945471
| 0
| 0.012255
| 0.178248
| 1,986
| 34
| 137
| 58.411765
| 0.650735
| 0
| 0
| 0.636364
| 1
| 0
| 0.146452
| 0.042275
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.030303
| 0
| 0.030303
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
972c3b7d0d117b5e70d92f377e3d4765d44b4618
| 18,572
|
py
|
Python
|
fixture/contacts.py
|
kolesnyknataly/python_training
|
56a13869f78a68d827c2ec7e1a5ec20227e462b4
|
[
"Apache-2.0"
] | null | null | null |
fixture/contacts.py
|
kolesnyknataly/python_training
|
56a13869f78a68d827c2ec7e1a5ec20227e462b4
|
[
"Apache-2.0"
] | null | null | null |
fixture/contacts.py
|
kolesnyknataly/python_training
|
56a13869f78a68d827c2ec7e1a5ec20227e462b4
|
[
"Apache-2.0"
] | null | null | null |
from model.contacts import Contacts
import re
import random
from selenium.webdriver.support.ui import Select
from model.group import Group
class ContactsHelpers:
def __init__(self, app):
self.app = app
def open_contacts_page(self):
wd = self.app.wd
if wd.current_url.endswith("/addressbook/"):
return
wd.find_element_by_link_text("home").click()
def open_add_contact_page(self):
wd = self.app.wd
if not (wd.current_url.endswith("/edit.php") and len(wd.find_elements_by_name("submit")) > 0):
wd.find_element_by_link_text("add new").click()
def select_contact_by_index(self, index):
wd = self.app.wd
wd.find_elements_by_name("selected[]")[index].click()
def select_contact_by_id(self, id):
wd = self.app.wd
wd.find_element_by_css_selector("input[value='%s']" % id).click()
def find_contact_for_edit_by_id(self, id):
wd = self.app.wd
self.open_contacts_page()
wd.find_element_by_xpath("//a[@href='edit.php?id=%s']" % id).click()
def create(self, contacts):
wd = self.app.wd
self.open_add_contact_page()
# fill contact form
wd.find_element_by_name("firstname").click()
wd.find_element_by_name("firstname").clear()
wd.find_element_by_name("firstname").send_keys(contacts.first_name)
wd.find_element_by_name("middlename").click()
wd.find_element_by_name("middlename").clear()
wd.find_element_by_name("middlename").send_keys(contacts.middle_name)
wd.find_element_by_name("lastname").click()
wd.find_element_by_name("lastname").clear()
wd.find_element_by_name("lastname").send_keys(contacts.last_name)
wd.find_element_by_name("nickname").click()
wd.find_element_by_name("nickname").clear()
wd.find_element_by_name("nickname").send_keys(contacts.nickname)
wd.find_element_by_name("title").click()
wd.find_element_by_name("title").clear()
wd.find_element_by_name("title").send_keys(contacts.title)
wd.find_element_by_name("company").click()
wd.find_element_by_name("company").clear()
wd.find_element_by_name("company").send_keys(contacts.company)
wd.find_element_by_name("address").click()
wd.find_element_by_name("address").clear()
wd.find_element_by_name("address").send_keys(contacts.address)
wd.find_element_by_name("home").click()
wd.find_element_by_name("home").clear()
wd.find_element_by_name("home").send_keys(contacts.home)
wd.find_element_by_name("mobile").click()
wd.find_element_by_name("mobile").clear()
wd.find_element_by_name("mobile").send_keys(contacts.mobile)
wd.find_element_by_name("work").click()
wd.find_element_by_name("work").clear()
wd.find_element_by_name("work").send_keys(contacts.work)
wd.find_element_by_name("fax").click()
wd.find_element_by_name("fax").clear()
wd.find_element_by_name("fax").send_keys(contacts.fax)
wd.find_element_by_name("email").click()
wd.find_element_by_name("email").clear()
wd.find_element_by_name("email").send_keys(contacts.email)
wd.find_element_by_name("email2").click()
wd.find_element_by_name("email2").clear()
wd.find_element_by_name("email2").send_keys(contacts.email_2)
wd.find_element_by_name("email3").click()
wd.find_element_by_name("email3").clear()
wd.find_element_by_name("email3").send_keys(contacts.email_3)
wd.find_element_by_name("homepage").click()
wd.find_element_by_name("homepage").clear()
wd.find_element_by_name("homepage").send_keys(contacts.homepage)
wd.find_element_by_name("address2").click()
wd.find_element_by_name("address2").clear()
wd.find_element_by_name("address2").send_keys(contacts.address_2)
wd.find_element_by_name("phone2").click()
wd.find_element_by_name("phone2").clear()
wd.find_element_by_name("phone2").send_keys(contacts.phone2)
wd.find_element_by_name("notes").click()
wd.find_element_by_name("notes").clear()
wd.find_element_by_name("notes").send_keys(contacts.notes)
# submit contact creation
wd.find_element_by_xpath("(//input[@name='submit'])[2]").click()
self.return_to_home_page()
self.contact_cache = None
def delete_contact_by_index(self, index):
wd = self.app.wd
self.open_contacts_page()
self.select_contact_by_index(index)
# submit deletion
wd.find_element_by_xpath("//*[@value='Delete']").click()
wd.switch_to_alert().accept()
self.contact_cache = None
def delete_contact_by_id(self, id):
wd = self.app.wd
self.open_contacts_page()
self.select_contact_by_id(id)
# submit deletion
wd.find_element_by_xpath("//*[@value='Delete']").click()
wd.switch_to_alert().accept()
self.contact_cache = None
def delete_first_contact(self):
wd = self.app.wd
self.delete_contact_by_index(0)
def edit_contact_by_index(self, index, contacts):
wd = self.app.wd
self.open_contacts_page()
# self.select_contact_by_index(index)
# init contact editing
wd.find_elements_by_xpath('//img[@src="icons/pencil.png"]')[index].click()
# fill contact form
wd.find_element_by_name("firstname").click()
wd.find_element_by_name("firstname").clear()
wd.find_element_by_name("firstname").send_keys(contacts.first_name)
wd.find_element_by_name("middlename").click()
wd.find_element_by_name("middlename").clear()
wd.find_element_by_name("middlename").send_keys(contacts.middle_name)
wd.find_element_by_name("lastname").click()
wd.find_element_by_name("lastname").clear()
wd.find_element_by_name("lastname").send_keys(contacts.last_name)
wd.find_element_by_name("nickname").click()
wd.find_element_by_name("nickname").clear()
wd.find_element_by_name("nickname").send_keys(contacts.nickname)
wd.find_element_by_name("title").click()
wd.find_element_by_name("title").clear()
wd.find_element_by_name("title").send_keys(contacts.title)
wd.find_element_by_name("company").click()
wd.find_element_by_name("company").clear()
wd.find_element_by_name("company").send_keys(contacts.company)
wd.find_element_by_name("address").click()
wd.find_element_by_name("address").clear()
wd.find_element_by_name("address").send_keys(contacts.address)
wd.find_element_by_name("home").click()
wd.find_element_by_name("home").clear()
wd.find_element_by_name("home").send_keys(contacts.home)
wd.find_element_by_name("mobile").click()
wd.find_element_by_name("mobile").clear()
wd.find_element_by_name("mobile").send_keys(contacts.mobile)
wd.find_element_by_name("work").click()
wd.find_element_by_name("work").clear()
wd.find_element_by_name("work").send_keys(contacts.work)
wd.find_element_by_name("fax").click()
wd.find_element_by_name("fax").clear()
wd.find_element_by_name("fax").send_keys(contacts.fax)
wd.find_element_by_name("email").click()
wd.find_element_by_name("email").clear()
wd.find_element_by_name("email").send_keys(contacts.email)
wd.find_element_by_name("email2").click()
wd.find_element_by_name("email2").clear()
wd.find_element_by_name("email2").send_keys(contacts.email_2)
wd.find_element_by_name("email3").click()
wd.find_element_by_name("email3").clear()
wd.find_element_by_name("email3").send_keys(contacts.email_3)
wd.find_element_by_name("homepage").click()
wd.find_element_by_name("homepage").clear()
wd.find_element_by_name("homepage").send_keys(contacts.homepage)
wd.find_element_by_name("address2").click()
wd.find_element_by_name("address2").clear()
wd.find_element_by_name("address2").send_keys(contacts.address_2)
wd.find_element_by_name("phone2").click()
wd.find_element_by_name("phone2").clear()
wd.find_element_by_name("phone2").send_keys(contacts.phone2)
wd.find_element_by_name("notes").click()
wd.find_element_by_name("notes").clear()
wd.find_element_by_name("notes").send_keys(contacts.notes)
# submit contact creation
wd.find_element_by_name("update").click()
self.return_to_home_page()
self.contact_cache = None
def edit_contact_by_id(self, id, contact):
wd = self.app.wd
self.open_contacts_page()
self.open_all_contacts_list()
# self.select_contact_by_index(index)
# init contact editing
self.find_contact_for_edit_by_id(id)
# fill contact form
wd.find_element_by_name("firstname").click()
wd.find_element_by_name("firstname").clear()
wd.find_element_by_name("firstname").send_keys(contact.first_name)
wd.find_element_by_name("middlename").click()
wd.find_element_by_name("middlename").clear()
wd.find_element_by_name("middlename").send_keys(contact.middle_name)
wd.find_element_by_name("lastname").click()
wd.find_element_by_name("lastname").clear()
wd.find_element_by_name("lastname").send_keys(contact.last_name)
wd.find_element_by_name("nickname").click()
wd.find_element_by_name("nickname").clear()
wd.find_element_by_name("nickname").send_keys(contact.nickname)
wd.find_element_by_name("title").click()
wd.find_element_by_name("title").clear()
wd.find_element_by_name("title").send_keys(contact.title)
wd.find_element_by_name("company").click()
wd.find_element_by_name("company").clear()
wd.find_element_by_name("company").send_keys(contact.company)
wd.find_element_by_name("address").click()
wd.find_element_by_name("address").clear()
wd.find_element_by_name("address").send_keys(contact.address)
wd.find_element_by_name("home").click()
wd.find_element_by_name("home").clear()
wd.find_element_by_name("home").send_keys(contact.home)
wd.find_element_by_name("mobile").click()
wd.find_element_by_name("mobile").clear()
wd.find_element_by_name("mobile").send_keys(contact.mobile)
wd.find_element_by_name("work").click()
wd.find_element_by_name("work").clear()
wd.find_element_by_name("work").send_keys(contact.work)
wd.find_element_by_name("fax").click()
wd.find_element_by_name("fax").clear()
wd.find_element_by_name("fax").send_keys(contact.fax)
wd.find_element_by_name("email").click()
wd.find_element_by_name("email").clear()
wd.find_element_by_name("email").send_keys(contact.email)
wd.find_element_by_name("email2").click()
wd.find_element_by_name("email2").clear()
wd.find_element_by_name("email2").send_keys(contact.email_2)
wd.find_element_by_name("email3").click()
wd.find_element_by_name("email3").clear()
wd.find_element_by_name("email3").send_keys(contact.email_3)
wd.find_element_by_name("homepage").click()
wd.find_element_by_name("homepage").clear()
wd.find_element_by_name("homepage").send_keys(contact.homepage)
wd.find_element_by_name("address2").click()
wd.find_element_by_name("address2").clear()
wd.find_element_by_name("address2").send_keys(contact.address_2)
wd.find_element_by_name("phone2").click()
wd.find_element_by_name("phone2").clear()
wd.find_element_by_name("phone2").send_keys(contact.phone2)
wd.find_element_by_name("notes").click()
wd.find_element_by_name("notes").clear()
wd.find_element_by_name("notes").send_keys(contact.notes)
# submit contact creation
wd.find_element_by_name("update").click()
self.return_to_home_page()
self.contact_cache = None
def edit_first_contact(self):
wd = self.app.wd
self.edit_contact_by_index(0)
def return_to_home_page(self):
wd = self.app.wd
wd.find_element_by_link_text("home page").click()
def count(self):
wd = self.app.wd
self.open_contacts_page()
return len(wd.find_elements_by_name("selected[]"))
contact_cache = None
def get_contact_list(self):
if self.contact_cache is None:
wd = self.app.wd
self.open_contacts_page()
self.contact_cache = []
for element in wd.find_elements_by_css_selector("tr"):
if element.get_attribute("name") != 'entry':
continue
last_name = element.find_elements_by_css_selector("td")[1].text
first_name = element.find_elements_by_css_selector("td")[2].text
address = element.find_elements_by_css_selector("td")[3].text
id = element.find_element_by_name("selected[]").get_attribute("value")
all_emails = element.find_elements_by_css_selector("td")[4].text
all_phones = element.find_elements_by_css_selector("td")[5].text
self.contact_cache.append(Contacts(first_name=first_name, last_name=last_name, id=id, address=address,
all_phones=all_phones,
all_emails=all_emails
# ,
# address=address, email=all_emails[0], email_2=all_emails[1],
# email_3=all_emails[2], home=all_phones[0], mobile=all_phones[1],
# work=all_phones[2], phone2=all_phones[3]
))
return list(self.contact_cache)
def get_contact_list_from_group(self, group_id):
wd = self.app.wd
self.open_contacts_page()
select_element = Select(wd.find_element_by_name('group'))
select_element.select_by_value(group_id)
contacts_from_group = []
for element in wd.find_elements_by_css_selector("tr"):
if element.get_attribute("name") != 'entry':
continue
last_name = element.find_elements_by_css_selector("td")[1].text
first_name = element.find_elements_by_css_selector("td")[2].text
address = element.find_elements_by_css_selector("td")[3].text
id = element.find_element_by_name("selected[]").get_attribute("value")
contacts_from_group.append(Contacts(first_name=first_name, last_name=last_name, id=id, address=address))
return list(contacts_from_group)
def open_contact_view_by_index(self, index):
wd = self.app.wd
self.open_contacts_page()
row = wd.find_elements_by_name("entry")[index]
cell = row.find_elements_by_tag_name("td")[6]
cell.find_element_by_tag_name("a").click()
def open_contact_to_edit_by_index(self, index):
wd = self.app.wd
self.open_contacts_page()
row = wd.find_elements_by_name("entry")[index]
cell = row.find_elements_by_tag_name("td")[7]
cell.find_element_by_tag_name("a").click()
def get_contact_info_from_edit_page(self, index):
wd = self.app.wd
self.open_contact_to_edit_by_index(index)
first_name = wd.find_element_by_name("firstname").get_attribute("value")
last_name = wd.find_element_by_name("lastname").get_attribute("value")
id = wd.find_element_by_name("id").get_attribute("value")
home = wd.find_element_by_name("home").get_attribute("value")
mobile = wd.find_element_by_name("mobile").get_attribute("value")
work = wd.find_element_by_name("work").get_attribute("value")
phone2 = wd.find_element_by_name("phone2").get_attribute("value")
email = wd.find_element_by_name("email").get_attribute("value")
email_2 = wd.find_element_by_name("email2").get_attribute("value")
email_3 = wd.find_element_by_name("email3").get_attribute("value")
address = wd.find_element_by_name("address").get_attribute("value")
return Contacts(first_name=first_name, last_name=last_name, id=id, home=home, work=work, mobile=mobile,
phone2=phone2, email=email, email_2=email_2, email_3=email_3, address=address)
def get_contact_from_view_page(self, index):
    """Read the phone numbers of contact *index* from its detail-view page."""
    wd = self.app.wd
    self.open_contact_view_by_index(index)
    page_text = wd.find_element_by_id("content").text

    def _phone(prefix):
        # each phone is rendered on its own line, e.g. "H: 12345"
        return re.search(prefix + ": (.*)", page_text).group(1)

    return Contacts(home=_phone("H"), mobile=_phone("M"),
                    work=_phone("W"), phone2=_phone("P"))
def add_contact_to_group_by_id(self, id, group_id):
    """Put the contact with the given id into the group with value *group_id*."""
    wd = self.app.wd
    # show every contact so the target row is guaranteed to be on the page
    Select(wd.find_element_by_name('group')).select_by_visible_text('[all]')
    self.select_contact_by_id(id)
    # choose the destination group, submit, then follow the confirmation link
    Select(wd.find_element_by_name('to_group')).select_by_value(group_id)
    wd.find_element_by_name("add").click()
    wd.find_element_by_xpath("//i[text()='Go to ']").click()
def get_random_group_for_add_contact(self):
    """Return the value of a randomly chosen option of the 'to_group' selector."""
    wd = self.app.wd
    self.open_contacts_page()
    options = Select(wd.find_element_by_name('to_group')).options
    return random.choice([option.get_attribute("value") for option in options])
def delete_contact_from_group_by_id(self, id, group_id):
    """Remove the contact with the given id from the group with value *group_id*."""
    wd = self.app.wd
    # filter the list down to the group the contact is being removed from
    Select(wd.find_element_by_name('group')).select_by_value(group_id)
    self.select_contact_by_id(id)
    wd.find_element_by_xpath("//input[@name='remove']").click()
    wd.find_element_by_xpath("//i[text()='return to ']").click()
def open_all_contacts_list(self):
    # Switch the contacts-page group filter to '[all]' so every contact is listed.
    wd = self.app.wd
    select_element = Select(wd.find_element_by_name('group'))
    select_element.select_by_visible_text('[all]')
| 48.490862
| 118
| 0.663041
| 2,584
| 18,572
| 4.374226
| 0.054567
| 0.107228
| 0.227727
| 0.257454
| 0.854906
| 0.836857
| 0.794656
| 0.763956
| 0.739007
| 0.710873
| 0
| 0.006161
| 0.204663
| 18,572
| 382
| 119
| 48.617801
| 0.759055
| 0.023692
| 0
| 0.669643
| 0
| 0
| 0.089153
| 0.005962
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074405
| false
| 0
| 0.014881
| 0
| 0.116071
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
975397b219ee0d2e6a42e4d9fe569f3808b0f762
| 16,285
|
py
|
Python
|
tests/regression/py_scripts/make_bf_seqprops.py
|
ebatz/lalibe
|
5f00bce5c5b2ab7873c4569fb48f89366bc1b9a6
|
[
"BSD-3-Clause-LBNL"
] | 9
|
2019-07-25T15:26:34.000Z
|
2022-03-25T13:00:20.000Z
|
tests/regression/py_scripts/make_bf_seqprops.py
|
wittscien/lalibe
|
5f00bce5c5b2ab7873c4569fb48f89366bc1b9a6
|
[
"BSD-3-Clause-LBNL"
] | 5
|
2019-08-14T23:29:58.000Z
|
2021-05-13T16:56:07.000Z
|
tests/regression/py_scripts/make_bf_seqprops.py
|
wittscien/lalibe
|
5f00bce5c5b2ab7873c4569fb48f89366bc1b9a6
|
[
"BSD-3-Clause-LBNL"
] | 6
|
2019-05-21T00:26:54.000Z
|
2022-02-16T23:38:23.000Z
|
import sys
import h5py as h5
import tables
import numpy as np
import gamma
import time
import contractions
np.set_printoptions(linewidth=180)
''' This code reads a single h5 file that stores a propagator from a point source
to all, from all locations on the lattice. It uses these props to manually
construct the 'sequential' propagator
seqprop[t,zz,yy,xx,i,j,a,b] = curr_prop[t,zz,yy,xx,i,k,a,c] G[k,kk] prop[1,z,y,x,kk,j,c,b]
and then construct the 3pt function. It compares these 'brute force' 3pt
functions to the ones generated by the LALIBE code.
Currently tested and passing
A3 - 0 momentum at the sink and current, proton spin up and dn, full and UPPER/LOWER seqprops
DD positive parity including boundary wrapping
DD negative parity including boundary wrapping
UU positive parity including boundary wrapping
UU negative parity including boundary wrapping
'''
quark_spin='half'
spin='up'
# input: point-to-all propagators from every lattice site
f = h5.File('test_lalibe/all_pt_props.h5','r')
# output/cache: brute-force sequential propagators; 'a' mode so reruns reuse
# arrays that were already built
f_seqprop = tables.open_file('test_lalibe/seqprop_bf.h5','a')
if 'props' not in f_seqprop.root:
    f_seqprop.create_group('/','props')
# insertion matrix for the A3 current: gamma_3 * gamma_5
g_a3 = np.einsum('ik,kj->ij',gamma.g_3,gamma.g_5)
''' set source at x=y=z=0 '''
# Build (or reuse) the brute-force sequential propagators for both source
# times.  The two copies of this loop (t_src='3' and t_src='6') were verbatim
# duplicates, so they are folded into one outer loop; execution order and all
# printed output are unchanged.  The spatial volume is 4^3 and there are 8
# timeslices, matching the range(4)/range(8) loops below.
for t_src in ('3', '6'):
    src = 'x0y0z0t' + t_src
    prop = f['props/pt_prop_x0_y0_z0_t'+t_src][()]
    for t in range(8):
        seqprop_name = 'seqprop_src'+src+'_tg'+str(t)
        if seqprop_name not in f_seqprop.get_node('/props'):
            start_time = time.time()
            seqprop = np.zeros_like(prop)
            for x in range(4):
                for y in range(4):
                    for z in range(4):
                        curr_prop = f['props/pt_prop_x%d_y%d_z%d_t%d' %(x,y,z,t)][()]
                        # seqprop[t,zz,yy,xx,i,j,a,b] =
                        #   curr_prop[t,zz,yy,xx,i,k,a,c] G[k,kk] prop[1,z,y,x,kk,j,c,b]
                        # multiply by Gamma (the A3 insertion), then attach the
                        # source-to-current propagator
                        curr_prop = np.einsum('tzyxikab,kj->tzyxijab',curr_prop,g_a3)
                        seqprop += np.einsum('tzyxikac,kjcb->tzyxijab',curr_prop,prop[t,z,y,x])
            f_seqprop.create_array('/props',seqprop_name,seqprop)
            stop_time = time.time()
            print('t = %d, seconds = %.2f' %(t,stop_time-start_time))
        else:
            print('t = %d, already created' %t)
f_seqprop.close()
''' perform contractions '''
# Dirac-representation change matrices (DeGrand-Rossi -> Dirac-Pauli)
U = gamma.U_DR_to_DP
Uadj = gamma.U_DR_to_DP_adj
t_src='3'
src='x0y0z0t'+t_src
t_sep=4
# sink timeslice on the 8-site time extent (modular so t=3 src wraps cleanly)
t_sink = (int(t_src) + t_sep) % 8
prop = f['props/pt_prop_x0_y0_z0_t'+t_src][()]
# rotate the spin indices of the point prop into the DP basis
prop_DP = np.einsum('ik,tzyxklab,lj->tzyxijab',Uadj,prop,U)
# reopen the cache read-only for the contraction phase
f_seqprop = tables.open_file('test_lalibe/seqprop_bf.h5','r')
# --- A3 DD (positive parity), source t=3: brute force vs LALIBE vs old LALIBE ---
all_proton_corrs = []
for t in range(8):
    seqprop_name = 'seqprop_src'+src+'_tg'+str(t)
    seqprop = f_seqprop.get_node('/props/'+seqprop_name).read()
    seqprop_DP = np.einsum('ik,tzyxklab,lj->tzyxijab',Uadj,seqprop,U)
    # DD insertion: the sequential prop sits in the third quark slot
    proton = contractions.proton_spin_contract(prop_DP,prop_DP,seqprop_DP,'proton',spin)
    proton_time = np.einsum('tzyx->t',proton)
    all_proton_corrs.append(proton_time)
all_proton_corrs = np.array(all_proton_corrs)
# shift so the source sits at t=0; flip the sign when the sink has wrapped
# the (presumably antiperiodic) time boundary -- TODO confirm BC convention
all_proton_corrs = np.roll(all_proton_corrs,-int(t_src),axis=0)
if int(t_src)+t_sep >= 8:
    all_proton_corrs = -all_proton_corrs
lalibe_3pt = tables.open_file('test_lalibe/lalibe_3ptfn.h5','r')
a3 = lalibe_3pt.get_node('/'+quark_spin+'/proton_DD_%s_%s_t0_3_tsep_4_sink_mom_px0_py0_pz0/A3/x0_y0_z0_t%s/px0_py0_pz0/local_current' %(spin,spin,t_src)).read()
lalibe_3pt.close()
have_old=True
try:
    lalibe_3pt_old = tables.open_file('test_lalibe/lalibe_3ptfn.old.h5','r')
    a3_old = lalibe_3pt_old.get_node('/PP_seqprop_proton_DD_%s_%s_tsrc_3_tsep_4/A3/x0_y0_z0_t%s/px0_py0_pz0/corr_local_fn' %(spin,spin,t_src)).read()
    lalibe_3pt_old.close()
except Exception:
    # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt and
    # SystemExit; only real runtime failures (missing file/node) should mean
    # "old code not run yet"
    print('DD %s not run in old code yet' %(spin))
    have_old=False
np.set_printoptions(precision=6)
print('\nA3 DD %s corr, src=0,0,0,%s, t_sep=4\nBrute Force\nLALIBE\nOLD LALIBE' %(spin,t_src))
print('real')
print(all_proton_corrs[:,t_sink].real)
print(a3.real)
if have_old:
    print(a3_old.real)
print('imag')
print(all_proton_corrs[:,t_sink].imag)
print(a3.imag)
if have_old:
    print(a3_old.imag)
''' UU up '''
# --- A3 UU (positive parity), source t=3 ---
all_proton_corrs = []
for t in range(8):
    seqprop_name = 'seqprop_src'+src+'_tg'+str(t)
    seqprop = f_seqprop.get_node('/props/'+seqprop_name).read()
    seqprop_DP = np.einsum('ik,tzyxklab,lj->tzyxijab',Uadj,seqprop,U)
    ''' spin up, DD '''
    # NOTE(review): the string label above is stale -- this is the UU
    # insertion (seqprop in the first and second quark slots, summed)
    proton = contractions.proton_spin_contract(seqprop_DP,prop_DP,prop_DP,'proton',spin)
    proton += contractions.proton_spin_contract(prop_DP,seqprop_DP,prop_DP,'proton',spin)
    proton_time = np.einsum('tzyx->t',proton)
    all_proton_corrs.append(proton_time)
all_proton_corrs = np.array(all_proton_corrs)
# source to t=0; sign flip when the sink wrapped the time boundary
all_proton_corrs = np.roll(all_proton_corrs,-int(t_src),axis=0)
if int(t_src)+t_sep >= 8:
    all_proton_corrs = -all_proton_corrs
lalibe_3pt = tables.open_file('test_lalibe/lalibe_3ptfn.h5','r')
a3 = lalibe_3pt.get_node('/'+quark_spin+'/proton_UU_%s_%s_t0_%s_tsep_4_sink_mom_px0_py0_pz0/A3/x0_y0_z0_t%s/px0_py0_pz0/local_current' %(spin,spin,t_src,t_src)).read()
lalibe_3pt.close()
""" NEED to run this
lalibe_3pt_old = tables.open_file('test_lalibe/lalibe_3ptfn.old.h5','r')
a3_old = lalibe_3pt_old.get_node('/PP_seqprop_proton_UU_up_up_tsrc_3_tsep_4/A3/x0_y0_z0_t%s/px0_py0_pz0/corr_local_fn' %t_src).read()
lalibe_3pt_old.close()
"""
np.set_printoptions(precision=6)
print('\nA3 UU %s corr, src=0,0,0,%s, t_sep=4\nBrute Force\nLALIBE\nOLD LALIBE' %(spin,t_src))
print('real')
print(all_proton_corrs[:,t_sink].real)
print(a3.real)
#print(a3_old.real)
print('imag')
print(all_proton_corrs[:,t_sink].imag)
print(a3.imag)
#print(a3_old.imag)
''' proton, DD, up, t=6 '''
# --- A3 DD (positive parity), source t=6; with t_sep=4 the sink wraps the
# time boundary, exercising the sign flip below ---
t_src='6'
src='x0y0z0t'+t_src
t_sink = (int(t_src) + t_sep) % 8
prop = f['props/pt_prop_x0_y0_z0_t'+t_src][()]
prop_DP = np.einsum('ik,tzyxklab,lj->tzyxijab',Uadj,prop,U)
# sanity check: proton two-point function from this source
p_2pt_up = contractions.proton_spin_contract(prop_DP,prop_DP,prop_DP,'proton',spin)
p_2pt_up_time = np.einsum('tzyx->t',p_2pt_up)
print('\nProton 2pt')
print(p_2pt_up_time.real)
all_proton_corrs = []
for t in range(8):
    seqprop_name = 'seqprop_src'+src+'_tg'+str(t)
    seqprop = f_seqprop.get_node('/props/'+seqprop_name).read()
    seqprop_DP = np.einsum('ik,tzyxklab,lj->tzyxijab',Uadj,seqprop,U)
    # DD insertion: sequential prop in the third quark slot
    proton = contractions.proton_spin_contract(prop_DP,prop_DP,seqprop_DP,'proton',spin)
    proton_time = np.einsum('tzyx->t',proton)
    all_proton_corrs.append(proton_time)
all_proton_corrs = np.array(all_proton_corrs)
# source to t=0; sign flip because 6+4 >= 8 wraps the time boundary
all_proton_corrs = np.roll(all_proton_corrs,-int(t_src),axis=0)
if int(t_src)+t_sep >= 8:
    all_proton_corrs = -all_proton_corrs
lalibe_3pt = tables.open_file('test_lalibe/lalibe_3ptfn.h5','r')
a3 = lalibe_3pt.get_node('/'+quark_spin+'/proton_DD_%s_%s_t0_%s_tsep_4_sink_mom_px0_py0_pz0/A3/x0_y0_z0_t%s/px0_py0_pz0/local_current' %(spin,spin,t_src,t_src)).read()
lalibe_3pt.close()
have_old=True
try:
    lalibe_3pt_old = tables.open_file('test_lalibe/lalibe_3ptfn.old.h5','r')
    a3_old = lalibe_3pt_old.get_node('/PP_seqprop_proton_DD_%s_%s_tsrc_%s_tsep_4/A3/x0_y0_z0_t%s/px0_py0_pz0/corr_local_fn' %(spin,spin,t_src,t_src)).read()
    lalibe_3pt_old.close()
except Exception:
    # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt and
    # SystemExit; only real runtime failures should mean "not run yet"
    have_old=False
    print('DD %s not run in old code yet' %(spin))
np.set_printoptions(precision=6)
print('\nA3 DD %s corr, src=0,0,0,%s, t_sep=4\nBrute Force\nLALIBE\nOLD LALIBE' %(spin,t_src))
print('real')
print(all_proton_corrs[:,t_sink].real)
print(a3.real)
if have_old:
    print(a3_old.real)
print('imag')
print(all_proton_corrs[:,t_sink].imag)
print(a3.imag)
if have_old:
    print(a3_old.imag)
''' UU up '''
# --- A3 UU (positive parity), source t=6 ---
all_proton_corrs = []
for t in range(8):
    seqprop_name = 'seqprop_src'+src+'_tg'+str(t)
    seqprop = f_seqprop.get_node('/props/'+seqprop_name).read()
    seqprop_DP = np.einsum('ik,tzyxklab,lj->tzyxijab',Uadj,seqprop,U)
    ''' spin up, DD '''
    # NOTE(review): the string label above is stale -- this is the UU
    # insertion (seqprop in the first and second quark slots, summed)
    proton = contractions.proton_spin_contract(seqprop_DP,prop_DP,prop_DP,'proton',spin)
    proton += contractions.proton_spin_contract(prop_DP,seqprop_DP,prop_DP,'proton',spin)
    proton_time = np.einsum('tzyx->t',proton)
    all_proton_corrs.append(proton_time)
all_proton_corrs = np.array(all_proton_corrs)
# source to t=0; sign flip because the sink wrapped the time boundary
all_proton_corrs = np.roll(all_proton_corrs,-int(t_src),axis=0)
if int(t_src)+t_sep >= 8:
    all_proton_corrs = -all_proton_corrs
lalibe_3pt = tables.open_file('test_lalibe/lalibe_3ptfn.h5','r')
a3 = lalibe_3pt.get_node('/'+quark_spin+'/proton_UU_%s_%s_t0_%s_tsep_4_sink_mom_px0_py0_pz0/A3/x0_y0_z0_t%s/px0_py0_pz0/local_current' %(spin,spin,t_src,t_src)).read()
lalibe_3pt.close()
""" NEED to run this
lalibe_3pt_old = tables.open_file('test_lalibe/lalibe_3ptfn.old.h5','r')
a3_old = lalibe_3pt_old.get_node('/PP_seqprop_proton_UU_up_up_tsrc_3_tsep_4/A3/x0_y0_z0_t%s/px0_py0_pz0/corr_local_fn' %t_src).read()
lalibe_3pt_old.close()
"""
np.set_printoptions(precision=6)
print('\nA3 UU %s corr, src=0,0,0,%s, t_sep=4\nBrute Force\nLALIBE\nOLD LALIBE' %(spin,t_src))
print('real')
print(all_proton_corrs[:,t_sink].real)
print(a3.real)
#print(a3_old.real)
print('imag')
print(all_proton_corrs[:,t_sink].imag)
print(a3.imag)
#print(a3_old.imag)
''' negative parity, no boundary wrap '''
# --- A3 DD, negative-parity proton ('proton_np'), source t=6, t_sep=-4:
# the sink sits BEFORE the source, so no wrap for this source time ---
t_src='6'
t_sep=-4
src='x0y0z0t'+t_src
t_sink = (int(t_src) + t_sep) % 8
prop = f['props/pt_prop_x0_y0_z0_t'+t_src][()]
prop_DP = np.einsum('ik,tzyxklab,lj->tzyxijab',Uadj,prop,U)
all_proton_corrs = []
for t in range(8):
    seqprop_name = 'seqprop_src'+src+'_tg'+str(t)
    seqprop = f_seqprop.get_node('/props/'+seqprop_name).read()
    seqprop_DP = np.einsum('ik,tzyxklab,lj->tzyxijab',Uadj,seqprop,U)
    ''' spin up, DD '''
    proton = contractions.proton_spin_contract(prop_DP,prop_DP,seqprop_DP,'proton_np',spin)
    proton_time = np.einsum('tzyx->t',proton)
    all_proton_corrs.append(proton_time)
all_proton_corrs = np.array(all_proton_corrs)
# source to t=0; for negative t_sep the wrap condition is t_src + t_sep < 0
all_proton_corrs = np.roll(all_proton_corrs,-int(t_src),axis=0)
if int(t_src) + t_sep < 0:
    all_proton_corrs = -all_proton_corrs
lalibe_3pt = tables.open_file('test_lalibe/lalibe_3ptfn.h5','r')
a3 = lalibe_3pt.get_node('/'+quark_spin+'/proton_np_DD_%s_%s_t0_%s_tsep_%s_sink_mom_px0_py0_pz0/A3/x0_y0_z0_t%s/px0_py0_pz0/local_current' %(spin,spin,t_src,str(t_sep),t_src)).read()
lalibe_3pt.close()
'''
lalibe_3pt_old = tables.open_file('test_lalibe/lalibe_3ptfn.old.h5','r')
a3_old = lalibe_3pt_old.get_node('/PP_seqprop_proton_DD_up_up_tsrc_3_tsep_4/A3/x0_y0_z0_t3/px0_py0_pz0/corr_local_fn').read()
lalibe_3pt_old.close()
'''
np.set_printoptions(precision=6)
print('\nA3 NEG PAR DD %s corr, src=0,0,0,%s, t_sep=%s\nBrute Force\nLALIBE\nOLD LALIBE' %(spin,t_src,str(t_sep)))
print('real')
print(all_proton_corrs[:,t_sink].real)
print(a3.real)
#print(a3_old.real)
print('imag')
print(all_proton_corrs[:,t_sink].imag)
print(a3.imag)
#print(a3_old.imag)
''' UU up NP '''
# --- A3 UU, negative-parity proton, source t=6, t_sep=-4 ---
all_proton_corrs = []
for t in range(8):
    seqprop_name = 'seqprop_src'+src+'_tg'+str(t)
    seqprop = f_seqprop.get_node('/props/'+seqprop_name).read()
    seqprop_DP = np.einsum('ik,tzyxklab,lj->tzyxijab',Uadj,seqprop,U)
    ''' spin up, DD '''
    # NOTE(review): stale label -- this is the UU insertion, summed over the
    # first and second quark slots
    proton = contractions.proton_spin_contract(seqprop_DP,prop_DP,prop_DP,'proton_np',spin)
    proton += contractions.proton_spin_contract(prop_DP,seqprop_DP,prop_DP,'proton_np',spin)
    proton_time = np.einsum('tzyx->t',proton)
    all_proton_corrs.append(proton_time)
all_proton_corrs = np.array(all_proton_corrs)
all_proton_corrs = np.roll(all_proton_corrs,-int(t_src),axis=0)
if int(t_src) + t_sep < 0:
    all_proton_corrs = -all_proton_corrs
lalibe_3pt = tables.open_file('test_lalibe/lalibe_3ptfn.h5','r')
a3 = lalibe_3pt.get_node('/'+quark_spin+'/proton_np_UU_%s_%s_t0_%s_tsep_%s_sink_mom_px0_py0_pz0/A3/x0_y0_z0_t%s/px0_py0_pz0/local_current' %(spin,spin,t_src,str(t_sep),t_src)).read()
lalibe_3pt.close()
np.set_printoptions(precision=6)
print('\nA3 NEG PAR UU %s corr, src=0,0,0,%s, t_sep=%s\nBrute Force\nLALIBE\nOLD LALIBE' %(spin,t_src,str(t_sep)))
print('real')
print(all_proton_corrs[:,t_sink].real)
print(a3.real)
print('imag')
print(all_proton_corrs[:,t_sink].imag)
print(a3.imag)
''' negative parity, boundary wrap '''
# --- A3 DD, negative-parity proton, source t=3, t_sep=-4: 3-4 < 0, so the
# sink wraps backwards through the time boundary ---
t_src='3'
t_sep=-4
src='x0y0z0t'+t_src
t_sink = (int(t_src) + t_sep) % 8
prop = f['props/pt_prop_x0_y0_z0_t'+t_src][()]
prop_DP = np.einsum('ik,tzyxklab,lj->tzyxijab',Uadj,prop,U)
all_proton_corrs = []
for t in range(8):
    seqprop_name = 'seqprop_src'+src+'_tg'+str(t)
    seqprop = f_seqprop.get_node('/props/'+seqprop_name).read()
    seqprop_DP = np.einsum('ik,tzyxklab,lj->tzyxijab',Uadj,seqprop,U)
    ''' spin up, DD '''
    proton = contractions.proton_spin_contract(prop_DP,prop_DP,seqprop_DP,'proton_np',spin)
    proton_time = np.einsum('tzyx->t',proton)
    all_proton_corrs.append(proton_time)
all_proton_corrs = np.array(all_proton_corrs)
# source to t=0; sign flip because the backwards sink wrapped the boundary
all_proton_corrs = np.roll(all_proton_corrs,-int(t_src),axis=0)
if int(t_src) + t_sep < 0:
    all_proton_corrs = -all_proton_corrs
lalibe_3pt = tables.open_file('test_lalibe/lalibe_3ptfn.h5','r')
a3 = lalibe_3pt.get_node('/'+quark_spin+'/proton_np_DD_%s_%s_t0_%s_tsep_%s_sink_mom_px0_py0_pz0/A3/x0_y0_z0_t%s/px0_py0_pz0/local_current' %(spin,spin,t_src,str(t_sep),t_src)).read()
lalibe_3pt.close()
'''
lalibe_3pt_old = tables.open_file('test_lalibe/lalibe_3ptfn.old.h5','r')
a3_old = lalibe_3pt_old.get_node('/PP_seqprop_proton_DD_up_up_tsrc_3_tsep_4/A3/x0_y0_z0_t3/px0_py0_pz0/corr_local_fn').read()
lalibe_3pt_old.close()
'''
np.set_printoptions(precision=6)
print('\nA3 NEG PAR DD %s corr, src=0,0,0,%s, t_sep=%s\nBrute Force\nLALIBE\nOLD LALIBE' %(spin,t_src,str(t_sep)))
print('real')
print(all_proton_corrs[:,t_sink].real)
print(a3.real)
#print(a3_old.real)
print('imag')
print(all_proton_corrs[:,t_sink].imag)
print(a3.imag)
#print(a3_old.imag)
''' UU up NP '''
# --- A3 UU, negative-parity proton, source t=3, t_sep=-4 (boundary wrap) ---
all_proton_corrs = []
for t in range(8):
    seqprop_name = 'seqprop_src'+src+'_tg'+str(t)
    seqprop = f_seqprop.get_node('/props/'+seqprop_name).read()
    seqprop_DP = np.einsum('ik,tzyxklab,lj->tzyxijab',Uadj,seqprop,U)
    ''' spin up, DD '''
    # NOTE(review): stale label -- UU insertion again
    proton = contractions.proton_spin_contract(seqprop_DP,prop_DP,prop_DP,'proton_np',spin)
    proton += contractions.proton_spin_contract(prop_DP,seqprop_DP,prop_DP,'proton_np',spin)
    proton_time = np.einsum('tzyx->t',proton)
    all_proton_corrs.append(proton_time)
all_proton_corrs = np.array(all_proton_corrs)
all_proton_corrs = np.roll(all_proton_corrs,-int(t_src),axis=0)
if int(t_src) + t_sep < 0:
    all_proton_corrs = -all_proton_corrs
lalibe_3pt = tables.open_file('test_lalibe/lalibe_3ptfn.h5','r')
a3 = lalibe_3pt.get_node('/'+quark_spin+'/proton_np_UU_%s_%s_t0_%s_tsep_%s_sink_mom_px0_py0_pz0/A3/x0_y0_z0_t%s/px0_py0_pz0/local_current' %(spin,spin,t_src,str(t_sep),t_src)).read()
lalibe_3pt.close()
np.set_printoptions(precision=6)
print('\nA3 NEG PAR UU %s corr, src=0,0,0,%s, t_sep=%s\nBrute Force\nLALIBE\nOLD LALIBE' %(spin,t_src,str(t_sep)))
print('real')
print(all_proton_corrs[:,t_sink].real)
print(a3.real)
print('imag')
print(all_proton_corrs[:,t_sink].imag)
print(a3.imag)
# release the input and cache files
f.close()
f_seqprop.close()
| 37.960373
| 182
| 0.709917
| 2,933
| 16,285
| 3.618138
| 0.068871
| 0.067848
| 0.105541
| 0.011873
| 0.922729
| 0.903694
| 0.903694
| 0.899736
| 0.891915
| 0.886732
| 0
| 0.029343
| 0.127356
| 16,285
| 428
| 183
| 38.049065
| 0.717402
| 0.02094
| 0
| 0.90604
| 0
| 0.026846
| 0.233867
| 0.137294
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02349
| 0
| 0.02349
| 0.258389
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8ade28a0dca24d0f82a8ac85e9cb31c49b9edf12
| 3,369
|
py
|
Python
|
oneshot/alfassy/setops_funcs.py
|
nganltp/admicro-LaSO
|
857d67a40af437ab57068fb0de35e4ada56c6209
|
[
"BSD-3-Clause"
] | 83
|
2019-04-14T06:58:15.000Z
|
2022-03-01T01:34:03.000Z
|
oneshot/alfassy/setops_funcs.py
|
leokarlin/LaSO
|
8941bdc9316361ad03dbc2bcabd4bf9922c0ecc7
|
[
"BSD-3-Clause"
] | 17
|
2019-04-28T04:26:24.000Z
|
2022-01-19T15:37:42.000Z
|
oneshot/alfassy/setops_funcs.py
|
nganltp/admicro-LaSO
|
857d67a40af437ab57068fb0de35e4ada56c6209
|
[
"BSD-3-Clause"
] | 15
|
2019-09-05T04:22:10.000Z
|
2022-01-13T15:31:25.000Z
|
import numpy as np
import torch
def set_subtraction_operation(labels1, labels2):
    """Element-wise set subtraction on batches of binary label vectors.

    A class is kept iff it is present (== 1) in ``labels1`` and absent
    (== 0) in ``labels2``.

    :param labels1: (batch, classes) array-like of 0/1 labels
    :param labels2: (batch, classes) array-like of 0/1 labels, same shape
    :return: ``torch.LongTensor`` of shape (batch, classes)
    """
    # Vectorized replacement for the original O(batch*classes) Python loops.
    # np.asarray accepts nested lists, numpy arrays, and CPU torch tensors.
    mask = (np.asarray(labels1) == 1) & (np.asarray(labels2) == 0)
    # astype(np.int64) preserves the historical dtype (list-of-int -> int64)
    return torch.from_numpy(mask.astype(np.int64))
def set_union_operation(labels1, labels2):
    """Element-wise set union on batches of binary label vectors.

    A class is kept iff it is present (== 1) in ``labels1`` or ``labels2``.

    :param labels1: (batch, classes) array-like of 0/1 labels
    :param labels2: (batch, classes) array-like of 0/1 labels, same shape
    :return: ``torch.LongTensor`` of shape (batch, classes)
    """
    # Vectorized replacement for the original O(batch*classes) Python loops.
    mask = (np.asarray(labels1) == 1) | (np.asarray(labels2) == 1)
    return torch.from_numpy(mask.astype(np.int64))
def set_intersection_operation(labels1, labels2):
    """Element-wise set intersection on batches of binary label vectors.

    A class is kept iff it is present (== 1) in both ``labels1`` and
    ``labels2``.

    :param labels1: (batch, classes) array-like of 0/1 labels
    :param labels2: (batch, classes) array-like of 0/1 labels, same shape
    :return: ``torch.LongTensor`` of shape (batch, classes)
    """
    # Vectorized replacement for the original O(batch*classes) Python loops.
    mask = (np.asarray(labels1) == 1) & (np.asarray(labels2) == 1)
    return torch.from_numpy(mask.astype(np.int64))
def set_subtraction_operation_one_sample(labels1, labels2):
    """Set subtraction for a single binary label vector.

    A class is kept iff it is present (== 1) in ``labels1`` and absent
    (== 0) in ``labels2``.

    :param labels1: (classes,) array-like of 0/1 labels
    :param labels2: (classes,) array-like of 0/1 labels, same length
    :return: ``numpy.ndarray`` of int64 0/1 flags
    """
    # Vectorized replacement for the original per-class Python loop.
    mask = (np.asarray(labels1) == 1) & (np.asarray(labels2) == 0)
    return mask.astype(np.int64)
def set_union_operation_one_sample(labels1, labels2):
    """Set union for a single binary label vector.

    A class is kept iff it is present (== 1) in ``labels1`` or ``labels2``.

    :param labels1: (classes,) array-like of 0/1 labels
    :param labels2: (classes,) array-like of 0/1 labels, same length
    :return: ``numpy.ndarray`` of int64 0/1 flags
    """
    # Vectorized replacement for the original per-class Python loop.
    mask = (np.asarray(labels1) == 1) | (np.asarray(labels2) == 1)
    return mask.astype(np.int64)
def set_intersection_operation_one_sample(labels1, labels2):
    """Set intersection for a single binary label vector.

    A class is kept iff it is present (== 1) in both ``labels1`` and
    ``labels2``.

    :param labels1: (classes,) array-like of 0/1 labels
    :param labels2: (classes,) array-like of 0/1 labels, same length
    :return: ``numpy.ndarray`` of int64 0/1 flags
    """
    # Vectorized replacement for the original per-class Python loop.
    mask = (np.asarray(labels1) == 1) & (np.asarray(labels2) == 1)
    return mask.astype(np.int64)
| 33.029412
| 85
| 0.636094
| 324
| 3,369
| 6.521605
| 0.117284
| 0.042593
| 0.036914
| 0.082347
| 0.86654
| 0.816848
| 0.78088
| 0.732134
| 0.732134
| 0.706105
| 0
| 0.029482
| 0.254972
| 3,369
| 102
| 86
| 33.029412
| 0.812351
| 0.108934
| 0
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.077922
| false
| 0
| 0.025974
| 0
| 0.181818
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c155a8326a21b195b579327a1cac812fe9eecc2e
| 20,095
|
py
|
Python
|
sdk/python/pulumi_ucloud/udpn/udpn_connection.py
|
AaronFriel/pulumi-ucloud
|
199278786dddf46bdd370f3f805e30b279c63ff2
|
[
"ECL-2.0",
"Apache-2.0"
] | 4
|
2021-08-18T04:55:38.000Z
|
2021-09-08T07:59:24.000Z
|
sdk/python/pulumi_ucloud/udpn/udpn_connection.py
|
AaronFriel/pulumi-ucloud
|
199278786dddf46bdd370f3f805e30b279c63ff2
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2022-01-28T17:59:37.000Z
|
2022-01-29T03:44:09.000Z
|
sdk/python/pulumi_ucloud/udpn/udpn_connection.py
|
AaronFriel/pulumi-ucloud
|
199278786dddf46bdd370f3f805e30b279c63ff2
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-06-23T07:10:40.000Z
|
2021-06-23T09:25:12.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['UDPNConnectionArgs', 'UDPNConnection']
@pulumi.input_type
class UDPNConnectionArgs:
    # NOTE(review): tfgen-generated args class; @pulumi.input_type introspects
    # the __init__/property structure, so behavioral edits belong in the
    # generator, not here.
    def __init__(__self__, *,
                 peer_region: pulumi.Input[str],
                 bandwidth: Optional[pulumi.Input[int]] = None,
                 charge_type: Optional[pulumi.Input[str]] = None,
                 duration: Optional[pulumi.Input[int]] = None):
        """
        The set of arguments for constructing a UDPNConnection resource.
        :param pulumi.Input[str] peer_region: The correspondent region of dedicated connection, please refer to the region and [availability zone list](https://docs.ucloud.cn/api/summary/regionlist) and [UDPN price list](https://docs.ucloud.cn/network/udpn/udpn_price).
        :param pulumi.Input[int] bandwidth: Maximum bandwidth to the elastic public network, measured in Mbps (Mega bit per second). range from 2 - 1000M. The default value is "1"
        :param pulumi.Input[str] charge_type: Charge type. Possible values are: "year" as pay by year, "month" as pay by month, "dynamic" as pay by hour. The default value is "month".
        :param pulumi.Input[int] duration: The duration that you will buy the resource, the default value is "1". It is not required when "dynamic" (pay by hour), the value is "0" when pay by month and the instance will be valid till the last day of that month.
        """
        pulumi.set(__self__, "peer_region", peer_region)
        # optional arguments are only recorded when explicitly supplied
        if bandwidth is not None:
            pulumi.set(__self__, "bandwidth", bandwidth)
        if charge_type is not None:
            pulumi.set(__self__, "charge_type", charge_type)
        if duration is not None:
            pulumi.set(__self__, "duration", duration)

    @property
    @pulumi.getter(name="peerRegion")
    def peer_region(self) -> pulumi.Input[str]:
        """
        The correspondent region of dedicated connection, please refer to the region and [availability zone list](https://docs.ucloud.cn/api/summary/regionlist) and [UDPN price list](https://docs.ucloud.cn/network/udpn/udpn_price).
        """
        return pulumi.get(self, "peer_region")

    @peer_region.setter
    def peer_region(self, value: pulumi.Input[str]):
        pulumi.set(self, "peer_region", value)

    @property
    @pulumi.getter
    def bandwidth(self) -> Optional[pulumi.Input[int]]:
        """
        Maximum bandwidth to the elastic public network, measured in Mbps (Mega bit per second). range from 2 - 1000M. The default value is "1"
        """
        return pulumi.get(self, "bandwidth")

    @bandwidth.setter
    def bandwidth(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "bandwidth", value)

    @property
    @pulumi.getter(name="chargeType")
    def charge_type(self) -> Optional[pulumi.Input[str]]:
        """
        Charge type. Possible values are: "year" as pay by year, "month" as pay by month, "dynamic" as pay by hour. The default value is "month".
        """
        return pulumi.get(self, "charge_type")

    @charge_type.setter
    def charge_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "charge_type", value)

    @property
    @pulumi.getter
    def duration(self) -> Optional[pulumi.Input[int]]:
        """
        The duration that you will buy the resource, the default value is "1". It is not required when "dynamic" (pay by hour), the value is "0" when pay by month and the instance will be valid till the last day of that month.
        """
        return pulumi.get(self, "duration")

    @duration.setter
    def duration(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "duration", value)
@pulumi.input_type
class _UDPNConnectionState:
    # NOTE(review): tfgen-generated state class; every field is optional because
    # state is used for lookup/filtering, not construction. @pulumi.input_type
    # introspects the structure, so behavioral edits belong in the generator.
    def __init__(__self__, *,
                 bandwidth: Optional[pulumi.Input[int]] = None,
                 charge_type: Optional[pulumi.Input[str]] = None,
                 create_time: Optional[pulumi.Input[str]] = None,
                 duration: Optional[pulumi.Input[int]] = None,
                 expire_time: Optional[pulumi.Input[str]] = None,
                 peer_region: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering UDPNConnection resources.
        :param pulumi.Input[int] bandwidth: Maximum bandwidth to the elastic public network, measured in Mbps (Mega bit per second). range from 2 - 1000M. The default value is "1"
        :param pulumi.Input[str] charge_type: Charge type. Possible values are: "year" as pay by year, "month" as pay by month, "dynamic" as pay by hour. The default value is "month".
        :param pulumi.Input[str] create_time: The time of creation for UDPN connection, formatted by RFC3339 time string.
        :param pulumi.Input[int] duration: The duration that you will buy the resource, the default value is "1". It is not required when "dynamic" (pay by hour), the value is "0" when pay by month and the instance will be valid till the last day of that month.
        :param pulumi.Input[str] expire_time: The expiration time for UDPN connection, formatted by RFC3339 time string.
        :param pulumi.Input[str] peer_region: The correspondent region of dedicated connection, please refer to the region and [availability zone list](https://docs.ucloud.cn/api/summary/regionlist) and [UDPN price list](https://docs.ucloud.cn/network/udpn/udpn_price).
        """
        # only record fields that were explicitly supplied
        if bandwidth is not None:
            pulumi.set(__self__, "bandwidth", bandwidth)
        if charge_type is not None:
            pulumi.set(__self__, "charge_type", charge_type)
        if create_time is not None:
            pulumi.set(__self__, "create_time", create_time)
        if duration is not None:
            pulumi.set(__self__, "duration", duration)
        if expire_time is not None:
            pulumi.set(__self__, "expire_time", expire_time)
        if peer_region is not None:
            pulumi.set(__self__, "peer_region", peer_region)

    @property
    @pulumi.getter
    def bandwidth(self) -> Optional[pulumi.Input[int]]:
        """
        Maximum bandwidth to the elastic public network, measured in Mbps (Mega bit per second). range from 2 - 1000M. The default value is "1"
        """
        return pulumi.get(self, "bandwidth")

    @bandwidth.setter
    def bandwidth(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "bandwidth", value)

    @property
    @pulumi.getter(name="chargeType")
    def charge_type(self) -> Optional[pulumi.Input[str]]:
        """
        Charge type. Possible values are: "year" as pay by year, "month" as pay by month, "dynamic" as pay by hour. The default value is "month".
        """
        return pulumi.get(self, "charge_type")

    @charge_type.setter
    def charge_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "charge_type", value)

    @property
    @pulumi.getter(name="createTime")
    def create_time(self) -> Optional[pulumi.Input[str]]:
        """
        The time of creation for UDPN connection, formatted by RFC3339 time string.
        """
        return pulumi.get(self, "create_time")

    @create_time.setter
    def create_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "create_time", value)

    @property
    @pulumi.getter
    def duration(self) -> Optional[pulumi.Input[int]]:
        """
        The duration that you will buy the resource, the default value is "1". It is not required when "dynamic" (pay by hour), the value is "0" when pay by month and the instance will be valid till the last day of that month.
        """
        return pulumi.get(self, "duration")

    @duration.setter
    def duration(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "duration", value)

    @property
    @pulumi.getter(name="expireTime")
    def expire_time(self) -> Optional[pulumi.Input[str]]:
        """
        The expiration time for UDPN connection, formatted by RFC3339 time string.
        """
        return pulumi.get(self, "expire_time")

    @expire_time.setter
    def expire_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "expire_time", value)

    @property
    @pulumi.getter(name="peerRegion")
    def peer_region(self) -> Optional[pulumi.Input[str]]:
        """
        The correspondent region of dedicated connection, please refer to the region and [availability zone list](https://docs.ucloud.cn/api/summary/regionlist) and [UDPN price list](https://docs.ucloud.cn/network/udpn/udpn_price).
        """
        return pulumi.get(self, "peer_region")

    @peer_region.setter
    def peer_region(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "peer_region", value)
class UDPNConnection(pulumi.CustomResource):
    # NOTE(review): generated pulumi resource wrapper. The two @overload
    # __init__ signatures only document the legal call shapes; the third
    # __init__ is the real dispatcher and forwards to _internal_init.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 bandwidth: Optional[pulumi.Input[int]] = None,
                 charge_type: Optional[pulumi.Input[str]] = None,
                 duration: Optional[pulumi.Input[int]] = None,
                 peer_region: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        UDPN (UCloud Dedicated Private Network),you can use Dedicated Private Network to achieve high-speed, stable, secure, and dedicated communications between different data centers. The most frequent scenario is to create network connection of clusters across regions.
        > **VPC Peering Connections with UDPN Connection** The cross-region Dedicated Private Network must be established if the two VPCs located in different regions are expected to be connected.
        > **Note** The additional packet head will be added and included in the overall length of packet due to the tunneling UDPN adopted. Since the number of the bytes of packet head is fixed, the bigger data packet is, the less usage will be taken for the packet head.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_ucloud as ucloud
        # connect provider's region (cn-bj2) and peer region (cn-sh2)
        example = ucloud.udpn.UDPNConnection("example",
            bandwidth=2,
            peer_region="cn-sh2")
        ```
        ## Import
        UDPN connection can be imported using the `id`, e.g.
        ```sh
        $ pulumi import ucloud:udpn/uDPNConnection:UDPNConnection example udpn-abc123456
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[int] bandwidth: Maximum bandwidth to the elastic public network, measured in Mbps (Mega bit per second). range from 2 - 1000M. The default value is "1"
        :param pulumi.Input[str] charge_type: Charge type. Possible values are: "year" as pay by year, "month" as pay by month, "dynamic" as pay by hour. The default value is "month".
        :param pulumi.Input[int] duration: The duration that you will buy the resource, the default value is "1". It is not required when "dynamic" (pay by hour), the value is "0" when pay by month and the instance will be valid till the last day of that month.
        :param pulumi.Input[str] peer_region: The correspondent region of dedicated connection, please refer to the region and [availability zone list](https://docs.ucloud.cn/api/summary/regionlist) and [UDPN price list](https://docs.ucloud.cn/network/udpn/udpn_price).
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: UDPNConnectionArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        UDPN (UCloud Dedicated Private Network),you can use Dedicated Private Network to achieve high-speed, stable, secure, and dedicated communications between different data centers. The most frequent scenario is to create network connection of clusters across regions.
        > **VPC Peering Connections with UDPN Connection** The cross-region Dedicated Private Network must be established if the two VPCs located in different regions are expected to be connected.
        > **Note** The additional packet head will be added and included in the overall length of packet due to the tunneling UDPN adopted. Since the number of the bytes of packet head is fixed, the bigger data packet is, the less usage will be taken for the packet head.
        ## Example Usage
        ```python
        import pulumi
        import pulumi_ucloud as ucloud
        # connect provider's region (cn-bj2) and peer region (cn-sh2)
        example = ucloud.udpn.UDPNConnection("example",
            bandwidth=2,
            peer_region="cn-sh2")
        ```
        ## Import
        UDPN connection can be imported using the `id`, e.g.
        ```sh
        $ pulumi import ucloud:udpn/uDPNConnection:UDPNConnection example udpn-abc123456
        ```
        :param str resource_name: The name of the resource.
        :param UDPNConnectionArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatcher: decide whether the caller used the args-object overload
        # or the keyword-property overload, then delegate to _internal_init.
        resource_args, opts = _utilities.get_resource_args_opts(UDPNConnectionArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       bandwidth: Optional[pulumi.Input[int]] = None,
                       charge_type: Optional[pulumi.Input[str]] = None,
                       duration: Optional[pulumi.Input[int]] = None,
                       peer_region: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Normalize and validate the resource options before registration.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # No opts.id => we are creating a new resource, so build the
            # property bag from the keyword arguments.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = UDPNConnectionArgs.__new__(UDPNConnectionArgs)
            __props__.__dict__["bandwidth"] = bandwidth
            __props__.__dict__["charge_type"] = charge_type
            __props__.__dict__["duration"] = duration
            # peer_region is required unless adopting an existing URN.
            if peer_region is None and not opts.urn:
                raise TypeError("Missing required property 'peer_region'")
            __props__.__dict__["peer_region"] = peer_region
            # Output-only properties start as None; the engine fills them in.
            __props__.__dict__["create_time"] = None
            __props__.__dict__["expire_time"] = None
        super(UDPNConnection, __self__).__init__(
            'ucloud:udpn/uDPNConnection:UDPNConnection',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            bandwidth: Optional[pulumi.Input[int]] = None,
            charge_type: Optional[pulumi.Input[str]] = None,
            create_time: Optional[pulumi.Input[str]] = None,
            duration: Optional[pulumi.Input[int]] = None,
            expire_time: Optional[pulumi.Input[str]] = None,
            peer_region: Optional[pulumi.Input[str]] = None) -> 'UDPNConnection':
        """
        Get an existing UDPNConnection resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[int] bandwidth: Maximum bandwidth to the elastic public network, measured in Mbps (Mega bit per second). range from 2 - 1000M. The default value is "1"
        :param pulumi.Input[str] charge_type: Charge type. Possible values are: "year" as pay by year, "month" as pay by month, "dynamic" as pay by hour. The default value is "month".
        :param pulumi.Input[str] create_time: The time of creation for UDPN connection, formatted by RFC3339 time string.
        :param pulumi.Input[int] duration: The duration that you will buy the resource, the default value is "1". It is not required when "dynamic" (pay by hour), the value is "0" when pay by month and the instance will be valid till the last day of that month.
        :param pulumi.Input[str] expire_time: The expiration time for UDPN connection, formatted by RFC3339 time string.
        :param pulumi.Input[str] peer_region: The correspondent region of dedicated connection, please refer to the region and [availability zone list](https://docs.ucloud.cn/api/summary/regionlist) and [UDPN price list](https://docs.ucloud.cn/network/udpn/udpn_price).
        """
        # Force the id onto the options so the engine performs a lookup
        # instead of a create, then seed the state bag with any overrides.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _UDPNConnectionState.__new__(_UDPNConnectionState)
        __props__.__dict__["bandwidth"] = bandwidth
        __props__.__dict__["charge_type"] = charge_type
        __props__.__dict__["create_time"] = create_time
        __props__.__dict__["duration"] = duration
        __props__.__dict__["expire_time"] = expire_time
        __props__.__dict__["peer_region"] = peer_region
        return UDPNConnection(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter
    def bandwidth(self) -> pulumi.Output[Optional[int]]:
        """
        Maximum bandwidth to the elastic public network, measured in Mbps (Mega bit per second). range from 2 - 1000M. The default value is "1"
        """
        return pulumi.get(self, "bandwidth")
    @property
    @pulumi.getter(name="chargeType")
    def charge_type(self) -> pulumi.Output[Optional[str]]:
        """
        Charge type. Possible values are: "year" as pay by year, "month" as pay by month, "dynamic" as pay by hour. The default value is "month".
        """
        return pulumi.get(self, "charge_type")
    @property
    @pulumi.getter(name="createTime")
    def create_time(self) -> pulumi.Output[str]:
        """
        The time of creation for UDPN connection, formatted by RFC3339 time string.
        """
        return pulumi.get(self, "create_time")
    @property
    @pulumi.getter
    def duration(self) -> pulumi.Output[Optional[int]]:
        """
        The duration that you will buy the resource, the default value is "1". It is not required when "dynamic" (pay by hour), the value is "0" when pay by month and the instance will be valid till the last day of that month.
        """
        return pulumi.get(self, "duration")
    @property
    @pulumi.getter(name="expireTime")
    def expire_time(self) -> pulumi.Output[str]:
        """
        The expiration time for UDPN connection, formatted by RFC3339 time string.
        """
        return pulumi.get(self, "expire_time")
    @property
    @pulumi.getter(name="peerRegion")
    def peer_region(self) -> pulumi.Output[str]:
        """
        The correspondent region of dedicated connection, please refer to the region and [availability zone list](https://docs.ucloud.cn/api/summary/regionlist) and [UDPN price list](https://docs.ucloud.cn/network/udpn/udpn_price).
        """
        return pulumi.get(self, "peer_region")
| 51.002538
| 272
| 0.664643
| 2,594
| 20,095
| 4.998843
| 0.094834
| 0.057685
| 0.060076
| 0.039022
| 0.850852
| 0.829413
| 0.816303
| 0.79818
| 0.794247
| 0.765096
| 0
| 0.007117
| 0.23782
| 20,095
| 393
| 273
| 51.132316
| 0.839514
| 0.476138
| 0
| 0.653846
| 1
| 0
| 0.090662
| 0.004292
| 0
| 0
| 0
| 0
| 0
| 1
| 0.158654
| false
| 0.004808
| 0.024038
| 0
| 0.278846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a9e283f327135ae1b4714373bf88c24facb4240e
| 5,378
|
py
|
Python
|
Bubblez/classes/api/send/User.py
|
ProjectBubblez/Bubblez.py
|
332ca7206850f4faee63badd804e62972e6d4bef
|
[
"MIT"
] | 1
|
2021-11-09T20:45:41.000Z
|
2021-11-09T20:45:41.000Z
|
Bubblez/classes/api/send/User.py
|
ProjectBubblez/Bubblez.py
|
332ca7206850f4faee63badd804e62972e6d4bef
|
[
"MIT"
] | null | null | null |
Bubblez/classes/api/send/User.py
|
ProjectBubblez/Bubblez.py
|
332ca7206850f4faee63badd804e62972e6d4bef
|
[
"MIT"
] | null | null | null |
from ...Color import Color
from ...Log import logTime
from ..receive.User import User as ReceivedUser
from ..receive.Post import Post as ReceivedPost
from ..receive.Reply import Reply as ReceivedReply
import requests, traceback, json
class User:
    """Send-side wrapper for the Bubblez ``/user/*`` REST endpoints.

    Every public method POSTs form data carrying the client's token and
    either returns a parsed result (``ReceivedUser``, or ``True`` for
    :meth:`ping`) on success, or prints a colored error report and
    returns ``False``.
    """

    def __init__(self, client) -> None:
        # The client supplies the auth token, live/canary base URLs,
        # the verbose flag and the logging prefix used in every message.
        self.client = client

    # -- internal helpers ---------------------------------------------------

    def _url(self, path: str) -> str:
        """Build the absolute endpoint URL, honouring the canary switch."""
        base = self.client.canary_url if self.client.canary else self.client.live_url
        return base + path

    def _fail(self, color: str, message: str, response) -> None:
        """Print a prefixed failure line followed by the raw response body."""
        print(f"{color}[Bubblez.py-api-{self.client.prefix_log}] {logTime()} {message} Code: {response.status_code}", Color.ENDC)
        print(f"{color}Reason: {response.content}", Color.ENDC)

    def _post_json(self, path: str, data: dict, endpoint_name: str):
        """POST ``data`` to ``path``; return ``(json_dict_or_None, response)``.

        Failures (non-2xx status, or a body that is not valid JSON) are
        logged and reported as ``None`` so callers can simply return False.
        """
        response = requests.post(url=self._url(path), data=data)
        if not response.ok:
            self._fail(Color.FAIL, f"There is an error occurred on {endpoint_name}!", response)
            return None, response
        try:
            return response.json(), response
        except ValueError:
            # Narrowed from a bare `except`: only JSON decoding is expected
            # to fail here; anything else should surface to the caller.
            self._fail(Color.FAIL, f"There is an error occurred on {endpoint_name}!", response)
            traceback.print_exc()
            return None, response

    # -- public API ---------------------------------------------------------

    def get(self, username: str):
        """
        Get a user with username!
        username: `str`
        """
        data = {"token": self.client.token, "username": username}
        if self.client.verbose:
            print(Color.OKCYAN, f"[Bubblez.py-api-{self.client.prefix_log}]Sending API request to: {Color.BOLD}/user/get", Color.ENDC)
        resp_js, response = self._post_json("/user/get", data, "user/get")
        if resp_js is None:
            return False
        if resp_js.get('200') == 'Found user':
            print(f"{Color.OKGREEN}[Bubblez.py-api-{self.client.prefix_log}] {logTime()} Api found user: {resp_js['username']} {Color.ENDC}")
            return ReceivedUser(self.client, resp_js)
        self._fail(Color.WARNING, "Did not find user!", response)
        return False

    def check(self):
        """
        Check your token!
        """
        resp_js, response = self._post_json("/user/check", {"token": self.client.token}, "user/check")
        if resp_js is None:
            return False
        if resp_js.get('200') == 'Found user':
            print(f"{Color.OKGREEN}[Bubblez.py-api-{self.client.prefix_log}] {logTime()} User found with username: {resp_js['username']}! {Color.ENDC}")
            return ReceivedUser(self.client, resp_js)
        self._fail(Color.WARNING, "Could not find user!", response)
        return False

    def ping(self):
        """
        Set your status to online!
        """
        # BUG FIX: this previously POSTed to "/user/check", so the online
        # status was never refreshed even though the code expects the
        # 'Pong' answer that only the ping endpoint returns.
        resp_js, response = self._post_json("/user/ping", {"token": self.client.token}, "user/ping")
        if resp_js is None:
            return False
        if resp_js.get('200') == 'Pong':
            print(f"{Color.OKGREEN}[Bubblez.py-api-{self.client.prefix_log}] {logTime()} User found with username: {resp_js['username']}! {Color.ENDC}")
            return True
        self._fail(Color.WARNING, "Could not find user with token!", response)
        return False
| 52.72549
| 172
| 0.58293
| 668
| 5,378
| 4.615269
| 0.124252
| 0.097308
| 0.074927
| 0.067467
| 0.8482
| 0.8482
| 0.839442
| 0.829387
| 0.829387
| 0.829387
| 0
| 0.004656
| 0.281145
| 5,378
| 102
| 173
| 52.72549
| 0.792809
| 0.016735
| 0
| 0.744186
| 0
| 0.151163
| 0.397812
| 0.201881
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046512
| false
| 0
| 0.069767
| 0
| 0.267442
| 0.325581
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a9ea49815628300bc2e5cbaf0e6f492ef862ab64
| 6,407
|
py
|
Python
|
loldib/getratings/models/NA/na_nasus/na_nasus_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_nasus/na_nasus_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_nasus/na_nasus_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Auto-generated per-matchup rating holders: one empty Ratings subclass per
# opposing champion for Nasus played Top on the NA region. The subclasses
# carry no behavior of their own; all logic lives in the Ratings base class.
class NA_Nasus_Top_Aatrox(Ratings):
    pass
class NA_Nasus_Top_Ahri(Ratings):
    pass
class NA_Nasus_Top_Akali(Ratings):
    pass
class NA_Nasus_Top_Alistar(Ratings):
    pass
class NA_Nasus_Top_Amumu(Ratings):
    pass
class NA_Nasus_Top_Anivia(Ratings):
    pass
class NA_Nasus_Top_Annie(Ratings):
    pass
class NA_Nasus_Top_Ashe(Ratings):
    pass
class NA_Nasus_Top_AurelionSol(Ratings):
    pass
class NA_Nasus_Top_Azir(Ratings):
    pass
class NA_Nasus_Top_Bard(Ratings):
    pass
class NA_Nasus_Top_Blitzcrank(Ratings):
    pass
class NA_Nasus_Top_Brand(Ratings):
    pass
class NA_Nasus_Top_Braum(Ratings):
    pass
class NA_Nasus_Top_Caitlyn(Ratings):
    pass
class NA_Nasus_Top_Camille(Ratings):
    pass
class NA_Nasus_Top_Cassiopeia(Ratings):
    pass
class NA_Nasus_Top_Chogath(Ratings):
    pass
class NA_Nasus_Top_Corki(Ratings):
    pass
class NA_Nasus_Top_Darius(Ratings):
    pass
class NA_Nasus_Top_Diana(Ratings):
    pass
class NA_Nasus_Top_Draven(Ratings):
    pass
class NA_Nasus_Top_DrMundo(Ratings):
    pass
class NA_Nasus_Top_Ekko(Ratings):
    pass
class NA_Nasus_Top_Elise(Ratings):
    pass
class NA_Nasus_Top_Evelynn(Ratings):
    pass
class NA_Nasus_Top_Ezreal(Ratings):
    pass
class NA_Nasus_Top_Fiddlesticks(Ratings):
    pass
class NA_Nasus_Top_Fiora(Ratings):
    pass
class NA_Nasus_Top_Fizz(Ratings):
    pass
class NA_Nasus_Top_Galio(Ratings):
    pass
class NA_Nasus_Top_Gangplank(Ratings):
    pass
class NA_Nasus_Top_Garen(Ratings):
    pass
class NA_Nasus_Top_Gnar(Ratings):
    pass
class NA_Nasus_Top_Gragas(Ratings):
    pass
class NA_Nasus_Top_Graves(Ratings):
    pass
class NA_Nasus_Top_Hecarim(Ratings):
    pass
class NA_Nasus_Top_Heimerdinger(Ratings):
    pass
class NA_Nasus_Top_Illaoi(Ratings):
    pass
class NA_Nasus_Top_Irelia(Ratings):
    pass
class NA_Nasus_Top_Ivern(Ratings):
    pass
class NA_Nasus_Top_Janna(Ratings):
    pass
class NA_Nasus_Top_JarvanIV(Ratings):
    pass
class NA_Nasus_Top_Jax(Ratings):
    pass
class NA_Nasus_Top_Jayce(Ratings):
    pass
class NA_Nasus_Top_Jhin(Ratings):
    pass
class NA_Nasus_Top_Jinx(Ratings):
    pass
class NA_Nasus_Top_Kalista(Ratings):
    pass
class NA_Nasus_Top_Karma(Ratings):
    pass
class NA_Nasus_Top_Karthus(Ratings):
    pass
class NA_Nasus_Top_Kassadin(Ratings):
    pass
class NA_Nasus_Top_Katarina(Ratings):
    pass
class NA_Nasus_Top_Kayle(Ratings):
    pass
class NA_Nasus_Top_Kayn(Ratings):
    pass
class NA_Nasus_Top_Kennen(Ratings):
    pass
class NA_Nasus_Top_Khazix(Ratings):
    pass
class NA_Nasus_Top_Kindred(Ratings):
    pass
class NA_Nasus_Top_Kled(Ratings):
    pass
class NA_Nasus_Top_KogMaw(Ratings):
    pass
class NA_Nasus_Top_Leblanc(Ratings):
    pass
class NA_Nasus_Top_LeeSin(Ratings):
    pass
class NA_Nasus_Top_Leona(Ratings):
    pass
class NA_Nasus_Top_Lissandra(Ratings):
    pass
class NA_Nasus_Top_Lucian(Ratings):
    pass
class NA_Nasus_Top_Lulu(Ratings):
    pass
class NA_Nasus_Top_Lux(Ratings):
    pass
class NA_Nasus_Top_Malphite(Ratings):
    pass
class NA_Nasus_Top_Malzahar(Ratings):
    pass
class NA_Nasus_Top_Maokai(Ratings):
    pass
class NA_Nasus_Top_MasterYi(Ratings):
    pass
class NA_Nasus_Top_MissFortune(Ratings):
    pass
class NA_Nasus_Top_MonkeyKing(Ratings):
    pass
class NA_Nasus_Top_Mordekaiser(Ratings):
    pass
class NA_Nasus_Top_Morgana(Ratings):
    pass
class NA_Nasus_Top_Nami(Ratings):
    pass
class NA_Nasus_Top_Nasus(Ratings):
    pass
class NA_Nasus_Top_Nautilus(Ratings):
    pass
class NA_Nasus_Top_Nidalee(Ratings):
    pass
class NA_Nasus_Top_Nocturne(Ratings):
    pass
class NA_Nasus_Top_Nunu(Ratings):
    pass
class NA_Nasus_Top_Olaf(Ratings):
    pass
class NA_Nasus_Top_Orianna(Ratings):
    pass
class NA_Nasus_Top_Ornn(Ratings):
    pass
class NA_Nasus_Top_Pantheon(Ratings):
    pass
class NA_Nasus_Top_Poppy(Ratings):
    pass
class NA_Nasus_Top_Quinn(Ratings):
    pass
class NA_Nasus_Top_Rakan(Ratings):
    pass
class NA_Nasus_Top_Rammus(Ratings):
    pass
class NA_Nasus_Top_RekSai(Ratings):
    pass
class NA_Nasus_Top_Renekton(Ratings):
    pass
class NA_Nasus_Top_Rengar(Ratings):
    pass
class NA_Nasus_Top_Riven(Ratings):
    pass
class NA_Nasus_Top_Rumble(Ratings):
    pass
class NA_Nasus_Top_Ryze(Ratings):
    pass
class NA_Nasus_Top_Sejuani(Ratings):
    pass
class NA_Nasus_Top_Shaco(Ratings):
    pass
class NA_Nasus_Top_Shen(Ratings):
    pass
class NA_Nasus_Top_Shyvana(Ratings):
    pass
class NA_Nasus_Top_Singed(Ratings):
    pass
class NA_Nasus_Top_Sion(Ratings):
    pass
class NA_Nasus_Top_Sivir(Ratings):
    pass
class NA_Nasus_Top_Skarner(Ratings):
    pass
class NA_Nasus_Top_Sona(Ratings):
    pass
class NA_Nasus_Top_Soraka(Ratings):
    pass
class NA_Nasus_Top_Swain(Ratings):
    pass
class NA_Nasus_Top_Syndra(Ratings):
    pass
class NA_Nasus_Top_TahmKench(Ratings):
    pass
class NA_Nasus_Top_Taliyah(Ratings):
    pass
class NA_Nasus_Top_Talon(Ratings):
    pass
class NA_Nasus_Top_Taric(Ratings):
    pass
class NA_Nasus_Top_Teemo(Ratings):
    pass
class NA_Nasus_Top_Thresh(Ratings):
    pass
class NA_Nasus_Top_Tristana(Ratings):
    pass
class NA_Nasus_Top_Trundle(Ratings):
    pass
class NA_Nasus_Top_Tryndamere(Ratings):
    pass
class NA_Nasus_Top_TwistedFate(Ratings):
    pass
class NA_Nasus_Top_Twitch(Ratings):
    pass
class NA_Nasus_Top_Udyr(Ratings):
    pass
class NA_Nasus_Top_Urgot(Ratings):
    pass
class NA_Nasus_Top_Varus(Ratings):
    pass
class NA_Nasus_Top_Vayne(Ratings):
    pass
class NA_Nasus_Top_Veigar(Ratings):
    pass
class NA_Nasus_Top_Velkoz(Ratings):
    pass
class NA_Nasus_Top_Vi(Ratings):
    pass
class NA_Nasus_Top_Viktor(Ratings):
    pass
class NA_Nasus_Top_Vladimir(Ratings):
    pass
class NA_Nasus_Top_Volibear(Ratings):
    pass
class NA_Nasus_Top_Warwick(Ratings):
    pass
class NA_Nasus_Top_Xayah(Ratings):
    pass
class NA_Nasus_Top_Xerath(Ratings):
    pass
class NA_Nasus_Top_XinZhao(Ratings):
    pass
class NA_Nasus_Top_Yasuo(Ratings):
    pass
class NA_Nasus_Top_Yorick(Ratings):
    pass
class NA_Nasus_Top_Zac(Ratings):
    pass
class NA_Nasus_Top_Zed(Ratings):
    pass
class NA_Nasus_Top_Ziggs(Ratings):
    pass
class NA_Nasus_Top_Zilean(Ratings):
    pass
class NA_Nasus_Top_Zyra(Ratings):
    pass
| 15.364508
| 46
| 0.761667
| 972
| 6,407
| 4.59465
| 0.151235
| 0.216301
| 0.370802
| 0.463502
| 0.797582
| 0.797582
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173404
| 6,407
| 416
| 47
| 15.401442
| 0.843278
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
e7d817d3a1000746d41e4624d33beec006982bcb
| 309,261
|
py
|
Python
|
sfof/python/euclid/dm/_stc.py
|
sfarrens/sfof
|
f887abc0dbd1587fd7fbc7148b4704d1b5f4cdac
|
[
"MIT"
] | 13
|
2017-06-15T16:56:29.000Z
|
2021-12-08T20:44:39.000Z
|
sfof/python/euclid/dm/_stc.py
|
umikanero/sfof
|
9aa7b09ccb12311a68373e4e516dee82fa5c428e
|
[
"MIT"
] | 6
|
2020-05-30T07:40:59.000Z
|
2020-11-30T12:25:14.000Z
|
sfof/python/euclid/dm/_stc.py
|
umikanero/sfof
|
9aa7b09ccb12311a68373e4e516dee82fa5c428e
|
[
"MIT"
] | 4
|
2018-02-24T02:12:24.000Z
|
2021-06-03T07:22:15.000Z
|
# /home/sartor/pymodule/euclid/dm/_stc.py
# -*- coding: utf-8 -*-
# PyXB bindings for NM:c85a7aef9dd35afb45dde402fdc86e2ca92a56ad
# Generated 2014-07-24 16:26:39.932475 by PyXB version 1.2.3
# Namespace http://euclid.esa.org/schema/bas/imp/stc [xmlns:stc]
import pyxb
import pyxb.binding
import pyxb.binding.saxer
import io
import pyxb.utils.utility
import pyxb.utils.domutils
import sys
# Unique identifier for bindings created at the same time
_GenerationUID = pyxb.utils.utility.UniqueIdentifier('urn:uuid:869ae486-133e-11e4-88d8-90b11c83965f')
# Version of PyXB used to generate the bindings
_PyXBVersion = '1.2.3'
# Generated bindings are not compatible across PyXB versions; fail fast
# at import time rather than misbehave later.
if pyxb.__version__ != _PyXBVersion:
    raise pyxb.PyXBVersionError(_PyXBVersion)
# Import bindings for namespaces imported into schema
import pyxb.binding.datatypes
import euclid.dm._dtd as _ImportedBinding_euclid_dm__dtd
import euclid.dm._utd as _ImportedBinding_euclid_dm__utd
# NOTE: All namespace declarations are reserved within the binding.
# Register this module's namespace and the binding categories it will fill.
Namespace = pyxb.namespace.NamespaceForURI(u'http://euclid.esa.org/schema/bas/imp/stc', create_if_missing=True)
Namespace.configureCategories(['typeBinding', 'elementBinding'])
def CreateFromDocument (xml_text, default_namespace=None, location_base=None):
    """Parse the given XML and use the document element to create a
    Python instance.
    @param xml_text An XML document. This should be data (Python 2
    str or Python 3 bytes), or a text (Python 2 unicode or Python 3
    str) in the L{pyxb._InputEncoding} encoding.
    @keyword default_namespace The L{pyxb.Namespace} instance to use as the
    default namespace where there is no default namespace in scope.
    If unspecified or C{None}, the namespace of the module containing
    this function will be used.
    @keyword location_base: An object to be recorded as the base of all
    L{pyxb.utils.utility.Location} instances associated with events and
    objects handled by the parser.  You might pass the URI from which
    the document was obtained.
    """
    # If the runtime is not configured for SAX parsing, take the DOM path.
    if pyxb._XMLStyle != pyxb.XMLStyle_saxer:
        document = pyxb.utils.domutils.StringToDOM(xml_text)
        return CreateFromDOM(document.documentElement)
    ns = default_namespace if default_namespace is not None else Namespace.fallbackNamespace()
    parser = pyxb.binding.saxer.make_parser(fallback_namespace=ns, location_base=location_base)
    content_handler = parser.getContentHandler()
    payload = xml_text
    if isinstance(payload, unicode):
        # Python 2 text input: encode to bytes with the configured encoding.
        payload = payload.encode(pyxb._InputEncoding)
    parser.parse(io.BytesIO(payload))
    return content_handler.rootObject()
def CreateFromDOM (node, default_namespace=None):
    """Create a Python instance from the given DOM node.
    The node tag must correspond to an element declaration in this module.
    @deprecated: Forcing use of DOM interface is unnecessary; use L{CreateFromDocument}."""
    ns = Namespace.fallbackNamespace() if default_namespace is None else default_namespace
    return pyxb.binding.basis.element.AnyCreateFromDOM(node, ns)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}hsOffsetType
class hsOffsetType (pyxb.binding.datatypes.double):
    """An atomic simple type."""
    # Schema identity and XSD source location recorded by the generator.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'hsOffsetType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 365, 1)
    _Documentation = None
# Facets: restrict the double value to the closed interval [-1.0, 1.0].
hsOffsetType._CF_maxInclusive = pyxb.binding.facets.CF_maxInclusive(value_datatype=hsOffsetType, value=pyxb.binding.datatypes.double(1.0))
hsOffsetType._CF_minInclusive = pyxb.binding.facets.CF_minInclusive(value_datatype=hsOffsetType, value=pyxb.binding.datatypes.double(-1.0))
hsOffsetType._InitializeFacetMap(hsOffsetType._CF_maxInclusive,
   hsOffsetType._CF_minInclusive)
# Register the binding so lookups by schema name resolve to this class.
Namespace.addCategoryObject('typeBinding', u'hsOffsetType', hsOffsetType)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}redshiftFrameValue
class redshiftFrameValue (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """An atomic simple type."""
    # Schema identity and XSD source location recorded by the generator.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'redshiftFrameValue')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 488, 1)
    _Documentation = None
# Enumeration facet: the only legal values are VELOCITY and REDSHIFT.
redshiftFrameValue._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=redshiftFrameValue, enum_prefix=None)
redshiftFrameValue.VELOCITY = redshiftFrameValue._CF_enumeration.addEnumeration(unicode_value=u'VELOCITY', tag=u'VELOCITY')
redshiftFrameValue.REDSHIFT = redshiftFrameValue._CF_enumeration.addEnumeration(unicode_value=u'REDSHIFT', tag=u'REDSHIFT')
redshiftFrameValue._InitializeFacetMap(redshiftFrameValue._CF_enumeration)
# Register the binding so lookups by schema name resolve to this class.
Namespace.addCategoryObject('typeBinding', u'redshiftFrameValue', redshiftFrameValue)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}dopplerDefinition
class dopplerDefinition (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """The Doppler definition used: optical, radio, or pseudo-relativistic (i.e., how is a redshift converted to a velocity); the most common is optical, except when the reference is LSR (usually radio)"""
    # Schema identity and XSD source location recorded by the generator.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'dopplerDefinition')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 507, 1)
    _Documentation = u'The Doppler definition used: optical, radio, or pseudo-relativistic (i.e., how is a redshift converted to a velocity); the most common is optical, except when the reference is LSR (usually radio)'
# Enumeration facet: legal values are OPTICAL, RADIO and RELATIVISTIC.
dopplerDefinition._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=dopplerDefinition, enum_prefix=None)
dopplerDefinition.OPTICAL = dopplerDefinition._CF_enumeration.addEnumeration(unicode_value=u'OPTICAL', tag=u'OPTICAL')
dopplerDefinition.RADIO = dopplerDefinition._CF_enumeration.addEnumeration(unicode_value=u'RADIO', tag=u'RADIO')
dopplerDefinition.RELATIVISTIC = dopplerDefinition._CF_enumeration.addEnumeration(unicode_value=u'RELATIVISTIC', tag=u'RELATIVISTIC')
dopplerDefinition._InitializeFacetMap(dopplerDefinition._CF_enumeration)
# Register the binding so lookups by schema name resolve to this class.
Namespace.addCategoryObject('typeBinding', u'dopplerDefinition', dopplerDefinition)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}referencePosition
class referencePosition (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """The list of referencePosition is derived from STC metadata Linear String Implementation V0.10. Either a "known place" such as geocenter or barycenter, or a position defined in a known coordinate system. TOPOCENTER : Location of the observer/telescope, BARYCENTER : Barycenter of the solar system HELIOCENTER : Center of the sun GEOCENTER : Center of the earth GALACTIC_CENTER : Center of the Galaxy LOCAL_GROUP_CENTER : Center of the Local Group MOON : Center of the Moon EMBARYCENTER : Barycenter of the Earth-Moon system MERCURY : VENUS : MARS : JUPITER : SATURN : URANUS : NEPTUNE : PLUTO : UNKNOWNRefPos : Unknown origin ; the producer is responsible for assigning a default"""
    # Schema identity and XSD source location recorded by the generator.
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'referencePosition')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 518, 1)
    _Documentation = u'The list of referencePosition is derived from STC metadata Linear String Implementation V0.10. Either a "known place" such as geocenter or barycenter, or a position defined in a known coordinate system. TOPOCENTER : Location of the observer/telescope, BARYCENTER : Barycenter of the solar system HELIOCENTER : Center of the sun GEOCENTER : Center of the earth GALACTIC_CENTER : Center of the Galaxy LOCAL_GROUP_CENTER : Center of the Local Group MOON : Center of the Moon EMBARYCENTER : Barycenter of the Earth-Moon system MERCURY : VENUS : MARS : JUPITER : SATURN : URANUS : NEPTUNE : PLUTO : UNKNOWNRefPos : Unknown origin ; the producer is responsible for assigning a default'
# Enumeration facet: one entry per legal reference-position keyword listed
# in the docstring above.
referencePosition._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=referencePosition, enum_prefix=None)
referencePosition.TOPOCENTER = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'TOPOCENTER', tag=u'TOPOCENTER')
referencePosition.BARYCENTER = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'BARYCENTER', tag=u'BARYCENTER')
referencePosition.HELIOCENTER = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'HELIOCENTER', tag=u'HELIOCENTER')
referencePosition.GEOCENTER = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'GEOCENTER', tag=u'GEOCENTER')
referencePosition.GALACTIC_CENTER = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'GALACTIC_CENTER', tag=u'GALACTIC_CENTER')
referencePosition.LOCAL_GROUP_CENTER = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'LOCAL_GROUP_CENTER', tag=u'LOCAL_GROUP_CENTER')
referencePosition.MOON = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'MOON', tag=u'MOON')
referencePosition.EMBARYCENTER = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'EMBARYCENTER', tag=u'EMBARYCENTER')
referencePosition.MERCURY = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'MERCURY', tag=u'MERCURY')
referencePosition.VENUS = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'VENUS', tag=u'VENUS')
referencePosition.MARS = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'MARS', tag=u'MARS')
referencePosition.JUPITER = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'JUPITER', tag=u'JUPITER')
referencePosition.SATURN = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'SATURN', tag=u'SATURN')
referencePosition.URANUS = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'URANUS', tag=u'URANUS')
referencePosition.NEPTUNE = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'NEPTUNE', tag=u'NEPTUNE')
referencePosition.PLUTO = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'PLUTO', tag=u'PLUTO')
referencePosition.UNKNOWNRefPos = referencePosition._CF_enumeration.addEnumeration(unicode_value=u'UNKNOWNRefPos', tag=u'UNKNOWNRefPos')
referencePosition._InitializeFacetMap(referencePosition._CF_enumeration)
# Register the binding so lookups by schema name resolve to this class.
Namespace.addCategoryObject('typeBinding', u'referencePosition', referencePosition)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}coordRefFrame
class coordRefFrame (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """The different types of CoordRefFrame come from the list in STC ivoa V1.3. Sub list defined in 'STC-S metadata linear string implementation' is described here. Take care that for ICRS type : no equinox is required, FK[45] type needs an equinox and geodeticType refers to IAU 1976 reference spheroid . FK4 needs a Besselian epoch, FK5 needs a Julian epoch, ECLIPTIC Ecliptic coordinates shall be assumed to have an equinox of J2000 with respect to ICRS (to conform with common abuse, "J2000" and "FK5" will both be interpreted as "FK5 J2000" ; "B1950" and "FK4" will be interpreted as "FK4 B1950", GALACTIC : Galactic coordinates; first system, GALACTIC_II : Galactic coordinates; second system, SUPER_GALACTIC : SuperGalactic coordinates, GEO_C : The Geocentric (co-rotating) reference frame, GEO_D : The Geodetic reference frame; semi-major axis and inverse flattening may be provided to define the reference spheroid; the default is the IAU 1976 reference spheroid, UNKNOWNFrame : Unknown space reference frame; the producer is responsible for assigning a default"""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'coordRefFrame')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 563, 1)
    _Documentation = u'The different types of CoordRefFrame come from the list in STC ivoa V1.3. Sub list defined in \'STC-S metadata linear string implementation\' is described here. Take care that for ICRS type : no equinox is required, FK[45] type needs an equinox and geodeticType refers to IAU 1976 reference spheroid . FK4 needs a Besselian epoch, FK5 needs a Julian epoch, ECLIPTIC Ecliptic coordinates shall be assumed to have an equinox of J2000 with respect to ICRS (to conform with common abuse, "J2000" and "FK5" will both be interpreted as "FK5 J2000" ; "B1950" and "FK4" will be interpreted as "FK4 B1950", GALACTIC : Galactic coordinates; first system, GALACTIC_II : Galactic coordinates; second system, SUPER_GALACTIC : SuperGalactic coordinates, GEO_C : The Geocentric (co-rotating) reference frame, GEO_D : The Geodetic reference frame; semi-major axis and inverse flattening may be provided to define the reference spheroid; the default is the IAU 1976 reference spheroid, UNKNOWNFrame : Unknown space reference frame; the producer is responsible for assigning a default'
coordRefFrame._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=coordRefFrame, enum_prefix=None)
# Register every legal frame designation; each value is also exposed as a
# class attribute of the same name (e.g. coordRefFrame.ICRS).
for _frame_tag in (u'ICRS', u'FK4', u'FK5', u'J2000', u'B1950', u'ECLIPTIC',
                   u'GALACTIC_I', u'GALACTIC_II', u'SUPER_GALACTIC', u'GEO_C',
                   u'GEO_D', u'UNKNOWNFrame'):
    setattr(coordRefFrame, _frame_tag,
            coordRefFrame._CF_enumeration.addEnumeration(unicode_value=_frame_tag, tag=_frame_tag))
del _frame_tag
coordRefFrame._InitializeFacetMap(coordRefFrame._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'coordRefFrame', coordRefFrame)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}coordNaxesValue
class coordNaxesValue (pyxb.binding.datatypes.short):
    """An atomic simple type."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'coordNaxesValue')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 583, 1)
    _Documentation = None
# Constrain the number of coordinate axes to the closed interval [1, 3].
_naxes_max = pyxb.binding.facets.CF_maxInclusive(value_datatype=coordNaxesValue, value=pyxb.binding.datatypes.short(3))
_naxes_min = pyxb.binding.facets.CF_minInclusive(value_datatype=coordNaxesValue, value=pyxb.binding.datatypes.short(1))
coordNaxesValue._CF_maxInclusive = _naxes_max
coordNaxesValue._CF_minInclusive = _naxes_min
coordNaxesValue._InitializeFacetMap(_naxes_max,
   _naxes_min)
del _naxes_max, _naxes_min
Namespace.addCategoryObject('typeBinding', u'coordNaxesValue', coordNaxesValue)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}handednessValue
class handednessValue (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """An atomic simple type."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'handednessValue')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 589, 1)
    _Documentation = None
handednessValue._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=handednessValue, enum_prefix=None)
# A coordinate system is either left- or right-handed; register both values
# and expose each as a class attribute (handednessValue.left / .right).
for _hand in (u'left', u'right'):
    setattr(handednessValue, _hand,
            handednessValue._CF_enumeration.addEnumeration(unicode_value=_hand, tag=_hand))
del _hand
handednessValue._InitializeFacetMap(handednessValue._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'handednessValue', handednessValue)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}projection
class projection (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """The spherical-to-cartesian or cartesian-to-cartesian projection to be used; c-to-c projections are marked as such, all others are to be interpreted as s-to-c"""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'projection')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 630, 1)
    _Documentation = u'The spherical-to-cartesian or cartesian-to-cartesian projection to be used; c-to-c projections are marked as such, all others are to be interpreted as s-to-c'
projection._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=projection, enum_prefix=None)
# The empty string is a legal value; since an attribute cannot have an empty
# name it is registered under the explicit tag 'emptyString'.
projection.emptyString = projection._CF_enumeration.addEnumeration(unicode_value=u'', tag='emptyString')
# The remaining values are three-letter projection codes; attribute name, tag
# and lexical value coincide for each of them.
for _prj_code in (u'LOG', u'TAN', u'SIN', u'STG', u'ARC', u'ZEA', u'AIR',
                  u'CEA', u'CAR', u'MER', u'SFL', u'PAR', u'MOL', u'AIT',
                  u'COE', u'COD', u'COO', u'BON', u'PCO', u'TSC', u'CSC',
                  u'QSC'):
    setattr(projection, _prj_code,
            projection._CF_enumeration.addEnumeration(unicode_value=_prj_code, tag=_prj_code))
del _prj_code
projection._InitializeFacetMap(projection._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'projection', projection)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}timeScale
class timeScale (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """This type refers to : timeScaleType from stc IVOA.The actual time scale is derived from Representations of Time Coordinates in FITS Time and Relative Dimension in Space (V0.93) Astronomy and Astrophysics manuscript no. WCSPaperV0.93 ESO 2012 March 21, 2012. The original XML schema is derived from stc-v1.30 IVOA. TT Terrestrial Time; the basis for ephemerides, TDT Obsolete synonym for TT ET Ephemeris Time; predecessor of, and continuous with, TT TDB Barycentric Dynamic Time:the independent variable in planetay ephemerides; time at the solar system barycenter synchronous with TT on an annual basis; sometimes called TEB Barycentric Ephemeris Time: time at the solar system barycenter synchronous with TT on an annual basis; a deprecated synonym of TDB.TCG Terrestrial Coordinate Time TAI International Atomic Time; runs 32.184 s behind TT IAT Synonym for TAI UTC Coordinated Universal Time; currently (2006) runs 33 leapseconds behind TAI GPS Global Positioning System's time scale; runs 19 s behind TAI, 51.184 s behind TT LST Local Siderial Time; only for ground-based observations; note that the second is shorter GMST Greenwich Mean Siderial Time; only for ground-based observations; note that the second is shorter LOCAL Only to be used for simulations in conjunction with a relocatable spatial frame. The enumeration comes from paragraph 5-1 of STC-S metadata linear string implementation V0.10."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'timeScale')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 754, 1)
    _Documentation = u"This type refers to : timeScaleType from stc IVOA.The actual time scale is derived from Representations of Time Coordinates in FITS Time and Relative Dimension in Space (V0.93) Astronomy and Astrophysics manuscript no. WCSPaperV0.93 ESO 2012 March 21, 2012. The original XML schema is derived from stc-v1.30 IVOA. TT Terrestrial Time; the basis for ephemerides, TDT Obsolete synonym for TT ET Ephemeris Time; predecessor of, and continuous with, TT TDB Barycentric Dynamic Time:the independent variable in planetay ephemerides; time at the solar system barycenter synchronous with TT on an annual basis; sometimes called TEB Barycentric Ephemeris Time: time at the solar system barycenter synchronous with TT on an annual basis; a deprecated synonym of TDB.TCG Terrestrial Coordinate Time TAI International Atomic Time; runs 32.184 s behind TT IAT Synonym for TAI UTC Coordinated Universal Time; currently (2006) runs 33 leapseconds behind TAI GPS Global Positioning System's time scale; runs 19 s behind TAI, 51.184 s behind TT LST Local Siderial Time; only for ground-based observations; note that the second is shorter GMST Greenwich Mean Siderial Time; only for ground-based observations; note that the second is shorter LOCAL Only to be used for simulations in conjunction with a relocatable spatial frame. The enumeration comes from paragraph 5-1 of STC-S metadata linear string implementation V0.10."
timeScale._CF_enumeration = pyxb.binding.facets.CF_enumeration(value_datatype=timeScale, enum_prefix=None)
# Register every recognised time-scale designation; each value is also exposed
# as a class attribute of the same name (e.g. timeScale.UTC).
for _scale in (u'TT', u'TDT', u'ET', u'TDB', u'TEB', u'TCG', u'TCB', u'TAI',
               u'IAT', u'UTC', u'GPS', u'LST', u'GMST', u'LOCAL'):
    setattr(timeScale, _scale,
            timeScale._CF_enumeration.addEnumeration(unicode_value=_scale, tag=_scale))
del _scale
timeScale._InitializeFacetMap(timeScale._CF_enumeration)
Namespace.addCategoryObject('typeBinding', u'timeScale', timeScale)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}secDateTime
class secDateTime (pyxb.binding.datatypes.dateTime):
    """A date-time value with a precision of one second. This date-time format allows the definition of TAI date and UTC date. date-time value is restricted to the yyyy-mm-ddThh:mm:ss[Z] pattern and excluding thus : a fractional seconds definition (value has a precision of one second), a TimeZone definition."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'secDateTime')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 824, 1)
    _Documentation = u'A date-time value with a precision of one second. This date-time format allows the definition of TAI date and UTC date. date-time value is restricted to the yyyy-mm-ddThh:mm:ss[Z] pattern and excluding thus : a fractional seconds definition (value has a precision of one second), a TimeZone definition.'
# Restrict the lexical form to yyyy-mm-ddThh:mm:ss with an optional trailing Z
# (no fractional seconds, no numeric time-zone offset).
_pat = pyxb.binding.facets.CF_pattern()
_pat.addPattern(pattern=u'\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\dZ?')
secDateTime._CF_pattern = _pat
secDateTime._InitializeFacetMap(_pat)
del _pat
Namespace.addCategoryObject('typeBinding', u'secDateTime', secDateTime)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}millisecDateTime
class millisecDateTime (pyxb.binding.datatypes.dateTime):
    """A date-time value with a precision of one millisecond. This date-time format allows the definition of TAI date and UTC date. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.sss[Z] pattern"""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'millisecDateTime')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 835, 1)
    _Documentation = u'A date-time value with a precision of one millisecond. This date-time format allows the definition of TAI date and UTC date. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.sss[Z] pattern'
# Lexical form: exactly three fractional digits (milliseconds) and an
# optional trailing Z.
_pat = pyxb.binding.facets.CF_pattern()
_pat.addPattern(pattern=u'\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d\\.\\d\\d\\dZ?')
millisecDateTime._CF_pattern = _pat
millisecDateTime._InitializeFacetMap(_pat)
del _pat
Namespace.addCategoryObject('typeBinding', u'millisecDateTime', millisecDateTime)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}microsecDateTime
class microsecDateTime (pyxb.binding.datatypes.dateTime):
    """A date-time value with a precision of one microsecond. This date-time format allows the definition of TAI date and UTC date. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.ssssss[Z] pattern."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'microsecDateTime')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 846, 1)
    _Documentation = u'A date-time value with a precision of one microsecond. This date-time format allows the definition of TAI date and UTC date. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.ssssss[Z] pattern.'
# Lexical form: exactly six fractional digits (microseconds) and an optional
# trailing Z.
_pat = pyxb.binding.facets.CF_pattern()
_pat.addPattern(pattern=u'\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d\\.\\d\\d\\d\\d\\d\\dZ?')
microsecDateTime._CF_pattern = _pat
microsecDateTime._InitializeFacetMap(_pat)
del _pat
Namespace.addCategoryObject('typeBinding', u'microsecDateTime', microsecDateTime)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}dateTime
class dateTime (pyxb.binding.datatypes.dateTime):
    """A date-time value. This date-time format allows the definition of TAI date and UTC date. Date-time value restricted to the yyyy-mm-ddThh:mm:ss[.sss][Z] pattern and excluding thus a TimeZone definition."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'dateTime')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 857, 1)
    _Documentation = u'A date-time value. This date-time format allows the definition of TAI date and UTC date. Date-time value restricted to the yyyy-mm-ddThh:mm:ss[.sss][Z] pattern and excluding thus a TimeZone definition.'
# Lexical form: optional fractional seconds of any length, optional trailing Z,
# no numeric time-zone offset.
_pat = pyxb.binding.facets.CF_pattern()
_pat.addPattern(pattern=u'\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d(\\.\\d+)?Z?')
dateTime._CF_pattern = _pat
dateTime._InitializeFacetMap(_pat)
del _pat
Namespace.addCategoryObject('typeBinding', u'dateTime', dateTime)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}TAIMicrosecDateTime
class TAIMicrosecDateTime (pyxb.binding.datatypes.dateTime):
    """An non UTC date-time value with a precision of one microsecond. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.ssssss pattern"""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'TAIMicrosecDateTime')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 868, 1)
    _Documentation = u'An non UTC date-time value with a precision of one microsecond. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.ssssss pattern'
# Lexical form: exactly six fractional digits and NO time-zone designator
# (this is a TAI, i.e. non-UTC, timestamp).
_pat = pyxb.binding.facets.CF_pattern()
_pat.addPattern(pattern=u'\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d\\.\\d\\d\\d\\d\\d\\d')
TAIMicrosecDateTime._CF_pattern = _pat
TAIMicrosecDateTime._InitializeFacetMap(_pat)
del _pat
Namespace.addCategoryObject('typeBinding', u'TAIMicrosecDateTime', TAIMicrosecDateTime)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}TAIMillisecsecDateTime
class TAIMillisecsecDateTime (pyxb.binding.datatypes.dateTime):
    """An non UTC date-time value with a precision of one millisecond. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.sss pattern"""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'TAIMillisecsecDateTime')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 878, 1)
    # NOTE(review): the upstream documentation string said '.ssssss'
    # (microseconds), contradicting both the type name and the 3-digit
    # pattern below; corrected to '.sss'.
    _Documentation = u'An non UTC date-time value with a precision of one millisecond. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.sss pattern'
TAIMillisecsecDateTime._CF_pattern = pyxb.binding.facets.CF_pattern()
# Exactly three fractional digits (milliseconds), no time-zone designator
# (this is a TAI, i.e. non-UTC, timestamp).
TAIMillisecsecDateTime._CF_pattern.addPattern(pattern=u'\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d\\.\\d\\d\\d')
TAIMillisecsecDateTime._InitializeFacetMap(TAIMillisecsecDateTime._CF_pattern)
Namespace.addCategoryObject('typeBinding', u'TAIMillisecsecDateTime', TAIMillisecsecDateTime)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}UTCDateTime
class UTCDateTime (pyxb.binding.datatypes.dateTime):
    """An UTC date-time value. date-time value restricted to the
				yyyy-mm-ddThh:mm:ss(.sss) Z pattern. Z character is mandatory."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'UTCDateTime')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 899, 1)
    _Documentation = u'An UTC date-time value. date-time value restricted to the\n\t\t\t\t\t\tyyyy-mm-ddThh:mm:ss(.sss) Z pattern. Z character is mandatory.'
# Lexical form: optional fractional seconds of any length followed by a
# MANDATORY trailing Z (UTC designator).
_pat = pyxb.binding.facets.CF_pattern()
_pat.addPattern(pattern=u'\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d(\\.\\d+)?Z')
UTCDateTime._CF_pattern = _pat
UTCDateTime._InitializeFacetMap(_pat)
del _pat
Namespace.addCategoryObject('typeBinding', u'UTCDateTime', UTCDateTime)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}UTCMicrosecDateTime
class UTCMicrosecDateTime (pyxb.binding.datatypes.dateTime):
    """An UTC date-time value with a precision of one microsecond. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.ssssssZ pattern"""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'UTCMicrosecDateTime')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 921, 1)
    _Documentation = u'An UTC date-time value with a precision of one microsecond. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.ssssssZ pattern'
UTCMicrosecDateTime._CF_pattern = pyxb.binding.facets.CF_pattern()
# NOTE(review): fixed two defects in the facet regex relative to the
# documented lexical form: the decimal point was an unescaped '.' (which
# matched any character) and the trailing '\d?' also accepted five fractional
# digits.  Exactly six digits (microseconds) and a mandatory Z are required.
UTCMicrosecDateTime._CF_pattern.addPattern(pattern=u'\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d\\.\\d\\d\\d\\d\\d\\dZ')
UTCMicrosecDateTime._InitializeFacetMap(UTCMicrosecDateTime._CF_pattern)
Namespace.addCategoryObject('typeBinding', u'UTCMicrosecDateTime', UTCMicrosecDateTime)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}UTCMillisecDateTime
class UTCMillisecDateTime (pyxb.binding.datatypes.dateTime):
    """An UTC date-time value with a precision of one millisecond. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.sssZ pattern"""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'UTCMillisecDateTime')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 942, 1)
    _Documentation = u'An UTC date-time value with a precision of one millisecond. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.sssZ pattern'
UTCMillisecDateTime._CF_pattern = pyxb.binding.facets.CF_pattern()
# NOTE(review): the original regex ended in '\d\d\d?Z', which also accepted
# two fractional digits; the documented '.sss' lexical form requires exactly
# three (milliseconds) plus a mandatory Z.
UTCMillisecDateTime._CF_pattern.addPattern(pattern=u'\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d\\.\\d\\d\\dZ')
UTCMillisecDateTime._InitializeFacetMap(UTCMillisecDateTime._CF_pattern)
Namespace.addCategoryObject('typeBinding', u'UTCMillisecDateTime', UTCMillisecDateTime)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}UTCSecDateTime
class UTCSecDateTime (pyxb.binding.datatypes.dateTime):
    """An UTC date-time value with a precision of one second. date-time value restricted to the yyyy-mm-ddThh:mm:ssZ pattern and excluding thus :a fractional seconds definition (value has a precision of one second), a TimeZone definition. """
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'UTCSecDateTime')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 963, 1)
    _Documentation = u'An UTC date-time value with a precision of one second. date-time value restricted to the yyyy-mm-ddThh:mm:ssZ pattern and excluding thus :a fractional seconds definition (value has a precision of one second), a TimeZone definition. '
UTCSecDateTime._CF_pattern = pyxb.binding.facets.CF_pattern()
# NOTE(review): the original regex ended in '\d\d?Z', which also accepted a
# one-digit seconds field; the documented yyyy-mm-ddThh:mm:ssZ lexical form
# requires a two-digit seconds field and a mandatory Z.
UTCSecDateTime._CF_pattern.addPattern(pattern=u'\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\dZ')
UTCSecDateTime._InitializeFacetMap(UTCSecDateTime._CF_pattern)
Namespace.addCategoryObject('typeBinding', u'UTCSecDateTime', UTCSecDateTime)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}UTCTenthMicrosecDateTime
class UTCTenthMicrosecDateTime (pyxb.binding.datatypes.dateTime):
    """An UTC date-time value with a precision of one tenth-of-a-microsecond. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.sssssssZ pattern."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'UTCTenthMicrosecDateTime')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 994, 1)
    _Documentation = u'An UTC date-time value with a precision of one tenth-of-a-microsecond. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.sssssssZ pattern.'
UTCTenthMicrosecDateTime._CF_pattern = pyxb.binding.facets.CF_pattern()
# NOTE(review): the original regex made the seventh fractional digit optional
# ('\d?'); the documented '.sssssss' lexical form (tenth of a microsecond)
# requires exactly seven digits plus a mandatory Z.
UTCTenthMicrosecDateTime._CF_pattern.addPattern(pattern=u'\\d\\d\\d\\d-\\d\\d-\\d\\dT\\d\\d:\\d\\d:\\d\\d\\.\\d\\d\\d\\d\\d\\d\\dZ')
UTCTenthMicrosecDateTime._InitializeFacetMap(UTCTenthMicrosecDateTime._CF_pattern)
Namespace.addCategoryObject('typeBinding', u'UTCTenthMicrosecDateTime', UTCTenthMicrosecDateTime)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}nonUTCTenthMicrosecDateTime
class nonUTCTenthMicrosecDateTime (pyxb.binding.datatypes.dateTime):
    """A non UTC (a TAI) date-time value with a precision of one tenth-of-a-microsecond. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.sssssss pattern."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'nonUTCTenthMicrosecDateTime')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 1005, 1)
    _Documentation = u'A non UTC (a TAI) date-time value with a precision of one tenth-of-a-microsecond. Date-time value restricted to the yyyy-mm-ddThh:mm:ss.sssssss pattern.'
nonUTCTenthMicrosecDateTime._CF_pattern = pyxb.binding.facets.CF_pattern()
# NOTE(review): the decimal point was an unescaped '.' in the original regex,
# so ANY character was accepted between seconds and the seven fractional
# digits; it is now escaped to match a literal dot only.
nonUTCTenthMicrosecDateTime._CF_pattern.addPattern(pattern=u'\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d{7}')
nonUTCTenthMicrosecDateTime._InitializeFacetMap(nonUTCTenthMicrosecDateTime._CF_pattern)
Namespace.addCategoryObject('typeBinding', u'nonUTCTenthMicrosecDateTime', nonUTCTenthMicrosecDateTime)
# Atomic simple type: {http://euclid.esa.org/schema/bas/imp/stc}secDuration
class secDuration (pyxb.binding.datatypes.string):
    """Duration in seconds. Accuracy is microsec."""
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'secDuration')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 1016, 1)
    _Documentation = u'Duration in seconds. Accuracy is microsec.'
secDuration._CF_pattern = pyxb.binding.facets.CF_pattern()
# NOTE(review): the original regex '\d(\.\d{0,6})' allowed only a single
# integer digit (durations >= 10 s were rejected) and made the fractional
# group mandatory (plain integers were rejected).  Generalized to any number
# of integer digits with an optional fraction of up to six digits
# (microsecond accuracy); every previously valid string remains valid.
secDuration._CF_pattern.addPattern(pattern=u'\\d+(\\.\\d{0,6})?')
secDuration._InitializeFacetMap(secDuration._CF_pattern)
Namespace.addCategoryObject('typeBinding', u'secDuration', secDuration)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType with content type ELEMENT_ONLY
class coordScalarIntervalType (pyxb.binding.basis.complexTypeDefinition):
    """Scalar coordinate interval type defined by the sequence : Lower bound of interval, limit included. Upper bound of interval, limit included. Two optional attributes are : Fraction of interval that is occupied by data and frameId."""
    # Content model: a sequence of child elements only (no simple-type value).
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'coordScalarIntervalType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 122, 1)
    # Filled in below with this type's element/attribute declarations.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element LoLimit uses Python identifier LoLimit
    __LoLimit = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'LoLimit'), 'LoLimit', '__httpeuclid_esa_orgschemabasimpstc_coordScalarIntervalType_LoLimit', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 127, 3), )
    LoLimit = property(__LoLimit.value, __LoLimit.set, None, u'Lower bound of interval.')
    # Element HiLimit uses Python identifier HiLimit
    __HiLimit = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'HiLimit'), 'HiLimit', '__httpeuclid_esa_orgschemabasimpstc_coordScalarIntervalType_HiLimit', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 132, 3), )
    HiLimit = property(__HiLimit.value, __HiLimit.set, None, u'Upper bound of interval.')
    # Attribute lo_include uses Python identifier lo_include
    # Optional boolean; defaults to 'true' (lower bound included in interval).
    __lo_include = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'lo_include'), 'lo_include', '__httpeuclid_esa_orgschemabasimpstc_coordScalarIntervalType_lo_include', pyxb.binding.datatypes.boolean, unicode_default=u'true')
    __lo_include._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 138, 2)
    __lo_include._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 138, 2)
    lo_include = property(__lo_include.value, __lo_include.set, None, u'Limit to be included, if true lo limit is included.')
    # Attribute hi_include uses Python identifier hi_include
    # Optional boolean; defaults to 'true' (upper bound included in interval).
    __hi_include = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'hi_include'), 'hi_include', '__httpeuclid_esa_orgschemabasimpstc_coordScalarIntervalType_hi_include', pyxb.binding.datatypes.boolean, unicode_default=u'true')
    __hi_include._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 143, 2)
    __hi_include._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 143, 2)
    hi_include = property(__hi_include.value, __hi_include.set, None, u'Limit to be included, if true hi limit is included.')
    # Attribute fill_factor uses Python identifier fill_factor
    # Optional float; defaults to '1.0' (interval fully occupied by data).
    __fill_factor = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'fill_factor'), 'fill_factor', '__httpeuclid_esa_orgschemabasimpstc_coordScalarIntervalType_fill_factor', pyxb.binding.datatypes.float, unicode_default=u'1.0')
    __fill_factor._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 148, 2)
    __fill_factor._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 148, 2)
    fill_factor = property(__fill_factor.value, __fill_factor.set, None, u'Fraction of interval that is occupied by data.')
    # Attribute FrameId uses Python identifier FrameId
    # Optional string with no default value.
    __FrameId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'FrameId'), 'FrameId', '__httpeuclid_esa_orgschemabasimpstc_coordScalarIntervalType_FrameId', pyxb.binding.datatypes.string)
    __FrameId._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 153, 2)
    __FrameId._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 153, 2)
    FrameId = property(__FrameId.value, __FrameId.set, None, None)
    # Register the declarations above with the content model maps used by the
    # PyXB runtime for validation and (de)serialization.
    _ElementMap.update({
        __LoLimit.name() : __LoLimit,
        __HiLimit.name() : __HiLimit
    })
    _AttributeMap.update({
        __lo_include.name() : __lo_include,
        __hi_include.name() : __hi_include,
        __fill_factor.name() : __fill_factor,
        __FrameId.name() : __FrameId
    })
Namespace.addCategoryObject('typeBinding', u'coordScalarIntervalType', coordScalarIntervalType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}coord2VecIntervalType with content type ELEMENT_ONLY
class coord2VecIntervalType (pyxb.binding.basis.complexTypeDefinition):
    """2-D coordinate interval type"""
    # NOTE(review): PyXB-generated binding (pyxbgen output). The mangled
    # identifier strings, Location arguments and map contents must match the
    # generator's output exactly — change the XSD and regenerate rather than
    # hand-editing these declarations.
    _TypeDefinition = None  # complex content: no underlying simple type
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'coord2VecIntervalType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 156, 1)
    # Per-class maps filled in below; PyXB uses them to dispatch element and
    # attribute content during (de)serialization.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element LoLimit2Vec uses Python identifier LoLimit2Vec
    __LoLimit2Vec = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'LoLimit2Vec'), 'LoLimit2Vec', '__httpeuclid_esa_orgschemabasimpstc_coord2VecIntervalType_LoLimit2Vec', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 161, 3), )
    LoLimit2Vec = property(__LoLimit2Vec.value, __LoLimit2Vec.set, None, None)
    # Element HiLimit2Vec uses Python identifier HiLimit2Vec
    __HiLimit2Vec = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'HiLimit2Vec'), 'HiLimit2Vec', '__httpeuclid_esa_orgschemabasimpstc_coord2VecIntervalType_HiLimit2Vec', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 162, 3), )
    HiLimit2Vec = property(__HiLimit2Vec.value, __HiLimit2Vec.set, None, None)
    # Attribute fill_factor uses Python identifier fill_factor
    # (optional xs:float, schema default "1.0")
    __fill_factor = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'fill_factor'), 'fill_factor', '__httpeuclid_esa_orgschemabasimpstc_coord2VecIntervalType_fill_factor', pyxb.binding.datatypes.float, unicode_default=u'1.0')
    __fill_factor._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 164, 2)
    __fill_factor._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 164, 2)
    fill_factor = property(__fill_factor.value, __fill_factor.set, None, u'Fraction of interval that is occupied by data')
    # Attribute FrameId uses Python identifier FrameId (optional xs:string)
    __FrameId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'FrameId'), 'FrameId', '__httpeuclid_esa_orgschemabasimpstc_coord2VecIntervalType_FrameId', pyxb.binding.datatypes.string)
    __FrameId._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 169, 2)
    __FrameId._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 169, 2)
    FrameId = property(__FrameId.value, __FrameId.set, None, None)
    _ElementMap.update({
        __LoLimit2Vec.name() : __LoLimit2Vec,
        __HiLimit2Vec.name() : __HiLimit2Vec
    })
    _AttributeMap.update({
        __fill_factor.name() : __fill_factor,
        __FrameId.name() : __FrameId
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'coord2VecIntervalType', coord2VecIntervalType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}coord3VecIntervalType with content type ELEMENT_ONLY
class coord3VecIntervalType (pyxb.binding.basis.complexTypeDefinition):
    """3-D coordinate interval type"""
    # NOTE(review): PyXB-generated binding; structurally parallel to
    # coord2VecIntervalType but with 3-vector limit elements. Do not hand-edit;
    # regenerate from the XSD.
    _TypeDefinition = None  # complex content: no underlying simple type
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'coord3VecIntervalType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 172, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element LoLimit3Vec uses Python identifier LoLimit3Vec
    __LoLimit3Vec = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'LoLimit3Vec'), 'LoLimit3Vec', '__httpeuclid_esa_orgschemabasimpstc_coord3VecIntervalType_LoLimit3Vec', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 177, 3), )
    LoLimit3Vec = property(__LoLimit3Vec.value, __LoLimit3Vec.set, None, None)
    # Element HiLimit3Vec uses Python identifier HiLimit3Vec
    __HiLimit3Vec = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'HiLimit3Vec'), 'HiLimit3Vec', '__httpeuclid_esa_orgschemabasimpstc_coord3VecIntervalType_HiLimit3Vec', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 178, 3), )
    HiLimit3Vec = property(__HiLimit3Vec.value, __HiLimit3Vec.set, None, None)
    # Attribute fill_factor uses Python identifier fill_factor
    # (optional xs:float, schema default "1.0")
    __fill_factor = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'fill_factor'), 'fill_factor', '__httpeuclid_esa_orgschemabasimpstc_coord3VecIntervalType_fill_factor', pyxb.binding.datatypes.float, unicode_default=u'1.0')
    __fill_factor._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 180, 2)
    __fill_factor._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 180, 2)
    fill_factor = property(__fill_factor.value, __fill_factor.set, None, u'Fraction of interval that is occupied by data')
    # Attribute FrameId uses Python identifier FrameId (optional xs:string)
    __FrameId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'FrameId'), 'FrameId', '__httpeuclid_esa_orgschemabasimpstc_coord3VecIntervalType_FrameId', pyxb.binding.datatypes.string)
    __FrameId._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 185, 2)
    __FrameId._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 185, 2)
    FrameId = property(__FrameId.value, __FrameId.set, None, None)
    _ElementMap.update({
        __LoLimit3Vec.name() : __LoLimit3Vec,
        __HiLimit3Vec.name() : __HiLimit3Vec
    })
    _AttributeMap.update({
        __fill_factor.name() : __fill_factor,
        __FrameId.name() : __FrameId
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'coord3VecIntervalType', coord3VecIntervalType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}allSkyType with content type EMPTY
class allSkyType (pyxb.binding.basis.complexTypeDefinition):
    """AllSky type: just a shape without any child elements"""
    # NOTE(review): PyXB-generated binding. EMPTY content model: the type
    # carries no child elements and no attributes, so both maps stay empty.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'allSkyType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 239, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Empty updates are generator boilerplate kept for uniformity.
    _ElementMap.update({
    })
    _AttributeMap.update({
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'allSkyType', allSkyType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}circleType with content type ELEMENT_ONLY
class circleType (pyxb.binding.basis.complexTypeDefinition):
    """Circle shape: center and radius"""
    # NOTE(review): PyXB-generated binding; do not hand-edit, regenerate from
    # the XSD.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'circleType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 245, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Center uses Python identifier Center
    __Center = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Center'), 'Center', '__httpeuclid_esa_orgschemabasimpstc_circleType_Center', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 250, 3), )
    Center = property(__Center.value, __Center.set, None, u"The coordinates of the circle's center")
    # Element Radius uses Python identifier Radius
    __Radius = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Radius'), 'Radius', '__httpeuclid_esa_orgschemabasimpstc_circleType_Radius', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 255, 3), )
    Radius = property(__Radius.value, __Radius.set, None, u'The radius of the circle')
    _ElementMap.update({
        __Center.name() : __Center,
        __Radius.name() : __Radius
    })
    _AttributeMap.update({
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'circleType', circleType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}ellipseType with content type ELEMENT_ONLY
class ellipseType (pyxb.binding.basis.complexTypeDefinition):
    """Ellipse shape: center, semi-major, semi-minor axis and position angle; in spherical coordinates defined as the shape cut out of the sphere by a cone with elliptical cross-section"""
    # NOTE(review): PyXB-generated binding; do not hand-edit, regenerate from
    # the XSD.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'ellipseType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 263, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Center uses Python identifier Center
    __Center = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Center'), 'Center', '__httpeuclid_esa_orgschemabasimpstc_ellipseType_Center', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 268, 3), )
    # NOTE(review): the property doc below ("circle's center") looks like a
    # copy-paste in the source XSD annotation — it is the ellipse's center.
    # Fix belongs in the schema, not in this generated text.
    Center = property(__Center.value, __Center.set, None, u"The coordinates of the circle's center")
    # Element SemiMajorAxis uses Python identifier SemiMajorAxis
    __SemiMajorAxis = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'SemiMajorAxis'), 'SemiMajorAxis', '__httpeuclid_esa_orgschemabasimpstc_ellipseType_SemiMajorAxis', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 273, 3), )
    # NOTE(review): "The radius of the circle" is likewise a schema-annotation
    # copy-paste; this element is the semi-major axis length.
    SemiMajorAxis = property(__SemiMajorAxis.value, __SemiMajorAxis.set, None, u'The radius of the circle')
    # Element SemiMinorAxis uses Python identifier SemiMinorAxis
    __SemiMinorAxis = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'SemiMinorAxis'), 'SemiMinorAxis', '__httpeuclid_esa_orgschemabasimpstc_ellipseType_SemiMinorAxis', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 278, 3), )
    SemiMinorAxis = property(__SemiMinorAxis.value, __SemiMinorAxis.set, None, u'Half the minor axis of the ellipse, in radius_unit')
    # Element PosAngle uses Python identifier PosAngle
    __PosAngle = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'PosAngle'), 'PosAngle', '__httpeuclid_esa_orgschemabasimpstc_ellipseType_PosAngle', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 283, 3), )
    PosAngle = property(__PosAngle.value, __PosAngle.set, None, u'Position angle of major axis (Radius).')
    _ElementMap.update({
        __Center.name() : __Center,
        __SemiMajorAxis.name() : __SemiMajorAxis,
        __SemiMinorAxis.name() : __SemiMinorAxis,
        __PosAngle.name() : __PosAngle
    })
    _AttributeMap.update({
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'ellipseType', ellipseType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}smallCircleType with content type ELEMENT_ONLY
class smallCircleType (pyxb.binding.basis.complexTypeDefinition):
    """smallCircleType indicates in polygons that side is along small circle; with optional pole"""
    # NOTE(review): PyXB-generated binding; do not hand-edit, regenerate from
    # the XSD.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'smallCircleType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 292, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Pole uses Python identifier Pole
    __Pole = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Pole'), 'Pole', '__httpeuclid_esa_orgschemabasimpstc_smallCircleType_Pole', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 297, 3), )
    Pole = property(__Pole.value, __Pole.set, None, None)
    _ElementMap.update({
        __Pole.name() : __Pole
    })
    _AttributeMap.update({
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'smallCircleType', smallCircleType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}vertexType with content type ELEMENT_ONLY
class vertexType (pyxb.binding.basis.complexTypeDefinition):
    """Vertex is a position with optional SmallCircle element; the SmallCircle element indicates that the polygon side formed by that vertex and its predecessor vertex is a small circle, rather than a great circle; SmallCircle has no meaning in Cartesian coordinates"""
    # NOTE(review): PyXB-generated binding; do not hand-edit, regenerate from
    # the XSD.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'vertexType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 301, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Position uses Python identifier Position
    __Position = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Position'), 'Position', '__httpeuclid_esa_orgschemabasimpstc_vertexType_Position', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 306, 3), )
    Position = property(__Position.value, __Position.set, None, None)
    # Element SmallCircle uses Python identifier SmallCircle
    __SmallCircle = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'SmallCircle'), 'SmallCircle', '__httpeuclid_esa_orgschemabasimpstc_vertexType_SmallCircle', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 307, 3), )
    SmallCircle = property(__SmallCircle.value, __SmallCircle.set, None, None)
    _ElementMap.update({
        __Position.name() : __Position,
        __SmallCircle.name() : __SmallCircle
    })
    _AttributeMap.update({
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'vertexType', vertexType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}polygonType with content type ELEMENT_ONLY
class polygonType (pyxb.binding.basis.complexTypeDefinition):
    """Polygon: one or more vertices; counter-clockwise (as seen from "inside" or from "top") encircled area is enclosed; sides should span less than 180 deg in each coordinate if spherical; a polygon may not intersect itself"""
    # NOTE(review): PyXB-generated binding; do not hand-edit, regenerate from
    # the XSD.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'polygonType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 311, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Vertex uses Python identifier Vertex
    # (declared with is_plural=True: the binding holds a list of vertices)
    __Vertex = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Vertex'), 'Vertex', '__httpeuclid_esa_orgschemabasimpstc_polygonType_Vertex', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 316, 3), )
    Vertex = property(__Vertex.value, __Vertex.set, None, u'In order to form polygons, vertices are to be connected with straight line segments. In the case of spherical coordinates: greatcircle segments; if a smallCircle element si present, the vertex and its predecessor are to be connected with a smallcircle, by default in the CoordSys that is referenced; optionally, a pole may be specified (other than the CoordSys pole) that defines the smallcircle system')
    _ElementMap.update({
        __Vertex.name() : __Vertex
    })
    _AttributeMap.update({
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'polygonType', polygonType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}boxType with content type ELEMENT_ONLY
class boxType (pyxb.binding.basis.complexTypeDefinition):
    """Box shape: a rectangle defined by its center and size on both dimensions; since it is a polygon, it is redundant, but simple rectangles with great circle sides are awkward to define in spherical coordinates"""
    # NOTE(review): PyXB-generated binding; do not hand-edit, regenerate from
    # the XSD.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'boxType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 324, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Center uses Python identifier Center
    __Center = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Center'), 'Center', '__httpeuclid_esa_orgschemabasimpstc_boxType_Center', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 329, 3), )
    Center = property(__Center.value, __Center.set, None, u"The coordinates of the box's center")
    # Element Size uses Python identifier Size
    __Size = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Size'), 'Size', '__httpeuclid_esa_orgschemabasimpstc_boxType_Size', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 334, 3), )
    Size = property(__Size.value, __Size.set, None, u"The lengths of the box's sides")
    _ElementMap.update({
        __Center.name() : __Center,
        __Size.name() : __Size
    })
    _AttributeMap.update({
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'boxType', boxType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}sectorType with content type ELEMENT_ONLY
class sectorType (pyxb.binding.basis.complexTypeDefinition):
    """A sector is the counter-clockwise area between two half-lines"""
    # NOTE(review): PyXB-generated binding; do not hand-edit, regenerate from
    # the XSD.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'sectorType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 342, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Position uses Python identifier Position
    __Position = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Position'), 'Position', '__httpeuclid_esa_orgschemabasimpstc_sectorType_Position', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 347, 3), )
    Position = property(__Position.value, __Position.set, None, u'The vertex position of the sector')
    # Element PosAngle1 uses Python identifier PosAngle1
    # NOTE(review): both PosAngle docs below say "cw" while the class doc says
    # the sector is the counter-clockwise area — the schema annotations look
    # inconsistent; confirm against the STC standard before relying on them.
    __PosAngle1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'PosAngle1'), 'PosAngle1', '__httpeuclid_esa_orgschemabasimpstc_sectorType_PosAngle1', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 352, 3), )
    PosAngle1 = property(__PosAngle1.value, __PosAngle1.set, None, u'The area cw from this position angle is included')
    # Element PosAngle2 uses Python identifier PosAngle2
    __PosAngle2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'PosAngle2'), 'PosAngle2', '__httpeuclid_esa_orgschemabasimpstc_sectorType_PosAngle2', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 357, 3), )
    PosAngle2 = property(__PosAngle2.value, __PosAngle2.set, None, u'The area cw from this position angle is included.')
    _ElementMap.update({
        __Position.name() : __Position,
        __PosAngle1.name() : __PosAngle1,
        __PosAngle2.name() : __PosAngle2
    })
    _AttributeMap.update({
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'sectorType', sectorType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}halfspaceType with content type ELEMENT_ONLY
class halfspaceType (pyxb.binding.basis.complexTypeDefinition):
    """An area on the unit sphere defined by the intersection with a plane"""
    # NOTE(review): PyXB-generated binding; do not hand-edit, regenerate from
    # the XSD.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'halfspaceType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 372, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Vector uses Python identifier Vector
    __Vector = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Vector'), 'Vector', '__httpeuclid_esa_orgschemabasimpstc_halfspaceType_Vector', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 377, 3), )
    Vector = property(__Vector.value, __Vector.set, None, u'This needs to be a spherical coordinate vector; it is the unit vector that is normal to the plane that forms a constraint for a convex')
    # Element Offset uses Python identifier Offset
    __Offset = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Offset'), 'Offset', '__httpeuclid_esa_orgschemabasimpstc_halfspaceType_Offset', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 382, 3), )
    Offset = property(__Offset.value, __Offset.set, None, u'The distance along the normal vector where the constraint plane intersects that vector; if positive, the spherical sector on the far side (seen from the center) is selected; if negative, the point of intersection is in the opposite direction of the vector, resulting in more than a hemisphere; the valid range is -1.0 to +1.0')
    _ElementMap.update({
        __Vector.name() : __Vector,
        __Offset.name() : __Offset
    })
    _AttributeMap.update({
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'halfspaceType', halfspaceType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}convexType with content type ELEMENT_ONLY
class convexType (pyxb.binding.basis.complexTypeDefinition):
    """A convex polygon defined by one or more Constraints"""
    # NOTE(review): PyXB-generated binding; do not hand-edit, regenerate from
    # the XSD.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'convexType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 390, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Halfspace uses Python identifier Halfspace
    # (declared with is_plural=True: the binding holds a list of halfspaces)
    __Halfspace = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Halfspace'), 'Halfspace', '__httpeuclid_esa_orgschemabasimpstc_convexType_Halfspace', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 395, 3), )
    Halfspace = property(__Halfspace.value, __Halfspace.set, None, None)
    _ElementMap.update({
        __Halfspace.name() : __Halfspace
    })
    _AttributeMap.update({
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'convexType', convexType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}unionType with content type ELEMENT_ONLY
class unionType (pyxb.binding.basis.complexTypeDefinition):
    """The union of two or more regions is a region"""
    # NOTE(review): PyXB-generated binding; do not hand-edit, regenerate from
    # the XSD.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'unionType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 401, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Region uses Python identifier Region
    # (declared with is_plural=True: the binding holds a list of regions)
    __Region = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Region'), 'Region', '__httpeuclid_esa_orgschemabasimpstc_unionType_Region', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 406, 3), )
    Region = property(__Region.value, __Region.set, None, None)
    _ElementMap.update({
        __Region.name() : __Region
    })
    _AttributeMap.update({
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'unionType', unionType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}intersectionType with content type ELEMENT_ONLY
class intersectionType (pyxb.binding.basis.complexTypeDefinition):
    """The intersection of two or more regions is a region"""
    # NOTE(review): PyXB-generated binding; do not hand-edit, regenerate from
    # the XSD.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'intersectionType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 410, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Region uses Python identifier Region
    # (declared with is_plural=True: the binding holds a list of regions)
    __Region = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Region'), 'Region', '__httpeuclid_esa_orgschemabasimpstc_intersectionType_Region', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 415, 3), )
    Region = property(__Region.value, __Region.set, None, None)
    _ElementMap.update({
        __Region.name() : __Region
    })
    _AttributeMap.update({
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'intersectionType', intersectionType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}negationType with content type ELEMENT_ONLY
class negationType (pyxb.binding.basis.complexTypeDefinition):
    """The negation of a region is a region"""
    # NOTE(review): PyXB-generated binding; do not hand-edit, regenerate from
    # the XSD.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'negationType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 419, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Region uses Python identifier Region
    # (is_plural=False here: negation takes exactly one operand region,
    # unlike unionType/intersectionType)
    __Region = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Region'), 'Region', '__httpeuclid_esa_orgschemabasimpstc_negationType_Region', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 424, 3), )
    Region = property(__Region.value, __Region.set, None, None)
    _ElementMap.update({
        __Region.name() : __Region
    })
    _AttributeMap.update({
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'negationType', negationType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}diffType with content type ELEMENT_ONLY
class diffType (pyxb.binding.basis.complexTypeDefinition):
    """The difference of two regions (Region1 minus Region2) is a region; it is equivalent to the intersection of Region1 with notRegion2"""
    # NOTE(review): PyXB-generated binding; do not hand-edit, regenerate from
    # the XSD. The docstring's "Region1" corresponds to the element actually
    # named 'Region' below (the minuend); 'Region2' is the subtrahend.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'diffType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 428, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Region uses Python identifier Region
    __Region = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Region'), 'Region', '__httpeuclid_esa_orgschemabasimpstc_diffType_Region', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 433, 3), )
    Region = property(__Region.value, __Region.set, None, None)
    # Element Region2 uses Python identifier Region2
    __Region2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Region2'), 'Region2', '__httpeuclid_esa_orgschemabasimpstc_diffType_Region2', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 434, 3), )
    Region2 = property(__Region2.value, __Region2.set, None, None)
    _ElementMap.update({
        __Region.name() : __Region,
        __Region2.name() : __Region2
    })
    _AttributeMap.update({
    })
# Register the binding so Namespace lookups by schema type name resolve to it.
Namespace.addCategoryObject('typeBinding', u'diffType', diffType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}astroCoordSystem with content type ELEMENT_ONLY
class astroCoordSystem(pyxb.binding.basis.complexTypeDefinition):
    """The coordinate system definition : spatial coordinate frame and reference position ; time frame and reference position ; the coordinate flavor ; the spectral frame and redshift/Doppler frame; and the planetary ephemeris ; an ID is required, since this is how coordinate elements are associated with their coordinate systems. This complexType should be embedded in the generic header of a whole data set. This complexType is derived from the STC - S metadata linear string implementation. We recap that this STC - S serialization don't support : generic coordinates (only : Time, Space, Spectral and Redshift), custom coordinate reference frames and custom refrence positions, spatial frames with offset positions, relocatable frames, planetary reference frames, geodetic reference spheroids other than IAU 1976. """

    # Binding metadata: element-only content, no simple-type facade.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'astroCoordSystem')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 439, 1)

    # Child element declarations (base type is pyxb.binding.datatypes.anyType).
    # Unqualified element TimeFrame, exposed as Python attribute ``TimeFrame``.
    __TimeFrame = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'TimeFrame'), 'TimeFrame', '__httpeuclid_esa_orgschemabasimpstc_astroCoordSystem_TimeFrame', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 444, 3))
    TimeFrame = property(__TimeFrame.value, __TimeFrame.set, None, None)

    # Unqualified element SpaceFrame, exposed as Python attribute ``SpaceFrame``.
    __SpaceFrame = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'SpaceFrame'), 'SpaceFrame', '__httpeuclid_esa_orgschemabasimpstc_astroCoordSystem_SpaceFrame', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 445, 3))
    SpaceFrame = property(__SpaceFrame.value, __SpaceFrame.set, None, None)

    # Unqualified element SpectralFrame, exposed as Python attribute ``SpectralFrame``.
    __SpectralFrame = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'SpectralFrame'), 'SpectralFrame', '__httpeuclid_esa_orgschemabasimpstc_astroCoordSystem_SpectralFrame', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 446, 3))
    SpectralFrame = property(__SpectralFrame.value, __SpectralFrame.set, None, None)

    # Unqualified element RedshiftFrame, exposed as Python attribute ``RedshiftFrame``.
    __RedshiftFrame = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'RedshiftFrame'), 'RedshiftFrame', '__httpeuclid_esa_orgschemabasimpstc_astroCoordSystem_RedshiftFrame', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 447, 3))
    RedshiftFrame = property(__RedshiftFrame.value, __RedshiftFrame.set, None, None)

    # Required string attribute AstroCoordSystemId, exposed as Python attribute ``AstroCoordSystemId``.
    __AstroCoordSystemId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'AstroCoordSystemId'), 'AstroCoordSystemId', '__httpeuclid_esa_orgschemabasimpstc_astroCoordSystem_AstroCoordSystemId', pyxb.binding.datatypes.string, required=True)
    __AstroCoordSystemId._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 449, 2)
    __AstroCoordSystemId._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 449, 2)
    AstroCoordSystemId = property(__AstroCoordSystemId.value, __AstroCoordSystemId.set, None, None)

    # Lookup maps consulted by the PyXB runtime during (de)serialization.
    _ElementMap = {
        __TimeFrame.name(): __TimeFrame,
        __SpaceFrame.name(): __SpaceFrame,
        __SpectralFrame.name(): __SpectralFrame,
        __RedshiftFrame.name(): __RedshiftFrame,
    }
    _AttributeMap = {
        __AstroCoordSystemId.name(): __AstroCoordSystemId,
    }
# Register this binding under the namespace's 'typeBinding' category.
Namespace.addCategoryObject('typeBinding', u'astroCoordSystem', astroCoordSystem)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}timeFrame with content type ELEMENT_ONLY
class timeFrame(pyxb.binding.basis.complexTypeDefinition):
    """The time reference frame consists of a timescale, a reference position, and optionally a reference direction (needed when transformations have been applied). This type is derived from ivoa standards : STC V1.30. For simplification purpose and in order to get a better readability we met proposed simplifications from paragraph 5 timescale and refpos STC metadata linear string implementation V0.10. """

    # Binding metadata: element-only content, no simple-type facade.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'timeFrame')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 452, 1)

    # Child element declarations (base type is pyxb.binding.datatypes.anyType).
    # Unqualified element Name, exposed as Python attribute ``Name``.
    __Name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name'), 'Name', '__httpeuclid_esa_orgschemabasimpstc_timeFrame_Name', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 457, 3))
    Name = property(__Name.value, __Name.set, None, None)

    # Unqualified element TimeScale, exposed as Python attribute ``TimeScale``.
    __TimeScale = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'TimeScale'), 'TimeScale', '__httpeuclid_esa_orgschemabasimpstc_timeFrame_TimeScale', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 458, 3))
    TimeScale = property(__TimeScale.value, __TimeScale.set, None, None)

    # Unqualified element ReferencePosition, exposed as Python attribute ``ReferencePosition``.
    __ReferencePosition = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'ReferencePosition'), 'ReferencePosition', '__httpeuclid_esa_orgschemabasimpstc_timeFrame_ReferencePosition', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 459, 3))
    ReferencePosition = property(__ReferencePosition.value, __ReferencePosition.set, None, None)

    # Optional string attribute TimeFrameId, exposed as Python attribute ``TimeFrameId``.
    __TimeFrameId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'TimeFrameId'), 'TimeFrameId', '__httpeuclid_esa_orgschemabasimpstc_timeFrame_TimeFrameId', pyxb.binding.datatypes.string)
    __TimeFrameId._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 461, 2)
    __TimeFrameId._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 461, 2)
    TimeFrameId = property(__TimeFrameId.value, __TimeFrameId.set, None, None)

    # Lookup maps consulted by the PyXB runtime during (de)serialization.
    _ElementMap = {
        __Name.name(): __Name,
        __TimeScale.name(): __TimeScale,
        __ReferencePosition.name(): __ReferencePosition,
    }
    _AttributeMap = {
        __TimeFrameId.name(): __TimeFrameId,
    }
# Register this binding under the namespace's 'typeBinding' category.
Namespace.addCategoryObject('typeBinding', u'timeFrame', timeFrame)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}spaceFrame with content type ELEMENT_ONLY
class spaceFrame(pyxb.binding.basis.complexTypeDefinition):
    """Coordinate reference frame : optional equinox with either a standard reference system (ICRS, FK5, FK4) and optional standard pole (equatorial, ecliptic, galactic, etc.), or a custom frame with pole (positive Z-axis) and positive X-axis direction.CoordFlavor provides the coordinate definitions: number of axes, SPHERICAL, CARTESIAN, UNITSPHERE, POLAR, or HEALPIX, presence of velocities. This type is derived from ivoa standards : STC V1.30."""

    # Binding metadata: element-only content, no simple-type facade.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'spaceFrame')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 464, 1)

    # Child element declarations (base type is pyxb.binding.datatypes.anyType).
    # Unqualified element Name, exposed as Python attribute ``Name``.
    __Name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name'), 'Name', '__httpeuclid_esa_orgschemabasimpstc_spaceFrame_Name', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 469, 3))
    Name = property(__Name.value, __Name.set, None, None)

    # Unqualified element SpaceRefFrame, exposed as Python attribute ``SpaceRefFrame``.
    __SpaceRefFrame = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'SpaceRefFrame'), 'SpaceRefFrame', '__httpeuclid_esa_orgschemabasimpstc_spaceFrame_SpaceRefFrame', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 470, 3))
    SpaceRefFrame = property(__SpaceRefFrame.value, __SpaceRefFrame.set, None, None)

    # Unqualified element ReferencePosition, exposed as Python attribute ``ReferencePosition``.
    __ReferencePosition = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'ReferencePosition'), 'ReferencePosition', '__httpeuclid_esa_orgschemabasimpstc_spaceFrame_ReferencePosition', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 471, 3))
    ReferencePosition = property(__ReferencePosition.value, __ReferencePosition.set, None, None)

    # Unqualified element CoordFlavor, exposed as Python attribute ``CoordFlavor``.
    __CoordFlavor = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'CoordFlavor'), 'CoordFlavor', '__httpeuclid_esa_orgschemabasimpstc_spaceFrame_CoordFlavor', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 472, 3))
    CoordFlavor = property(__CoordFlavor.value, __CoordFlavor.set, None, None)

    # Optional string attribute SpaceFrameId, exposed as Python attribute ``SpaceFrameId``.
    __SpaceFrameId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'SpaceFrameId'), 'SpaceFrameId', '__httpeuclid_esa_orgschemabasimpstc_spaceFrame_SpaceFrameId', pyxb.binding.datatypes.string)
    __SpaceFrameId._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 474, 2)
    __SpaceFrameId._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 474, 2)
    SpaceFrameId = property(__SpaceFrameId.value, __SpaceFrameId.set, None, None)

    # Lookup maps consulted by the PyXB runtime during (de)serialization.
    _ElementMap = {
        __Name.name(): __Name,
        __SpaceRefFrame.name(): __SpaceRefFrame,
        __ReferencePosition.name(): __ReferencePosition,
        __CoordFlavor.name(): __CoordFlavor,
    }
    _AttributeMap = {
        __SpaceFrameId.name(): __SpaceFrameId,
    }
# Register this binding under the namespace's 'typeBinding' category.
Namespace.addCategoryObject('typeBinding', u'spaceFrame', spaceFrame)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}spectralFrame with content type ELEMENT_ONLY
class spectralFrame(pyxb.binding.basis.complexTypeDefinition):
    """Contains the spectral frame reference position. This type is derived from ivoa standards : STC V1.30."""

    # Binding metadata: element-only content, no simple-type facade.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'spectralFrame')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 477, 1)

    # Child element declarations (base type is pyxb.binding.datatypes.anyType).
    # Unqualified element Name, exposed as Python attribute ``Name``.
    __Name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name'), 'Name', '__httpeuclid_esa_orgschemabasimpstc_spectralFrame_Name', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 482, 3))
    Name = property(__Name.value, __Name.set, None, None)

    # Unqualified element ReferencePosition, exposed as Python attribute ``ReferencePosition``.
    __ReferencePosition = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'ReferencePosition'), 'ReferencePosition', '__httpeuclid_esa_orgschemabasimpstc_spectralFrame_ReferencePosition', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 483, 3))
    ReferencePosition = property(__ReferencePosition.value, __ReferencePosition.set, None, None)

    # Optional string attribute SpectralFrameId, exposed as Python attribute ``SpectralFrameId``.
    __SpectralFrameId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'SpectralFrameId'), 'SpectralFrameId', '__httpeuclid_esa_orgschemabasimpstc_spectralFrame_SpectralFrameId', pyxb.binding.datatypes.string)
    __SpectralFrameId._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 485, 2)
    __SpectralFrameId._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 485, 2)
    SpectralFrameId = property(__SpectralFrameId.value, __SpectralFrameId.set, None, None)

    # Lookup maps consulted by the PyXB runtime during (de)serialization.
    _ElementMap = {
        __Name.name(): __Name,
        __ReferencePosition.name(): __ReferencePosition,
    }
    _AttributeMap = {
        __SpectralFrameId.name(): __SpectralFrameId,
    }
# Register this binding under the namespace's 'typeBinding' category.
Namespace.addCategoryObject('typeBinding', u'spectralFrame', spectralFrame)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}redshiftFrame with content type ELEMENT_ONLY
class redshiftFrame(pyxb.binding.basis.complexTypeDefinition):
    """Contains the Doppler definitions, including whether the values are velocity or redshift (value). This type is derived from ivoa standards : STC V1.30."""

    # Binding metadata: element-only content, no simple-type facade.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'redshiftFrame')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 494, 1)

    # Child element declarations (base type is pyxb.binding.datatypes.anyType).
    # Unqualified element Name, exposed as Python attribute ``Name``.
    __Name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name'), 'Name', '__httpeuclid_esa_orgschemabasimpstc_redshiftFrame_Name', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 499, 3))
    Name = property(__Name.value, __Name.set, None, None)

    # Unqualified element Value, exposed as Python attribute ``Value``.
    __Value = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Value'), 'Value', '__httpeuclid_esa_orgschemabasimpstc_redshiftFrame_Value', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 500, 3))
    Value = property(__Value.value, __Value.set, None, None)

    # Unqualified element DopplerDefinition, exposed as Python attribute ``DopplerDefinition``.
    __DopplerDefinition = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'DopplerDefinition'), 'DopplerDefinition', '__httpeuclid_esa_orgschemabasimpstc_redshiftFrame_DopplerDefinition', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 501, 3))
    DopplerDefinition = property(__DopplerDefinition.value, __DopplerDefinition.set, None, None)

    # Unqualified element ReferencePosition, exposed as Python attribute ``ReferencePosition``.
    __ReferencePosition = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'ReferencePosition'), 'ReferencePosition', '__httpeuclid_esa_orgschemabasimpstc_redshiftFrame_ReferencePosition', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 502, 3))
    ReferencePosition = property(__ReferencePosition.value, __ReferencePosition.set, None, None)

    # Optional string attribute RedshiftFrameId, exposed as Python attribute ``RedshiftFrameId``.
    __RedshiftFrameId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'RedshiftFrameId'), 'RedshiftFrameId', '__httpeuclid_esa_orgschemabasimpstc_redshiftFrame_RedshiftFrameId', pyxb.binding.datatypes.string)
    __RedshiftFrameId._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 504, 2)
    __RedshiftFrameId._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 504, 2)
    RedshiftFrameId = property(__RedshiftFrameId.value, __RedshiftFrameId.set, None, None)

    # Lookup maps consulted by the PyXB runtime during (de)serialization.
    _ElementMap = {
        __Name.name(): __Name,
        __Value.name(): __Value,
        __DopplerDefinition.name(): __DopplerDefinition,
        __ReferencePosition.name(): __ReferencePosition,
    }
    _AttributeMap = {
        __RedshiftFrameId.name(): __RedshiftFrameId,
    }
# Register this binding under the namespace's 'typeBinding' category.
Namespace.addCategoryObject('typeBinding', u'redshiftFrame', redshiftFrame)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}spatialCoordDefType with content type ELEMENT_ONLY
class spatialCoordDefType(pyxb.binding.basis.complexTypeDefinition):
    """Provides the spatial coordinate representation either : SPHERICAL, CARTESIAN, UNITSPHERE, POLAR, or HEALPIX. SPHERICAL : Spherical 2-D (longitude, latitude) or 3-D (long, lat, radius/elevation) coordinates ; CARTESIAN : Cartesian 1-, 2-, or 3-D coordinates ; UNITSPHERE : 3-D Unit sphere coordinates (direction cosines); in (long,lat), X is in the direction (0,0), Y (pi/2,0), Z (0,pi/2) ; POLAR : 2-D polar coordinates (radius, posangle) ; CYLINDRICAL : 3-D cylindrical coordinates (radius, posangle, z) ; STRING : String coordinates (e.g., Stokes) ; HEALPIX : 2-D Healpix coordinates; defaults for H(4) and K(3). In STC metadata linear string implementation V0.10. the enumeration is more concise : SPHER2 for SPHERICAL 2-D, SPHER3 for SPHERICAL 3-D, CART1, CART2, CART 3 for CARTESIAN 1-2-or 3-D. We prefer maintain the STC full enumeration that embeds healpix flavor."""

    # Binding metadata: element-only content, no simple-type facade.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'spatialCoordDefType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 603, 1)

    # Child element declarations, one per coordinate flavor
    # (base type is pyxb.binding.datatypes.anyType).
    __SPHERICAL = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'SPHERICAL'), 'SPHERICAL', '__httpeuclid_esa_orgschemabasimpstc_spatialCoordDefType_SPHERICAL', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 608, 3))
    SPHERICAL = property(__SPHERICAL.value, __SPHERICAL.set, None, None)

    __CARTESIAN = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'CARTESIAN'), 'CARTESIAN', '__httpeuclid_esa_orgschemabasimpstc_spatialCoordDefType_CARTESIAN', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 609, 3))
    CARTESIAN = property(__CARTESIAN.value, __CARTESIAN.set, None, None)

    __UNITSPHERE = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'UNITSPHERE'), 'UNITSPHERE', '__httpeuclid_esa_orgschemabasimpstc_spatialCoordDefType_UNITSPHERE', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 610, 3))
    UNITSPHERE = property(__UNITSPHERE.value, __UNITSPHERE.set, None, None)

    __POLAR = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'POLAR'), 'POLAR', '__httpeuclid_esa_orgschemabasimpstc_spatialCoordDefType_POLAR', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 611, 3))
    POLAR = property(__POLAR.value, __POLAR.set, None, None)

    __CYLINDRICAL = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'CYLINDRICAL'), 'CYLINDRICAL', '__httpeuclid_esa_orgschemabasimpstc_spatialCoordDefType_CYLINDRICAL', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 612, 3))
    CYLINDRICAL = property(__CYLINDRICAL.value, __CYLINDRICAL.set, None, None)

    __STRING = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'STRING'), 'STRING', '__httpeuclid_esa_orgschemabasimpstc_spatialCoordDefType_STRING', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 613, 3))
    STRING = property(__STRING.value, __STRING.set, None, None)

    __HEALPIX = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'HEALPIX'), 'HEALPIX', '__httpeuclid_esa_orgschemabasimpstc_spatialCoordDefType_HEALPIX', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 614, 3))
    HEALPIX = property(__HEALPIX.value, __HEALPIX.set, None, None)

    # Lookup maps consulted by the PyXB runtime during (de)serialization.
    _ElementMap = {
        __SPHERICAL.name(): __SPHERICAL,
        __CARTESIAN.name(): __CARTESIAN,
        __UNITSPHERE.name(): __UNITSPHERE,
        __POLAR.name(): __POLAR,
        __CYLINDRICAL.name(): __CYLINDRICAL,
        __STRING.name(): __STRING,
        __HEALPIX.name(): __HEALPIX,
    }
    _AttributeMap = {}
# Register this binding under the namespace's 'typeBinding' category.
Namespace.addCategoryObject('typeBinding', u'spatialCoordDefType', spatialCoordDefType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}isoTime with content type SIMPLE
class isoTime(pyxb.binding.basis.complexTypeDefinition):
    """ISO8601 time; note: only a limited subset of ISO 8601 is allowed: yyyy-mm-ddThh:mm:ss.sss...; unfortunately, XSchema does not allow hh, mm, or ss to be optional, ".ss" is. This type is derived from IVOA STC V1.3."""

    # Binding metadata: simple content whose value facade is xs:dateTime.
    _TypeDefinition = pyxb.binding.datatypes.dateTime
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'isoTime')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 777, 1)
    # No child elements and no attributes are declared for this type.
    _ElementMap = {}
    _AttributeMap = {}
# Register this binding under the namespace's 'typeBinding' category.
Namespace.addCategoryObject('typeBinding', u'isoTime', isoTime)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}JDTime with content type SIMPLE
class JDTime(pyxb.binding.basis.complexTypeDefinition):
    """A decimal type for JD and MJD, with optional referencing."""

    # Binding metadata: simple content whose value facade is xs:decimal.
    _TypeDefinition = pyxb.binding.datatypes.decimal
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'JDTime')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 786, 1)
    # No child elements and no attributes are declared for this type.
    _ElementMap = {}
    _AttributeMap = {}
# Register this binding under the namespace's 'typeBinding' category.
Namespace.addCategoryObject('typeBinding', u'JDTime', JDTime)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}MJDTime with content type SIMPLE
class MJDTime(pyxb.binding.basis.complexTypeDefinition):
    """MJD time (=JD - 2400000.5)"""

    # Binding metadata: simple content whose value facade is xs:decimal.
    _TypeDefinition = pyxb.binding.datatypes.decimal
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'MJDTime')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 795, 1)
    # No child elements and no attributes are declared for this type.
    _ElementMap = {}
    _AttributeMap = {}
# Register this binding under the namespace's 'typeBinding' category.
Namespace.addCategoryObject('typeBinding', u'MJDTime', MJDTime)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}timeOffset with content type SIMPLE
class timeOffset(pyxb.binding.basis.complexTypeDefinition):
    """Actual elapsed time offset"""

    # Binding metadata: simple content whose value facade is xs:decimal.
    _TypeDefinition = pyxb.binding.datatypes.decimal
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'timeOffset')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 804, 1)
    # No child elements and no attributes are declared for this type.
    _ElementMap = {}
    _AttributeMap = {}
# Register this binding under the namespace's 'typeBinding' category.
Namespace.addCategoryObject('typeBinding', u'timeOffset', timeOffset)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}astronTimeType with content type ELEMENT_ONLY
class astronTimeType(pyxb.binding.basis.complexTypeDefinition):
    """astronTime is the generalized astronomical time type and consists of one, two, or three elements: optional TimeScale, optional relative time offset, and an absolute time (ISO8601 or a decimal JD or MJD) ; TimeScale may be omitted only if the element is part of AstroCoords, referring to an AstroCoordSystem that specifies a TimeScale."""

    # Binding metadata: element-only content, no simple-type facade.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'astronTimeType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 813, 1)

    # Child element declarations (base type is pyxb.binding.datatypes.anyType).
    # Unqualified element Timescale, exposed as Python attribute ``Timescale``.
    __Timescale = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Timescale'), 'Timescale', '__httpeuclid_esa_orgschemabasimpstc_astronTimeType_Timescale', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 818, 3))
    Timescale = property(__Timescale.value, __Timescale.set, None, None)

    # Unqualified element TimeOffset, exposed as Python attribute ``TimeOffset``.
    __TimeOffset = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'TimeOffset'), 'TimeOffset', '__httpeuclid_esa_orgschemabasimpstc_astronTimeType_TimeOffset', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 819, 3))
    TimeOffset = property(__TimeOffset.value, __TimeOffset.set, None, None)

    # Unqualified element AbsoluteTime, exposed as Python attribute ``AbsoluteTime``.
    __AbsoluteTime = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'AbsoluteTime'), 'AbsoluteTime', '__httpeuclid_esa_orgschemabasimpstc_astronTimeType_AbsoluteTime', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 820, 3))
    AbsoluteTime = property(__AbsoluteTime.value, __AbsoluteTime.set, None, None)

    # Lookup maps consulted by the PyXB runtime during (de)serialization.
    _ElementMap = {
        __Timescale.name(): __Timescale,
        __TimeOffset.name(): __TimeOffset,
        __AbsoluteTime.name(): __AbsoluteTime,
    }
    _AttributeMap = {}
# Register this binding under the namespace's 'typeBinding' category.
Namespace.addCategoryObject('typeBinding', u'astronTimeType', astronTimeType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}TAIMillisecsecDateTimeRange with content type ELEMENT_ONLY
class TAIMillisecsecDateTimeRange(pyxb.binding.basis.complexTypeDefinition):
    """An non UTC date-time range with a precision of one millisecond"""

    # Binding metadata: element-only content, no simple-type facade.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'TAIMillisecsecDateTimeRange')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 889, 1)

    # Child element declarations (base type is pyxb.binding.datatypes.anyType).
    # Unqualified element start, exposed as Python attribute ``start``.
    __start = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'start'), 'start', '__httpeuclid_esa_orgschemabasimpstc_TAIMillisecsecDateTimeRange_start', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 894, 3))
    start = property(__start.value, __start.set, None, None)

    # Unqualified element end, exposed as Python attribute ``end``.
    __end = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'end'), 'end', '__httpeuclid_esa_orgschemabasimpstc_TAIMillisecsecDateTimeRange_end', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 895, 3))
    end = property(__end.value, __end.set, None, None)

    # Lookup maps consulted by the PyXB runtime during (de)serialization.
    _ElementMap = {
        __start.name(): __start,
        __end.name(): __end,
    }
    _AttributeMap = {}
# Register this binding under the namespace's 'typeBinding' category.
Namespace.addCategoryObject('typeBinding', u'TAIMillisecsecDateTimeRange', TAIMillisecsecDateTimeRange)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}UTCDateTimeRange with content type ELEMENT_ONLY
class UTCDateTimeRange(pyxb.binding.basis.complexTypeDefinition):
    """An UTC date-time range"""

    # Binding metadata: element-only content, no simple-type facade.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'UTCDateTimeRange')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 911, 1)

    # Child element declarations (base type is pyxb.binding.datatypes.anyType).
    # Unqualified element start, exposed as Python attribute ``start``.
    __start = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'start'), 'start', '__httpeuclid_esa_orgschemabasimpstc_UTCDateTimeRange_start', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 916, 3))
    start = property(__start.value, __start.set, None, None)

    # Unqualified element end, exposed as Python attribute ``end``.
    __end = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'end'), 'end', '__httpeuclid_esa_orgschemabasimpstc_UTCDateTimeRange_end', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 917, 3))
    end = property(__end.value, __end.set, None, None)

    # Lookup maps consulted by the PyXB runtime during (de)serialization.
    _ElementMap = {
        __start.name(): __start,
        __end.name(): __end,
    }
    _AttributeMap = {}
# Register this binding under the namespace's 'typeBinding' category.
Namespace.addCategoryObject('typeBinding', u'UTCDateTimeRange', UTCDateTimeRange)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}UTCMicrosecDateTimeRange with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for XSD complex type
# {http://euclid.esa.org/schema/bas/imp/stc}UTCMicrosecDateTimeRange — regenerate
# from the schema rather than editing by hand.
class UTCMicrosecDateTimeRange (pyxb.binding.basis.complexTypeDefinition):
    """An UTC date-time range with a precision of one microsecond"""
    # No simple-type facet: content is defined purely by the element map below.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'UTCMicrosecDateTimeRange')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 932, 1)
    # Maps from unqualified XML names to the declarations registered below.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element start uses Python identifier start
    __start = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'start'), 'start', '__httpeuclid_esa_orgschemabasimpstc_UTCMicrosecDateTimeRange_start', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 937, 3), )
    # Instance accessor delegating to the element declaration's value/set hooks.
    start = property(__start.value, __start.set, None, None)
    # Element end uses Python identifier end
    __end = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'end'), 'end', '__httpeuclid_esa_orgschemabasimpstc_UTCMicrosecDateTimeRange_end', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 938, 3), )
    end = property(__end.value, __end.set, None, None)
    _ElementMap.update({
        __start.name() : __start,
        __end.name() : __end
    })
    # This type declares no XML attributes.
    _AttributeMap.update({
    })
# Register the binding so the namespace can resolve the XSD type name to this class.
Namespace.addCategoryObject('typeBinding', u'UTCMicrosecDateTimeRange', UTCMicrosecDateTimeRange)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}UTCMillisecDateTimeRange with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for XSD complex type
# {http://euclid.esa.org/schema/bas/imp/stc}UTCMillisecDateTimeRange — regenerate
# from the schema rather than editing by hand.
class UTCMillisecDateTimeRange (pyxb.binding.basis.complexTypeDefinition):
    """An UTC date-time range with a precision of one millisecond"""
    # No simple-type facet: content is defined purely by the element map below.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'UTCMillisecDateTimeRange')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 953, 1)
    # Maps from unqualified XML names to the declarations registered below.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element start uses Python identifier start
    __start = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'start'), 'start', '__httpeuclid_esa_orgschemabasimpstc_UTCMillisecDateTimeRange_start', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 958, 3), )
    # Instance accessor delegating to the element declaration's value/set hooks.
    start = property(__start.value, __start.set, None, None)
    # Element end uses Python identifier end
    __end = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'end'), 'end', '__httpeuclid_esa_orgschemabasimpstc_UTCMillisecDateTimeRange_end', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 959, 3), )
    end = property(__end.value, __end.set, None, None)
    _ElementMap.update({
        __start.name() : __start,
        __end.name() : __end
    })
    # This type declares no XML attributes.
    _AttributeMap.update({
    })
# Register the binding so the namespace can resolve the XSD type name to this class.
Namespace.addCategoryObject('typeBinding', u'UTCMillisecDateTimeRange', UTCMillisecDateTimeRange)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}UTCSecDateTimeRange with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for XSD complex type
# {http://euclid.esa.org/schema/bas/imp/stc}UTCSecDateTimeRange — regenerate
# from the schema rather than editing by hand.
class UTCSecDateTimeRange (pyxb.binding.basis.complexTypeDefinition):
    """An UTC date-time range with a precision of one second"""
    # No simple-type facet: content is defined purely by the element map below.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'UTCSecDateTimeRange')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 974, 1)
    # Maps from unqualified XML names to the declarations registered below.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element start uses Python identifier start
    __start = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'start'), 'start', '__httpeuclid_esa_orgschemabasimpstc_UTCSecDateTimeRange_start', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 979, 3), )
    # Instance accessor delegating to the element declaration's value/set hooks.
    start = property(__start.value, __start.set, None, None)
    # Element end uses Python identifier end
    __end = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'end'), 'end', '__httpeuclid_esa_orgschemabasimpstc_UTCSecDateTimeRange_end', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 980, 3), )
    end = property(__end.value, __end.set, None, None)
    _ElementMap.update({
        __start.name() : __start,
        __end.name() : __end
    })
    # This type declares no XML attributes.
    _AttributeMap.update({
    })
# Register the binding so the namespace can resolve the XSD type name to this class.
Namespace.addCategoryObject('typeBinding', u'UTCSecDateTimeRange', UTCSecDateTimeRange)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}UTCTenthMicrosecDateTimeRange with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for XSD complex type
# {http://euclid.esa.org/schema/bas/imp/stc}UTCTenthMicrosecDateTimeRange —
# regenerate from the schema rather than editing by hand.
class UTCTenthMicrosecDateTimeRange (pyxb.binding.basis.complexTypeDefinition):
    """An UTC date-time range with a precision of one tenth-of-a-microsecond"""
    # No simple-type facet: content is defined purely by the element map below.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'UTCTenthMicrosecDateTimeRange')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 984, 1)
    # Maps from unqualified XML names to the declarations registered below.
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element start uses Python identifier start
    __start = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'start'), 'start', '__httpeuclid_esa_orgschemabasimpstc_UTCTenthMicrosecDateTimeRange_start', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 989, 3), )
    # Instance accessor delegating to the element declaration's value/set hooks.
    start = property(__start.value, __start.set, None, None)
    # Element end uses Python identifier end
    __end = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'end'), 'end', '__httpeuclid_esa_orgschemabasimpstc_UTCTenthMicrosecDateTimeRange_end', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 990, 3), )
    end = property(__end.value, __end.set, None, None)
    _ElementMap.update({
        __start.name() : __start,
        __end.name() : __end
    })
    # This type declares no XML attributes.
    _AttributeMap.update({
    })
# Register the binding so the namespace can resolve the XSD type name to this class.
Namespace.addCategoryObject('typeBinding', u'UTCTenthMicrosecDateTimeRange', UTCTenthMicrosecDateTimeRange)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}regionType with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for XSD complex type
# {http://euclid.esa.org/schema/bas/imp/stc}regionType — regenerate from the
# schema rather than editing by hand.
class regionType (coordScalarIntervalType):
    """A Region is a Shape or the result of a Region Operation involving one or more Regions"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'regionType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 224, 1)
    # Start from copies of the base type's maps so inherited declarations are kept
    # without mutating coordScalarIntervalType's own maps.
    _ElementMap = coordScalarIntervalType._ElementMap.copy()
    _AttributeMap = coordScalarIntervalType._AttributeMap.copy()
    # Base type is coordScalarIntervalType
    # Element LoLimit (LoLimit) inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Element HiLimit (HiLimit) inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Element Area uses Python identifier Area
    __Area = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Area'), 'Area', '__httpeuclid_esa_orgschemabasimpstc_regionType_Area', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 231, 5), )
    Area = property(__Area.value, __Area.set, None, None)
    # Attribute lo_include inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute hi_include inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute fill_factor inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute FrameId inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute note uses Python identifier note
    __note = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'note'), 'note', '__httpeuclid_esa_orgschemabasimpstc_regionType_note', pyxb.binding.datatypes.string)
    __note._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 233, 4)
    __note._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 233, 4)
    note = property(__note.value, __note.set, None, None)
    # Attribute astroCoordSystem uses Python identifier astroCoordSystem
    __astroCoordSystem = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'astroCoordSystem'), 'astroCoordSystem', '__httpeuclid_esa_orgschemabasimpstc_regionType_astroCoordSystem', pyxb.binding.datatypes.string)
    __astroCoordSystem._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 234, 4)
    __astroCoordSystem._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 234, 4)
    astroCoordSystem = property(__astroCoordSystem.value, __astroCoordSystem.set, None, None)
    # Register only the declarations introduced by this derived type.
    _ElementMap.update({
        __Area.name() : __Area
    })
    _AttributeMap.update({
        __note.name() : __note,
        __astroCoordSystem.name() : __astroCoordSystem
    })
# Register the binding so the namespace can resolve the XSD type name to this class.
Namespace.addCategoryObject('typeBinding', u'regionType', regionType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}coordFlavorType with content type EMPTY
# NOTE(review): PyXB auto-generated binding for XSD complex type
# {http://euclid.esa.org/schema/bas/imp/stc}coordFlavorType — regenerate from
# the schema rather than editing by hand.
class coordFlavorType (pyxb.binding.basis.complexTypeDefinition):
    """Provides the characteristics of the spatial coordinate frame : number of axes, handedness."""
    _TypeDefinition = None
    # EMPTY content model: this type carries attributes only, no child elements.
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'coordFlavorType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 595, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Attribute coord_naxes uses Python identifier coord_naxes
    # Typed by coordNaxesValue (declared elsewhere in this module); schema default is '2'.
    __coord_naxes = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'coord_naxes'), 'coord_naxes', '__httpeuclid_esa_orgschemabasimpstc_coordFlavorType_coord_naxes', coordNaxesValue, unicode_default=u'2')
    __coord_naxes._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 599, 2)
    __coord_naxes._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 599, 2)
    coord_naxes = property(__coord_naxes.value, __coord_naxes.set, None, None)
    # Attribute handedness uses Python identifier handedness
    # Typed by handednessValue (declared elsewhere in this module); no default.
    __handedness = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'handedness'), 'handedness', '__httpeuclid_esa_orgschemabasimpstc_coordFlavorType_handedness', handednessValue)
    __handedness._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 600, 2)
    __handedness._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 600, 2)
    handedness = property(__handedness.value, __handedness.set, None, None)
    _ElementMap.update({
    })
    _AttributeMap.update({
        __coord_naxes.name() : __coord_naxes,
        __handedness.name() : __handedness
    })
# Register the binding so the namespace can resolve the XSD type name to this class.
Namespace.addCategoryObject('typeBinding', u'coordFlavorType', coordFlavorType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}timeIntervalType with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for XSD complex type
# {http://euclid.esa.org/schema/bas/imp/stc}timeIntervalType — regenerate from
# the schema rather than editing by hand.
class timeIntervalType (coordScalarIntervalType):
    """The time interval needs to contain a start time and a stop time ; it needs to refer to a coordinate system; boundaries may or may not be inclusive. This type comes from STC ivoa schema, StartTime and StopTime are mandatory. """
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'timeIntervalType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 1027, 1)
    # Start from copies of the base type's maps so inherited declarations are kept
    # without mutating coordScalarIntervalType's own maps.
    _ElementMap = coordScalarIntervalType._ElementMap.copy()
    _AttributeMap = coordScalarIntervalType._AttributeMap.copy()
    # Base type is coordScalarIntervalType
    # Element LoLimit (LoLimit) inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Element HiLimit (HiLimit) inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Element StartTime uses Python identifier StartTime
    __StartTime = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'StartTime'), 'StartTime', '__httpeuclid_esa_orgschemabasimpstc_timeIntervalType_StartTime', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 1034, 5), )
    StartTime = property(__StartTime.value, __StartTime.set, None, u'astronTime may be expressed in ISO8601 or as a double relative to a reference time')
    # Element StopTime uses Python identifier StopTime
    __StopTime = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'StopTime'), 'StopTime', '__httpeuclid_esa_orgschemabasimpstc_timeIntervalType_StopTime', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 1039, 5), )
    StopTime = property(__StopTime.value, __StopTime.set, None, u'astronTime may be expressed in ISO8601 or as a double relative to a reference time')
    # Attribute lo_include inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute hi_include inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute fill_factor inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute FrameId inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Register only the declarations introduced by this derived type.
    _ElementMap.update({
        __StartTime.name() : __StartTime,
        __StopTime.name() : __StopTime
    })
    _AttributeMap.update({
    })
# Register the binding so the namespace can resolve the XSD type name to this class.
Namespace.addCategoryObject('typeBinding', u'timeIntervalType', timeIntervalType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}basicCoordinateType with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for XSD complex type
# {http://euclid.esa.org/schema/bas/imp/stc}basicCoordinateType — regenerate
# from the schema rather than editing by hand.
class basicCoordinateType (pyxb.binding.basis.complexTypeDefinition):
    """Basic scalar coordinate type. Single Error, Resolution, Size, PixSize elements indicate definite values ; pairs indicate ranges."""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'basicCoordinateType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 16, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Name uses Python identifier Name
    __Name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name'), 'Name', '__httpeuclid_esa_orgschemabasimpstc_basicCoordinateType_Name', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 21, 3), )
    Name = property(__Name.value, __Name.set, None, None)
    # Element Value uses Python identifier Value
    __Value = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Value'), 'Value', '__httpeuclid_esa_orgschemabasimpstc_basicCoordinateType_Value', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 22, 3), )
    Value = property(__Value.value, __Value.set, None, None)
    # Element Error uses Python identifier Error
    # The True flag marks this declaration as plural (may occur more than once).
    __Error = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Error'), 'Error', '__httpeuclid_esa_orgschemabasimpstc_basicCoordinateType_Error', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 23, 3), )
    Error = property(__Error.value, __Error.set, None, None)
    # Element Resolution uses Python identifier Resolution
    __Resolution = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Resolution'), 'Resolution', '__httpeuclid_esa_orgschemabasimpstc_basicCoordinateType_Resolution', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 24, 3), )
    Resolution = property(__Resolution.value, __Resolution.set, None, None)
    # Element Size uses Python identifier Size
    __Size = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Size'), 'Size', '__httpeuclid_esa_orgschemabasimpstc_basicCoordinateType_Size', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 25, 3), )
    Size = property(__Size.value, __Size.set, None, None)
    # Element PixSize uses Python identifier PixSize
    __PixSize = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'PixSize'), 'PixSize', '__httpeuclid_esa_orgschemabasimpstc_basicCoordinateType_PixSize', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 26, 3), )
    PixSize = property(__PixSize.value, __PixSize.set, None, None)
    # Attribute CoordUnit uses Python identifier CoordUnit
    # Typed by the imported euclid utd 'unit' binding.
    __CoordUnit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'CoordUnit'), 'CoordUnit', '__httpeuclid_esa_orgschemabasimpstc_basicCoordinateType_CoordUnit', _ImportedBinding_euclid_dm__utd.unit)
    __CoordUnit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 28, 2)
    __CoordUnit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 28, 2)
    CoordUnit = property(__CoordUnit.value, __CoordUnit.set, None, None)
    _ElementMap.update({
        __Name.name() : __Name,
        __Value.name() : __Value,
        __Error.name() : __Error,
        __Resolution.name() : __Resolution,
        __Size.name() : __Size,
        __PixSize.name() : __PixSize
    })
    _AttributeMap.update({
        __CoordUnit.name() : __CoordUnit
    })
# Register the binding so the namespace can resolve the XSD type name to this class.
Namespace.addCategoryObject('typeBinding', u'basicCoordinateType', basicCoordinateType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}pixelVector1CoordinateType with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for XSD complex type
# {http://euclid.esa.org/schema/bas/imp/stc}pixelVector1CoordinateType —
# regenerate from the schema rather than editing by hand.
class pixelVector1CoordinateType (pyxb.binding.basis.complexTypeDefinition):
    """Scalar pixel coordinate type"""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'pixelVector1CoordinateType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 31, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Name uses Python identifier Name
    __Name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name'), 'Name', '__httpeuclid_esa_orgschemabasimpstc_pixelVector1CoordinateType_Name', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 36, 3), )
    Name = property(__Name.value, __Name.set, None, None)
    # Element Value uses Python identifier Value
    __Value = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Value'), 'Value', '__httpeuclid_esa_orgschemabasimpstc_pixelVector1CoordinateType_Value', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 37, 3), )
    Value = property(__Value.value, __Value.set, None, None)
    # Attribute CoordUnit uses Python identifier CoordUnit
    # Typed by the imported euclid utd 'unit' binding.
    __CoordUnit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'CoordUnit'), 'CoordUnit', '__httpeuclid_esa_orgschemabasimpstc_pixelVector1CoordinateType_CoordUnit', _ImportedBinding_euclid_dm__utd.unit)
    __CoordUnit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 39, 2)
    __CoordUnit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 39, 2)
    CoordUnit = property(__CoordUnit.value, __CoordUnit.set, None, None)
    _ElementMap.update({
        __Name.name() : __Name,
        __Value.name() : __Value
    })
    _AttributeMap.update({
        __CoordUnit.name() : __CoordUnit
    })
# Register the binding so the namespace can resolve the XSD type name to this class.
Namespace.addCategoryObject('typeBinding', u'pixelVector1CoordinateType', pixelVector1CoordinateType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}timeCoordinateType with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for XSD complex type
# {http://euclid.esa.org/schema/bas/imp/stc}timeCoordinateType — regenerate
# from the schema rather than editing by hand.
class timeCoordinateType (pyxb.binding.basis.complexTypeDefinition):
    """Time coordinate type; sibling of basicCoordinateTypeSingle Error, Resolution, Size, PixSize elements indicate definite values ; pairs indicate ranges."""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'timeCoordinateType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 42, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Name uses Python identifier Name
    __Name = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name'), 'Name', '__httpeuclid_esa_orgschemabasimpstc_timeCoordinateType_Name', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 48, 3), )
    Name = property(__Name.value, __Name.set, None, None)
    # Element TimeInstant uses Python identifier TimeInstant
    __TimeInstant = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'TimeInstant'), 'TimeInstant', '__httpeuclid_esa_orgschemabasimpstc_timeCoordinateType_TimeInstant', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 49, 3), )
    TimeInstant = property(__TimeInstant.value, __TimeInstant.set, None, None)
    # Element Error uses Python identifier Error
    # The True flag marks this declaration as plural (may occur more than once).
    __Error = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Error'), 'Error', '__httpeuclid_esa_orgschemabasimpstc_timeCoordinateType_Error', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 50, 3), )
    Error = property(__Error.value, __Error.set, None, None)
    # Element Resolution uses Python identifier Resolution
    __Resolution = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Resolution'), 'Resolution', '__httpeuclid_esa_orgschemabasimpstc_timeCoordinateType_Resolution', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 51, 3), )
    Resolution = property(__Resolution.value, __Resolution.set, None, None)
    # Element Size uses Python identifier Size
    __Size = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Size'), 'Size', '__httpeuclid_esa_orgschemabasimpstc_timeCoordinateType_Size', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 52, 3), )
    Size = property(__Size.value, __Size.set, None, None)
    # Element PixSize uses Python identifier PixSize
    __PixSize = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'PixSize'), 'PixSize', '__httpeuclid_esa_orgschemabasimpstc_timeCoordinateType_PixSize', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 53, 3), )
    PixSize = property(__PixSize.value, __PixSize.set, None, None)
    # Attribute AstroCoordSystemId uses Python identifier AstroCoordSystemId
    __AstroCoordSystemId = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'AstroCoordSystemId'), 'AstroCoordSystemId', '__httpeuclid_esa_orgschemabasimpstc_timeCoordinateType_AstroCoordSystemId', pyxb.binding.datatypes.string)
    __AstroCoordSystemId._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 55, 2)
    __AstroCoordSystemId._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 55, 2)
    AstroCoordSystemId = property(__AstroCoordSystemId.value, __AstroCoordSystemId.set, None, None)
    # Attribute TimeUnit uses Python identifier TimeUnit
    # Typed by the imported euclid utd 'unit' binding.
    __TimeUnit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'TimeUnit'), 'TimeUnit', '__httpeuclid_esa_orgschemabasimpstc_timeCoordinateType_TimeUnit', _ImportedBinding_euclid_dm__utd.unit)
    __TimeUnit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 56, 2)
    __TimeUnit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 56, 2)
    TimeUnit = property(__TimeUnit.value, __TimeUnit.set, None, None)
    _ElementMap.update({
        __Name.name() : __Name,
        __TimeInstant.name() : __TimeInstant,
        __Error.name() : __Error,
        __Resolution.name() : __Resolution,
        __Size.name() : __Size,
        __PixSize.name() : __PixSize
    })
    _AttributeMap.update({
        __AstroCoordSystemId.name() : __AstroCoordSystemId,
        __TimeUnit.name() : __TimeUnit
    })
# Register the binding so the namespace can resolve the XSD type name to this class.
Namespace.addCategoryObject('typeBinding', u'timeCoordinateType', timeCoordinateType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}vector2CoordinateType with content type ELEMENT_ONLY
# NOTE(review): PyXB auto-generated binding for XSD complex type
# {http://euclid.esa.org/schema/bas/imp/stc}vector2CoordinateType — regenerate
# from the schema rather than editing by hand.
class vector2CoordinateType (pyxb.binding.basis.complexTypeDefinition):
    """2-D coordinate typeSingle Error2, Resolution2, Size2, PixSize2 elements indicate definite values ; pairs indicate ranges."""
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'vector2CoordinateType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 60, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Name1 uses Python identifier Name1
    __Name1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name1'), 'Name1', '__httpeuclid_esa_orgschemabasimpstc_vector2CoordinateType_Name1', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 66, 3), )
    Name1 = property(__Name1.value, __Name1.set, None, None)
    # Element Name2 uses Python identifier Name2
    __Name2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name2'), 'Name2', '__httpeuclid_esa_orgschemabasimpstc_vector2CoordinateType_Name2', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 67, 3), )
    Name2 = property(__Name2.value, __Name2.set, None, None)
    # Element Value2 uses Python identifier Value2
    __Value2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Value2'), 'Value2', '__httpeuclid_esa_orgschemabasimpstc_vector2CoordinateType_Value2', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 68, 3), )
    Value2 = property(__Value2.value, __Value2.set, None, None)
    # Element Error2 uses Python identifier Error2
    # The True flag marks this declaration as plural (may occur more than once).
    __Error2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Error2'), 'Error2', '__httpeuclid_esa_orgschemabasimpstc_vector2CoordinateType_Error2', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 69, 3), )
    Error2 = property(__Error2.value, __Error2.set, None, None)
    # Element Resolution2 uses Python identifier Resolution2
    __Resolution2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Resolution2'), 'Resolution2', '__httpeuclid_esa_orgschemabasimpstc_vector2CoordinateType_Resolution2', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 70, 3), )
    Resolution2 = property(__Resolution2.value, __Resolution2.set, None, None)
    # Element Size2 uses Python identifier Size2
    __Size2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Size2'), 'Size2', '__httpeuclid_esa_orgschemabasimpstc_vector2CoordinateType_Size2', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 71, 3), )
    Size2 = property(__Size2.value, __Size2.set, None, None)
    # Element PixSize2 uses Python identifier PixSize2
    __PixSize2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'PixSize2'), 'PixSize2', '__httpeuclid_esa_orgschemabasimpstc_vector2CoordinateType_PixSize2', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 72, 3), )
    PixSize2 = property(__PixSize2.value, __PixSize2.set, None, None)
    # Attribute CoordUnit uses Python identifier CoordUnit
    # Typed by the imported euclid utd 'unit' binding.
    __CoordUnit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'CoordUnit'), 'CoordUnit', '__httpeuclid_esa_orgschemabasimpstc_vector2CoordinateType_CoordUnit', _ImportedBinding_euclid_dm__utd.unit)
    __CoordUnit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 74, 2)
    __CoordUnit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 74, 2)
    CoordUnit = property(__CoordUnit.value, __CoordUnit.set, None, None)
    _ElementMap.update({
        __Name1.name() : __Name1,
        __Name2.name() : __Name2,
        __Value2.name() : __Value2,
        __Error2.name() : __Error2,
        __Resolution2.name() : __Resolution2,
        __Size2.name() : __Size2,
        __PixSize2.name() : __PixSize2
    })
    _AttributeMap.update({
        __CoordUnit.name() : __CoordUnit
    })
# Register the binding so the namespace can resolve the XSD type name to this class.
Namespace.addCategoryObject('typeBinding', u'vector2CoordinateType', vector2CoordinateType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}pixelVector2CoordinateType with content type ELEMENT_ONLY
class pixelVector2CoordinateType (pyxb.binding.basis.complexTypeDefinition):
    """2-D pixel coordinate type"""
    # NOTE(review): PyXB-generated binding class. The double-underscore names
    # below are name-mangled class-private declarations and the long
    # '__httpeuclid...' strings are pyxb-internal storage keys; both must be
    # kept exactly as generated.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'pixelVector2CoordinateType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 108, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Name1 uses Python identifier Name1
    # (the False positional flag presumably marks the element as single-valued
    # -- confirm against pyxb.binding.content.ElementDeclaration)
    __Name1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name1'), 'Name1', '__httpeuclid_esa_orgschemabasimpstc_pixelVector2CoordinateType_Name1', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 113, 3), )
    Name1 = property(__Name1.value, __Name1.set, None, None)
    # Element Name2 uses Python identifier Name2
    __Name2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name2'), 'Name2', '__httpeuclid_esa_orgschemabasimpstc_pixelVector2CoordinateType_Name2', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 114, 3), )
    Name2 = property(__Name2.value, __Name2.set, None, None)
    # Element Value2 uses Python identifier Value2
    __Value2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Value2'), 'Value2', '__httpeuclid_esa_orgschemabasimpstc_pixelVector2CoordinateType_Value2', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 115, 3), )
    Value2 = property(__Value2.value, __Value2.set, None, None)
    # Attribute CoordUnit uses Python identifier CoordUnit
    __CoordUnit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'CoordUnit'), 'CoordUnit', '__httpeuclid_esa_orgschemabasimpstc_pixelVector2CoordinateType_CoordUnit', _ImportedBinding_euclid_dm__utd.unit)
    __CoordUnit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 118, 2)
    __CoordUnit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 118, 2)
    CoordUnit = property(__CoordUnit.value, __CoordUnit.set, None, None)
    # Register the declarations above with pyxb's element/attribute lookup maps.
    _ElementMap.update({
        __Name1.name() : __Name1,
        __Name2.name() : __Name2,
        __Value2.name() : __Value2
    })
    _AttributeMap.update({
        __CoordUnit.name() : __CoordUnit
    })
Namespace.addCategoryObject('typeBinding', u'pixelVector2CoordinateType', pixelVector2CoordinateType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}vector3CoordinateType with content type ELEMENT_ONLY
class vector3CoordinateType (pyxb.binding.basis.complexTypeDefinition):
    """3-D coordinate typeSingle Error3, Resolution3, Size3, PixSize3 elements indicate definite values ; pairs indicate ranges."""
    # NOTE(review): PyXB-generated binding class; the mangled declarations and
    # '__httpeuclid...' key strings must stay exactly as generated.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'vector3CoordinateType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 90, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Name1 uses Python identifier Name1
    # (the False/True positional flag presumably distinguishes single-valued
    # from repeatable elements -- confirm against
    # pyxb.binding.content.ElementDeclaration)
    __Name1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name1'), 'Name1', '__httpeuclid_esa_orgschemabasimpstc_vector3CoordinateType_Name1', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 96, 3), )
    Name1 = property(__Name1.value, __Name1.set, None, None)
    # Element Name2 uses Python identifier Name2
    __Name2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name2'), 'Name2', '__httpeuclid_esa_orgschemabasimpstc_vector3CoordinateType_Name2', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 97, 3), )
    Name2 = property(__Name2.value, __Name2.set, None, None)
    # Element Name3 uses Python identifier Name3
    __Name3 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name3'), 'Name3', '__httpeuclid_esa_orgschemabasimpstc_vector3CoordinateType_Name3', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 98, 3), )
    Name3 = property(__Name3.value, __Name3.set, None, None)
    # Element Value3 uses Python identifier Value3
    __Value3 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Value3'), 'Value3', '__httpeuclid_esa_orgschemabasimpstc_vector3CoordinateType_Value3', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 99, 3), )
    Value3 = property(__Value3.value, __Value3.set, None, None)
    # Element Error3 uses Python identifier Error3
    __Error3 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Error3'), 'Error3', '__httpeuclid_esa_orgschemabasimpstc_vector3CoordinateType_Error3', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 100, 3), )
    Error3 = property(__Error3.value, __Error3.set, None, None)
    # Element Resolution3 uses Python identifier Resolution3
    __Resolution3 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Resolution3'), 'Resolution3', '__httpeuclid_esa_orgschemabasimpstc_vector3CoordinateType_Resolution3', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 101, 3), )
    Resolution3 = property(__Resolution3.value, __Resolution3.set, None, None)
    # Element Size3 uses Python identifier Size3
    __Size3 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Size3'), 'Size3', '__httpeuclid_esa_orgschemabasimpstc_vector3CoordinateType_Size3', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 102, 3), )
    Size3 = property(__Size3.value, __Size3.set, None, None)
    # Element PixSize3 uses Python identifier PixSize3
    __PixSize3 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'PixSize3'), 'PixSize3', '__httpeuclid_esa_orgschemabasimpstc_vector3CoordinateType_PixSize3', True, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 103, 3), )
    PixSize3 = property(__PixSize3.value, __PixSize3.set, None, None)
    # Attribute CoordUnit uses Python identifier CoordUnit
    __CoordUnit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'CoordUnit'), 'CoordUnit', '__httpeuclid_esa_orgschemabasimpstc_vector3CoordinateType_CoordUnit', _ImportedBinding_euclid_dm__utd.unit)
    __CoordUnit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 105, 2)
    __CoordUnit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 105, 2)
    CoordUnit = property(__CoordUnit.value, __CoordUnit.set, None, None)
    # Register the declarations above with pyxb's element/attribute lookup maps.
    _ElementMap.update({
        __Name1.name() : __Name1,
        __Name2.name() : __Name2,
        __Name3.name() : __Name3,
        __Value3.name() : __Value3,
        __Error3.name() : __Error3,
        __Resolution3.name() : __Resolution3,
        __Size3.name() : __Size3,
        __PixSize3.name() : __PixSize3
    })
    _AttributeMap.update({
        __CoordUnit.name() : __CoordUnit
    })
Namespace.addCategoryObject('typeBinding', u'vector3CoordinateType', vector3CoordinateType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}pixelVector3CoordinateType with content type ELEMENT_ONLY
class pixelVector3CoordinateType (pyxb.binding.basis.complexTypeDefinition):
    """3-D pixel coordinate type"""
    # NOTE(review): PyXB-generated binding class; the mangled declarations and
    # '__httpeuclid...' key strings must stay exactly as generated.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'pixelVector3CoordinateType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 108, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.anyType
    # Element Name1 uses Python identifier Name1
    __Name1 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name1'), 'Name1', '__httpeuclid_esa_orgschemabasimpstc_pixelVector3CoordinateType_Name1', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 113, 3), )
    Name1 = property(__Name1.value, __Name1.set, None, None)
    # Element Name2 uses Python identifier Name2
    __Name2 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name2'), 'Name2', '__httpeuclid_esa_orgschemabasimpstc_pixelVector3CoordinateType_Name2', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 114, 3), )
    Name2 = property(__Name2.value, __Name2.set, None, None)
    # Element Name3 uses Python identifier Name3
    __Name3 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Name3'), 'Name3', '__httpeuclid_esa_orgschemabasimpstc_pixelVector3CoordinateType_Name3', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 115, 3), )
    Name3 = property(__Name3.value, __Name3.set, None, None)
    # Element Value3 uses Python identifier Value3
    __Value3 = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(None, u'Value3'), 'Value3', '__httpeuclid_esa_orgschemabasimpstc_pixelVector3CoordinateType_Value3', False, pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 116, 3), )
    Value3 = property(__Value3.value, __Value3.set, None, None)
    # Attribute CoordUnit uses Python identifier CoordUnit
    __CoordUnit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'CoordUnit'), 'CoordUnit', '__httpeuclid_esa_orgschemabasimpstc_pixelVector3CoordinateType_CoordUnit', _ImportedBinding_euclid_dm__utd.unit)
    __CoordUnit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 118, 2)
    __CoordUnit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 118, 2)
    CoordUnit = property(__CoordUnit.value, __CoordUnit.set, None, None)
    # Register the declarations above with pyxb's element/attribute lookup maps.
    _ElementMap.update({
        __Name1.name() : __Name1,
        __Name2.name() : __Name2,
        __Name3.name() : __Name3,
        __Value3.name() : __Value3
    })
    _AttributeMap.update({
        __CoordUnit.name() : __CoordUnit
    })
Namespace.addCategoryObject('typeBinding', u'pixelVector3CoordinateType', pixelVector3CoordinateType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}spectralIntervalType with content type ELEMENT_ONLY
class spectralIntervalType (coordScalarIntervalType):
    """Contains a 1-D spectral interval"""
    # NOTE(review): PyXB-generated binding class extending
    # coordScalarIntervalType; element/attribute maps start as copies of the
    # base type's maps so inherited content is preserved.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'spectralIntervalType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 188, 1)
    _ElementMap = coordScalarIntervalType._ElementMap.copy()
    _AttributeMap = coordScalarIntervalType._AttributeMap.copy()
    # Base type is coordScalarIntervalType
    # Element LoLimit (LoLimit) inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Element HiLimit (HiLimit) inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute lo_include inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute hi_include inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute fill_factor inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute FrameId inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute SpectralUnit uses Python identifier SpectralUnit (required)
    __SpectralUnit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'SpectralUnit'), 'SpectralUnit', '__httpeuclid_esa_orgschemabasimpstc_spectralIntervalType_SpectralUnit', _ImportedBinding_euclid_dm__utd.unit, required=True)
    __SpectralUnit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 194, 4)
    __SpectralUnit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 194, 4)
    SpectralUnit = property(__SpectralUnit.value, __SpectralUnit.set, None, None)
    # No new elements; only the SpectralUnit attribute is added over the base.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __SpectralUnit.name() : __SpectralUnit
    })
Namespace.addCategoryObject('typeBinding', u'spectralIntervalType', spectralIntervalType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}redshiftIntervalType with content type ELEMENT_ONLY
class redshiftIntervalType (coordScalarIntervalType):
    """Contains a 1-D redshift interval; position and velocity units are required if redshifts are expressed as Doppler velocities"""
    # NOTE(review): PyXB-generated binding class extending
    # coordScalarIntervalType; adds two required unit attributes.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'redshiftIntervalType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 199, 1)
    _ElementMap = coordScalarIntervalType._ElementMap.copy()
    _AttributeMap = coordScalarIntervalType._AttributeMap.copy()
    # Base type is coordScalarIntervalType
    # Element LoLimit (LoLimit) inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Element HiLimit (HiLimit) inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute lo_include inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute hi_include inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute fill_factor inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute FrameId inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordScalarIntervalType
    # Attribute CoordUnit uses Python identifier CoordUnit (required)
    __CoordUnit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'CoordUnit'), 'CoordUnit', '__httpeuclid_esa_orgschemabasimpstc_redshiftIntervalType_CoordUnit', _ImportedBinding_euclid_dm__utd.unit, required=True)
    __CoordUnit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 205, 4)
    __CoordUnit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 205, 4)
    CoordUnit = property(__CoordUnit.value, __CoordUnit.set, None, None)
    # Attribute RedshiftUnit uses Python identifier RedshiftUnit (required)
    __RedshiftUnit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'RedshiftUnit'), 'RedshiftUnit', '__httpeuclid_esa_orgschemabasimpstc_redshiftIntervalType_RedshiftUnit', _ImportedBinding_euclid_dm__utd.unit, required=True)
    __RedshiftUnit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 206, 4)
    __RedshiftUnit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 206, 4)
    RedshiftUnit = property(__RedshiftUnit.value, __RedshiftUnit.set, None, None)
    # No new elements; only the two unit attributes are added over the base.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __CoordUnit.name() : __CoordUnit,
        __RedshiftUnit.name() : __RedshiftUnit
    })
Namespace.addCategoryObject('typeBinding', u'redshiftIntervalType', redshiftIntervalType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}regionAreaType with content type SIMPLE
class regionAreaType (pyxb.binding.basis.complexTypeDefinition):
    """Element to hold the area of a Region, once calculated; the element holds the actual area, linearAreaUnit the linear units of the of the area (i.e., it should be squared to get the proper units of the area), and validArea indicates whether the area has been calculated properly."""
    # NOTE(review): PyXB-generated binding class with SIMPLE content: the
    # element's text value is an xs:double, plus two required attributes.
    _TypeDefinition = pyxb.binding.datatypes.double
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'regionAreaType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 212, 1)
    _ElementMap = {}
    _AttributeMap = {}
    # Base type is pyxb.binding.datatypes.double
    # Attribute linearAreaUnit uses Python identifier linearAreaUnit (required)
    __linearAreaUnit = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'linearAreaUnit'), 'linearAreaUnit', '__httpeuclid_esa_orgschemabasimpstc_regionAreaType_linearAreaUnit', _ImportedBinding_euclid_dm__utd.unit, required=True)
    __linearAreaUnit._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 218, 4)
    __linearAreaUnit._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 218, 4)
    linearAreaUnit = property(__linearAreaUnit.value, __linearAreaUnit.set, None, None)
    # Attribute validArea uses Python identifier validArea (required boolean)
    __validArea = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'validArea'), 'validArea', '__httpeuclid_esa_orgschemabasimpstc_regionAreaType_validArea', pyxb.binding.datatypes.boolean, required=True)
    __validArea._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 219, 4)
    __validArea._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 219, 4)
    validArea = property(__validArea.value, __validArea.set, None, None)
    # SIMPLE content: no child elements, attributes only.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __linearAreaUnit.name() : __linearAreaUnit,
        __validArea.name() : __validArea
    })
Namespace.addCategoryObject('typeBinding', u'regionAreaType', regionAreaType)
# Complex type {http://euclid.esa.org/schema/bas/imp/stc}healpixType with content type EMPTY
class healpixType (coordFlavorType):
    """2-D Healpix coordinates; defaults for H(4) and K(3)"""
    # NOTE(review): PyXB-generated binding class extending coordFlavorType;
    # EMPTY content, adds two optional short attributes with schema defaults.
    _TypeDefinition = None
    _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_EMPTY
    _Abstract = False
    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, u'healpixType')
    _XSDLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 618, 1)
    _ElementMap = coordFlavorType._ElementMap.copy()
    _AttributeMap = coordFlavorType._AttributeMap.copy()
    # Base type is coordFlavorType
    # Attribute coord_naxes inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordFlavorType
    # Attribute handedness inherited from {http://euclid.esa.org/schema/bas/imp/stc}coordFlavorType
    # Attribute healpix_H uses Python identifier healpix_H (schema default '4')
    __healpix_H = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'healpix_H'), 'healpix_H', '__httpeuclid_esa_orgschemabasimpstc_healpixType_healpix_H', pyxb.binding.datatypes.short, unicode_default=u'4')
    __healpix_H._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 624, 4)
    __healpix_H._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 624, 4)
    healpix_H = property(__healpix_H.value, __healpix_H.set, None, None)
    # Attribute healpix_K uses Python identifier healpix_K (schema default '3')
    __healpix_K = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(None, u'healpix_K'), 'healpix_K', '__httpeuclid_esa_orgschemabasimpstc_healpixType_healpix_K', pyxb.binding.datatypes.short, unicode_default=u'3')
    __healpix_K._DeclarationLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 625, 4)
    __healpix_K._UseLocation = pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 625, 4)
    healpix_K = property(__healpix_K.value, __healpix_K.set, None, None)
    # EMPTY content: no child elements, attributes only.
    _ElementMap.update({
    })
    _AttributeMap.update({
        __healpix_H.name() : __healpix_H,
        __healpix_K.name() : __healpix_K
    })
Namespace.addCategoryObject('typeBinding', u'healpixType', healpixType)
# Register coordScalarIntervalType's child element declarations.
coordScalarIntervalType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'LoLimit'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=coordScalarIntervalType, documentation=u'Lower bound of interval.', location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 127, 3)))
coordScalarIntervalType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'HiLimit'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=coordScalarIntervalType, documentation=u'Upper bound of interval.', location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 132, 3)))
def _BuildAutomaton ():
    """Build the FAC content-model automaton for coordScalarIntervalType:
    an optional LoLimit followed by an optional HiLimit (each 0..1)."""
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton
    del _BuildAutomaton
    import pyxb.utils.fac as fac
    counters = set()
    # FIX: the generated long-integer literals (min=0L) are a SyntaxError on
    # Python 3; plain 0 behaves identically on Python 2 as well.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 127, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 132, 3))
    counters.add(cc_1)
    states = []
    # State 0: LoLimit (optional, so both states are initial).
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(coordScalarIntervalType._UseForTag(pyxb.namespace.ExpandedName(None, u'LoLimit')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 127, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # State 1: HiLimit.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(coordScalarIntervalType._UseForTag(pyxb.namespace.ExpandedName(None, u'HiLimit')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 132, 3))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    st_1._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
coordScalarIntervalType._Automaton = _BuildAutomaton()
# Register coord2VecIntervalType's child element declarations.
coord2VecIntervalType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'LoLimit2Vec'), _ImportedBinding_euclid_dm__dtd.double2Type, scope=coord2VecIntervalType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 161, 3)))
coord2VecIntervalType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'HiLimit2Vec'), _ImportedBinding_euclid_dm__dtd.double2Type, scope=coord2VecIntervalType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 162, 3)))
def _BuildAutomaton_ ():
    """Build the FAC content-model automaton for coord2VecIntervalType:
    an optional LoLimit2Vec followed by an optional HiLimit2Vec (each 0..1)."""
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_
    del _BuildAutomaton_
    import pyxb.utils.fac as fac
    counters = set()
    # FIX: the generated long-integer literals (min=0L) are a SyntaxError on
    # Python 3; plain 0 behaves identically on Python 2 as well.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 161, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 162, 3))
    counters.add(cc_1)
    states = []
    # State 0: LoLimit2Vec (optional, so both states are initial).
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(coord2VecIntervalType._UseForTag(pyxb.namespace.ExpandedName(None, u'LoLimit2Vec')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 161, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # State 1: HiLimit2Vec.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(coord2VecIntervalType._UseForTag(pyxb.namespace.ExpandedName(None, u'HiLimit2Vec')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 162, 3))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    st_1._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
coord2VecIntervalType._Automaton = _BuildAutomaton_()
# Register coord3VecIntervalType's child element declarations.
coord3VecIntervalType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'LoLimit3Vec'), _ImportedBinding_euclid_dm__dtd.double3Type, scope=coord3VecIntervalType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 177, 3)))
coord3VecIntervalType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'HiLimit3Vec'), _ImportedBinding_euclid_dm__dtd.double3Type, scope=coord3VecIntervalType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 178, 3)))
def _BuildAutomaton_2 ():
    """Build the FAC content-model automaton for coord3VecIntervalType:
    an optional LoLimit3Vec followed by an optional HiLimit3Vec (each 0..1)."""
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_2
    del _BuildAutomaton_2
    import pyxb.utils.fac as fac
    counters = set()
    # FIX: the generated long-integer literals (min=0L) are a SyntaxError on
    # Python 3; plain 0 behaves identically on Python 2 as well.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 177, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 178, 3))
    counters.add(cc_1)
    states = []
    # State 0: LoLimit3Vec (optional, so both states are initial).
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(coord3VecIntervalType._UseForTag(pyxb.namespace.ExpandedName(None, u'LoLimit3Vec')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 177, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # State 1: HiLimit3Vec.
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(coord3VecIntervalType._UseForTag(pyxb.namespace.ExpandedName(None, u'HiLimit3Vec')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 178, 3))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    st_1._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
coord3VecIntervalType._Automaton = _BuildAutomaton_2()
# Register circleType's child element declarations.
circleType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Center'), _ImportedBinding_euclid_dm__dtd.double2Type, scope=circleType, documentation=u"The coordinates of the circle's center", location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 250, 3)))
circleType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Radius'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=circleType, documentation=u'The radius of the circle', location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 255, 3)))
def _BuildAutomaton_3 ():
    """Build the FAC content-model automaton for circleType: a mandatory
    Center element followed by a mandatory Radius element."""
    # Single-use helper: remove it from the module namespace once invoked.
    global _BuildAutomaton_3
    del _BuildAutomaton_3
    import pyxb.utils.fac as fac
    schema_path = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    # Center: initial state, not accepting on its own (Radius must follow).
    center_symbol = pyxb.binding.content.ElementUse(circleType._UseForTag(pyxb.namespace.ExpandedName(None, u'Center')), pyxb.utils.utility.Location(schema_path, 250, 3))
    center_state = fac.State(center_symbol, is_initial=True, final_update=None, is_unordered_catenation=False)
    # Radius: terminal accepting state.
    radius_symbol = pyxb.binding.content.ElementUse(circleType._UseForTag(pyxb.namespace.ExpandedName(None, u'Radius')), pyxb.utils.utility.Location(schema_path, 255, 3))
    radius_state = fac.State(radius_symbol, is_initial=False, final_update=set(), is_unordered_catenation=False)
    # Center -> Radius is the only transition; Radius has none.
    center_state._set_transitionSet([fac.Transition(radius_state, [])])
    radius_state._set_transitionSet([])
    # No counters and nullability False: empty content is not accepted.
    return fac.Automaton([center_state, radius_state], set(), False, containing_state=None)
circleType._Automaton = _BuildAutomaton_3()
# Register ellipseType's child element declarations (Center, SemiMajorAxis,
# SemiMinorAxis, PosAngle).
# NOTE(review): the documentation strings for Center and SemiMajorAxis still
# say "circle"/"radius" -- apparently copy-pasted in the source XSD. They are
# runtime data generated from the schema, so they are left byte-identical;
# fix them in the XSD and regenerate if desired.
ellipseType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Center'), _ImportedBinding_euclid_dm__dtd.double2Type, scope=ellipseType, documentation=u"The coordinates of the circle's center", location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 268, 3)))
ellipseType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'SemiMajorAxis'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=ellipseType, documentation=u'The radius of the circle', location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 273, 3)))
ellipseType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'SemiMinorAxis'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=ellipseType, documentation=u'Half the minor axis of the ellipse, in radius_unit', location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 278, 3)))
ellipseType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'PosAngle'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=ellipseType, documentation=u'Position angle of major axis (Radius).', location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 283, 3)))
def _BuildAutomaton_4 ():
    # Self-deleting helper: builds the content-model automaton for ellipseType,
    # the fixed sequence Center -> SemiMajorAxis -> SemiMinorAxis -> PosAngle.
    global _BuildAutomaton_4
    del _BuildAutomaton_4
    import pyxb.utils.fac as fac
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    def element_use (tag, line):
        # ElementUse for one child element of ellipseType at its schema location.
        return pyxb.binding.content.ElementUse(ellipseType._UseForTag(pyxb.namespace.ExpandedName(None, tag)), pyxb.utils.utility.Location(xsd, line, 3))
    # Only the last state is accepting (empty final_update set); the first three
    # carry final_update=None, i.e. the document may not end there.
    st_center = fac.State(element_use(u'Center', 268), is_initial=True, final_update=None, is_unordered_catenation=False)
    st_major = fac.State(element_use(u'SemiMajorAxis', 273), is_initial=False, final_update=None, is_unordered_catenation=False)
    st_minor = fac.State(element_use(u'SemiMinorAxis', 278), is_initial=False, final_update=None, is_unordered_catenation=False)
    st_angle = fac.State(element_use(u'PosAngle', 283), is_initial=False, final_update=set(), is_unordered_catenation=False)
    # Chain the states: each element must be followed by the next one.
    st_center._set_transitionSet([fac.Transition(st_major, [])])
    st_major._set_transitionSet([fac.Transition(st_minor, [])])
    st_minor._set_transitionSet([fac.Transition(st_angle, [])])
    st_angle._set_transitionSet([])
    return fac.Automaton([st_center, st_major, st_minor, st_angle], set(), False, containing_state=None)
# Install ellipseType's automaton and register smallCircleType's Pole child.
ellipseType._Automaton = _BuildAutomaton_4()
smallCircleType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Pole'), _ImportedBinding_euclid_dm__dtd.double2Type, scope=smallCircleType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 297, 3)))
def _BuildAutomaton_5 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_5
del _BuildAutomaton_5
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 297, 3))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(smallCircleType._UseForTag(pyxb.namespace.ExpandedName(None, u'Pole')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 297, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
# Install smallCircleType's automaton and register vertexType's two children.
smallCircleType._Automaton = _BuildAutomaton_5()
vertexType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Position'), _ImportedBinding_euclid_dm__dtd.double2Type, scope=vertexType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 306, 3)))
vertexType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'SmallCircle'), smallCircleType, scope=vertexType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 307, 3)))
def _BuildAutomaton_6 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_6
del _BuildAutomaton_6
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 307, 3))
counters.add(cc_0)
states = []
final_update = set()
symbol = pyxb.binding.content.ElementUse(vertexType._UseForTag(pyxb.namespace.ExpandedName(None, u'Position')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 306, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(vertexType._UseForTag(pyxb.namespace.ExpandedName(None, u'SmallCircle')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 307, 3))
st_1 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
transitions = []
transitions.append(fac.Transition(st_1, [
]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, True) ]))
st_1._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
# Install vertexType's automaton and register polygonType's Vertex child.
vertexType._Automaton = _BuildAutomaton_6()
polygonType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Vertex'), vertexType, scope=polygonType, documentation=u'In order to form polygons, vertices are to be connected with straight line segments. In the case of spherical coordinates: greatcircle segments; if a smallCircle element si present, the vertex and its predecessor are to be connected with a smallcircle, by default in the CoordSys that is referenced; optionally, a pole may be specified (other than the CoordSys pole) that defines the smallcircle system', location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 316, 3)))
def _BuildAutomaton_7 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_7
del _BuildAutomaton_7
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=1, max=100L, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 316, 3))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(polygonType._UseForTag(pyxb.namespace.ExpandedName(None, u'Vertex')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 316, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
# Install polygonType's automaton and register boxType's two children.
polygonType._Automaton = _BuildAutomaton_7()
boxType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Center'), _ImportedBinding_euclid_dm__dtd.double2Type, scope=boxType, documentation=u"The coordinates of the box's center", location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 329, 3)))
boxType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Size'), _ImportedBinding_euclid_dm__dtd.double2Type, scope=boxType, documentation=u"The lengths of the box's sides", location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 334, 3)))
def _BuildAutomaton_8 ():
    # Self-deleting helper: builds the content-model automaton for boxType,
    # the fixed sequence Center -> Size.
    global _BuildAutomaton_8
    del _BuildAutomaton_8
    import pyxb.utils.fac as fac
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    def element_use (tag, line):
        # ElementUse for one child element of boxType at its schema location.
        return pyxb.binding.content.ElementUse(boxType._UseForTag(pyxb.namespace.ExpandedName(None, tag)), pyxb.utils.utility.Location(xsd, line, 3))
    # Only the Size state is accepting; Center must be followed by Size.
    st_center = fac.State(element_use(u'Center', 329), is_initial=True, final_update=None, is_unordered_catenation=False)
    st_size = fac.State(element_use(u'Size', 334), is_initial=False, final_update=set(), is_unordered_catenation=False)
    st_center._set_transitionSet([fac.Transition(st_size, [])])
    st_size._set_transitionSet([])
    return fac.Automaton([st_center, st_size], set(), False, containing_state=None)
# Install boxType's automaton and register sectorType's three children.
boxType._Automaton = _BuildAutomaton_8()
sectorType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Position'), _ImportedBinding_euclid_dm__dtd.double2Type, scope=sectorType, documentation=u'The vertex position of the sector', location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 347, 3)))
sectorType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'PosAngle1'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=sectorType, documentation=u'The area cw from this position angle is included', location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 352, 3)))
sectorType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'PosAngle2'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=sectorType, documentation=u'The area cw from this position angle is included.', location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 357, 3)))
def _BuildAutomaton_9 ():
    # Self-deleting helper: builds the content-model automaton for sectorType,
    # the fixed sequence Position -> PosAngle1 -> PosAngle2.
    global _BuildAutomaton_9
    del _BuildAutomaton_9
    import pyxb.utils.fac as fac
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    def element_use (tag, line):
        # ElementUse for one child element of sectorType at its schema location.
        return pyxb.binding.content.ElementUse(sectorType._UseForTag(pyxb.namespace.ExpandedName(None, tag)), pyxb.utils.utility.Location(xsd, line, 3))
    # Only the last state is accepting (empty final_update set).
    st_position = fac.State(element_use(u'Position', 347), is_initial=True, final_update=None, is_unordered_catenation=False)
    st_angle1 = fac.State(element_use(u'PosAngle1', 352), is_initial=False, final_update=None, is_unordered_catenation=False)
    st_angle2 = fac.State(element_use(u'PosAngle2', 357), is_initial=False, final_update=set(), is_unordered_catenation=False)
    st_position._set_transitionSet([fac.Transition(st_angle1, [])])
    st_angle1._set_transitionSet([fac.Transition(st_angle2, [])])
    st_angle2._set_transitionSet([])
    return fac.Automaton([st_position, st_angle1, st_angle2], set(), False, containing_state=None)
# Install sectorType's automaton and register halfspaceType's two children.
sectorType._Automaton = _BuildAutomaton_9()
halfspaceType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Vector'), _ImportedBinding_euclid_dm__dtd.double3Type, scope=halfspaceType, documentation=u'This needs to be a spherical coordinate vector; it is the unit vector that is normal to the plane that forms a constraint for a convex', location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 377, 3)))
halfspaceType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Offset'), hsOffsetType, scope=halfspaceType, documentation=u'The distance along the normal vector where the constraint plane intersects that vector; if positive, the spherical sector on the far side (seen from the center) is selected; if negative, the point of intersection is in the opposite direction of the vector, resulting in more than a hemisphere; the valid range is -1.0 to +1.0', location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 382, 3)))
def _BuildAutomaton_10 ():
    # Self-deleting helper: builds the content-model automaton for
    # halfspaceType, the fixed sequence Vector -> Offset.
    global _BuildAutomaton_10
    del _BuildAutomaton_10
    import pyxb.utils.fac as fac
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    def element_use (tag, line):
        # ElementUse for one child element of halfspaceType at its schema location.
        return pyxb.binding.content.ElementUse(halfspaceType._UseForTag(pyxb.namespace.ExpandedName(None, tag)), pyxb.utils.utility.Location(xsd, line, 3))
    # Only the Offset state is accepting; Vector must be followed by Offset.
    st_vector = fac.State(element_use(u'Vector', 377), is_initial=True, final_update=None, is_unordered_catenation=False)
    st_offset = fac.State(element_use(u'Offset', 382), is_initial=False, final_update=set(), is_unordered_catenation=False)
    st_vector._set_transitionSet([fac.Transition(st_offset, [])])
    st_offset._set_transitionSet([])
    return fac.Automaton([st_vector, st_offset], set(), False, containing_state=None)
# Install halfspaceType's automaton and register convexType's Halfspace child.
halfspaceType._Automaton = _BuildAutomaton_10()
convexType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Halfspace'), halfspaceType, scope=convexType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 395, 3)))
def _BuildAutomaton_11 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_11
del _BuildAutomaton_11
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=1, max=100L, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 395, 3))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(convexType._UseForTag(pyxb.namespace.ExpandedName(None, u'Halfspace')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 395, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
# Install convexType's automaton and register unionType's Region child.
convexType._Automaton = _BuildAutomaton_11()
unionType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Region'), regionType, scope=unionType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 406, 3)))
def _BuildAutomaton_12 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_12
del _BuildAutomaton_12
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=2L, max=100L, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 406, 3))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(unionType._UseForTag(pyxb.namespace.ExpandedName(None, u'Region')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 406, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
# Install unionType's automaton and register intersectionType's Region child.
unionType._Automaton = _BuildAutomaton_12()
intersectionType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Region'), regionType, scope=intersectionType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 415, 3)))
def _BuildAutomaton_13 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_13
del _BuildAutomaton_13
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=2L, max=100L, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 415, 3))
counters.add(cc_0)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(intersectionType._UseForTag(pyxb.namespace.ExpandedName(None, u'Region')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 415, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
st_0._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
# Install intersectionType's automaton and register negationType's Region child.
intersectionType._Automaton = _BuildAutomaton_13()
negationType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Region'), regionType, scope=negationType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 424, 3)))
def _BuildAutomaton_14 ():
    # Self-deleting helper: builds the content-model automaton for negationType,
    # which wraps exactly one Region child.
    global _BuildAutomaton_14
    del _BuildAutomaton_14
    import pyxb.utils.fac as fac
    symbol = pyxb.binding.content.ElementUse(negationType._UseForTag(pyxb.namespace.ExpandedName(None, u'Region')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 424, 3))
    # One initial, accepting state with no outgoing transitions: exactly one Region.
    only_state = fac.State(symbol, is_initial=True, final_update=set(), is_unordered_catenation=False)
    only_state._set_transitionSet([])
    return fac.Automaton([only_state], set(), False, containing_state=None)
# Install negationType's automaton and register diffType's two Region children.
negationType._Automaton = _BuildAutomaton_14()
diffType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Region'), regionType, scope=diffType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 433, 3)))
diffType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Region2'), regionType, scope=diffType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 434, 3)))
def _BuildAutomaton_15 ():
    # Self-deleting helper: builds the content-model automaton for diffType,
    # the fixed sequence Region -> Region2.
    global _BuildAutomaton_15
    del _BuildAutomaton_15
    import pyxb.utils.fac as fac
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    def element_use (tag, line):
        # ElementUse for one child element of diffType at its schema location.
        return pyxb.binding.content.ElementUse(diffType._UseForTag(pyxb.namespace.ExpandedName(None, tag)), pyxb.utils.utility.Location(xsd, line, 3))
    # Only the Region2 state is accepting; Region must be followed by Region2.
    st_region = fac.State(element_use(u'Region', 433), is_initial=True, final_update=None, is_unordered_catenation=False)
    st_region2 = fac.State(element_use(u'Region2', 434), is_initial=False, final_update=set(), is_unordered_catenation=False)
    st_region._set_transitionSet([fac.Transition(st_region2, [])])
    st_region2._set_transitionSet([])
    return fac.Automaton([st_region, st_region2], set(), False, containing_state=None)
# Install diffType's automaton and register astroCoordSystem's four optional
# frame children (TimeFrame, SpaceFrame, SpectralFrame, RedshiftFrame).
diffType._Automaton = _BuildAutomaton_15()
astroCoordSystem._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'TimeFrame'), timeFrame, scope=astroCoordSystem, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 444, 3)))
astroCoordSystem._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'SpaceFrame'), spaceFrame, scope=astroCoordSystem, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 445, 3)))
astroCoordSystem._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'SpectralFrame'), spectralFrame, scope=astroCoordSystem, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 446, 3)))
astroCoordSystem._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'RedshiftFrame'), redshiftFrame, scope=astroCoordSystem, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 447, 3)))
def _BuildAutomaton_16 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_16
del _BuildAutomaton_16
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 444, 3))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 445, 3))
counters.add(cc_1)
cc_2 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 446, 3))
counters.add(cc_2)
cc_3 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 447, 3))
counters.add(cc_3)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(astroCoordSystem._UseForTag(pyxb.namespace.ExpandedName(None, u'TimeFrame')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 444, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(astroCoordSystem._UseForTag(pyxb.namespace.ExpandedName(None, u'SpaceFrame')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 445, 3))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_2, False))
symbol = pyxb.binding.content.ElementUse(astroCoordSystem._UseForTag(pyxb.namespace.ExpandedName(None, u'SpectralFrame')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 446, 3))
st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_3, False))
symbol = pyxb.binding.content.ElementUse(astroCoordSystem._UseForTag(pyxb.namespace.ExpandedName(None, u'RedshiftFrame')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 447, 3))
st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_3)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, False) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_1, False) ]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_2, True) ]))
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_2, False) ]))
st_2._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_3, [
fac.UpdateInstruction(cc_3, True) ]))
st_3._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
# Install astroCoordSystem's automaton and register timeFrame's three children.
astroCoordSystem._Automaton = _BuildAutomaton_16()
timeFrame._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name'), pyxb.binding.datatypes.string, scope=timeFrame, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 457, 3)))
timeFrame._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'TimeScale'), timeScale, scope=timeFrame, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 458, 3)))
timeFrame._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'ReferencePosition'), referencePosition, scope=timeFrame, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 459, 3)))
def _BuildAutomaton_17 ():
    # Self-deleting helper: builds the content-model automaton for timeFrame,
    # the fixed sequence Name -> TimeScale -> ReferencePosition.
    global _BuildAutomaton_17
    del _BuildAutomaton_17
    import pyxb.utils.fac as fac
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    def element_use (tag, line):
        # ElementUse for one child element of timeFrame at its schema location.
        return pyxb.binding.content.ElementUse(timeFrame._UseForTag(pyxb.namespace.ExpandedName(None, tag)), pyxb.utils.utility.Location(xsd, line, 3))
    # Only the last state is accepting (empty final_update set).
    st_name = fac.State(element_use(u'Name', 457), is_initial=True, final_update=None, is_unordered_catenation=False)
    st_scale = fac.State(element_use(u'TimeScale', 458), is_initial=False, final_update=None, is_unordered_catenation=False)
    st_refpos = fac.State(element_use(u'ReferencePosition', 459), is_initial=False, final_update=set(), is_unordered_catenation=False)
    st_name._set_transitionSet([fac.Transition(st_scale, [])])
    st_scale._set_transitionSet([fac.Transition(st_refpos, [])])
    st_refpos._set_transitionSet([])
    return fac.Automaton([st_name, st_scale, st_refpos], set(), False, containing_state=None)
# Install timeFrame's automaton and register spaceFrame's four children.
timeFrame._Automaton = _BuildAutomaton_17()
spaceFrame._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name'), pyxb.binding.datatypes.string, scope=spaceFrame, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 469, 3)))
spaceFrame._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'SpaceRefFrame'), coordRefFrame, scope=spaceFrame, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 470, 3)))
spaceFrame._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'ReferencePosition'), referencePosition, scope=spaceFrame, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 471, 3)))
spaceFrame._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'CoordFlavor'), coordFlavorType, scope=spaceFrame, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 472, 3)))
def _BuildAutomaton_18 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_18
    del _BuildAutomaton_18
    import pyxb.utils.fac as fac
    # Content model of spaceFrame: the fixed element sequence
    # Name -> SpaceRefFrame -> ReferencePosition -> CoordFlavor,
    # where only the last state is accepting.
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    def mkstate(tag, line, initial, accepting):
        # One automaton state per child element use.
        use = pyxb.binding.content.ElementUse(
            spaceFrame._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, line, 3))
        return fac.State(use, is_initial=initial,
                         final_update=(set() if accepting else None),
                         is_unordered_catenation=False)
    states = [
        mkstate(u'Name', 469, True, False),
        mkstate(u'SpaceRefFrame', 470, False, False),
        mkstate(u'ReferencePosition', 471, False, False),
        mkstate(u'CoordFlavor', 472, False, True),
    ]
    # Chain each state to its successor; the last state has no exits.
    for here, succ in zip(states, states[1:]):
        here._set_transitionSet([fac.Transition(succ, [])])
    states[-1]._set_transitionSet([])
    return fac.Automaton(states, set(), False, containing_state=None)
spaceFrame._Automaton = _BuildAutomaton_18()
# Register the child elements of the spectralFrame complex-type binding.
spectralFrame._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name'), pyxb.binding.datatypes.string, scope=spectralFrame, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 482, 3)))
spectralFrame._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'ReferencePosition'), referencePosition, scope=spectralFrame, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 483, 3)))
def _BuildAutomaton_19 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_19
    del _BuildAutomaton_19
    import pyxb.utils.fac as fac
    # Content model of spectralFrame: required Name followed by required
    # ReferencePosition; only the latter state is accepting.
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    def mkstate(tag, line, initial, accepting):
        use = pyxb.binding.content.ElementUse(
            spectralFrame._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, line, 3))
        return fac.State(use, is_initial=initial,
                         final_update=(set() if accepting else None),
                         is_unordered_catenation=False)
    st_name = mkstate(u'Name', 482, True, False)
    st_refpos = mkstate(u'ReferencePosition', 483, False, True)
    st_name._set_transitionSet([fac.Transition(st_refpos, [])])
    st_refpos._set_transitionSet([])
    return fac.Automaton([st_name, st_refpos], set(), False, containing_state=None)
spectralFrame._Automaton = _BuildAutomaton_19()
# Register the child elements of the redshiftFrame complex-type binding.
redshiftFrame._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name'), pyxb.binding.datatypes.string, scope=redshiftFrame, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 499, 3)))
redshiftFrame._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Value'), redshiftFrameValue, scope=redshiftFrame, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 500, 3)))
redshiftFrame._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'DopplerDefinition'), dopplerDefinition, scope=redshiftFrame, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 501, 3)))
redshiftFrame._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'ReferencePosition'), referencePosition, scope=redshiftFrame, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 502, 3)))
def _BuildAutomaton_20 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_20
    del _BuildAutomaton_20
    import pyxb.utils.fac as fac
    # Content model of redshiftFrame: the fixed element sequence
    # Name -> Value -> DopplerDefinition -> ReferencePosition,
    # where only the last state is accepting.
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    def mkstate(tag, line, initial, accepting):
        use = pyxb.binding.content.ElementUse(
            redshiftFrame._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, line, 3))
        return fac.State(use, is_initial=initial,
                         final_update=(set() if accepting else None),
                         is_unordered_catenation=False)
    states = [
        mkstate(u'Name', 499, True, False),
        mkstate(u'Value', 500, False, False),
        mkstate(u'DopplerDefinition', 501, False, False),
        mkstate(u'ReferencePosition', 502, False, True),
    ]
    # Chain each state to its successor; the last state has no exits.
    for here, succ in zip(states, states[1:]):
        here._set_transitionSet([fac.Transition(succ, [])])
    states[-1]._set_transitionSet([])
    return fac.Automaton(states, set(), False, containing_state=None)
redshiftFrame._Automaton = _BuildAutomaton_20()
# Register the seven alternative flavor child elements of spatialCoordDefType.
spatialCoordDefType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'SPHERICAL'), coordFlavorType, scope=spatialCoordDefType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 608, 3)))
spatialCoordDefType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'CARTESIAN'), coordFlavorType, scope=spatialCoordDefType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 609, 3)))
spatialCoordDefType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'UNITSPHERE'), coordFlavorType, scope=spatialCoordDefType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 610, 3)))
spatialCoordDefType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'POLAR'), coordFlavorType, scope=spatialCoordDefType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 611, 3)))
spatialCoordDefType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'CYLINDRICAL'), coordFlavorType, scope=spatialCoordDefType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 612, 3)))
spatialCoordDefType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'STRING'), coordFlavorType, scope=spatialCoordDefType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 613, 3)))
spatialCoordDefType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'HEALPIX'), healpixType, scope=spatialCoordDefType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 614, 3)))
def _BuildAutomaton_21 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_21
    del _BuildAutomaton_21
    import pyxb.utils.fac as fac
    # Content model of spatialCoordDefType: a choice among seven flavor
    # elements.  Every state is both initial and accepting, and no state
    # has any outgoing transition.
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    flavors = ((u'SPHERICAL', 608), (u'CARTESIAN', 609), (u'UNITSPHERE', 610),
               (u'POLAR', 611), (u'CYLINDRICAL', 612), (u'STRING', 613),
               (u'HEALPIX', 614))
    states = []
    for tag, line in flavors:
        use = pyxb.binding.content.ElementUse(
            spatialCoordDefType._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, line, 3))
        state = fac.State(use, is_initial=True, final_update=set(),
                          is_unordered_catenation=False)
        state._set_transitionSet([])
        states.append(state)
    return fac.Automaton(states, set(), False, containing_state=None)
spatialCoordDefType._Automaton = _BuildAutomaton_21()
# Register the child elements of the astronTimeType complex-type binding.
astronTimeType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Timescale'), timeScale, scope=astronTimeType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 818, 3)))
astronTimeType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'TimeOffset'), timeOffset, scope=astronTimeType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 819, 3)))
astronTimeType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'AbsoluteTime'), isoTime, scope=astronTimeType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 820, 3)))
def _BuildAutomaton_22 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_22
del _BuildAutomaton_22
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 818, 3))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 819, 3))
counters.add(cc_1)
states = []
final_update = None
symbol = pyxb.binding.content.ElementUse(astronTimeType._UseForTag(pyxb.namespace.ExpandedName(None, u'Timescale')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 818, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = None
symbol = pyxb.binding.content.ElementUse(astronTimeType._UseForTag(pyxb.namespace.ExpandedName(None, u'TimeOffset')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 819, 3))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = set()
symbol = pyxb.binding.content.ElementUse(astronTimeType._UseForTag(pyxb.namespace.ExpandedName(None, u'AbsoluteTime')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 820, 3))
st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, False) ]))
st_1._set_transitionSet(transitions)
transitions = []
st_2._set_transitionSet(transitions)
return fac.Automaton(states, counters, False, containing_state=None)
astronTimeType._Automaton = _BuildAutomaton_22()
# Register the start/end child elements of TAIMillisecsecDateTimeRange.
TAIMillisecsecDateTimeRange._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'start'), TAIMillisecsecDateTime, scope=TAIMillisecsecDateTimeRange, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 894, 3)))
TAIMillisecsecDateTimeRange._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'end'), TAIMillisecsecDateTime, scope=TAIMillisecsecDateTimeRange, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 895, 3)))
def _BuildAutomaton_23 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_23
    del _BuildAutomaton_23
    import pyxb.utils.fac as fac
    # Content model of TAIMillisecsecDateTimeRange: required 'start'
    # followed by required 'end'; only the 'end' state is accepting.
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    def mkstate(tag, line, initial, accepting):
        use = pyxb.binding.content.ElementUse(
            TAIMillisecsecDateTimeRange._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, line, 3))
        return fac.State(use, is_initial=initial,
                         final_update=(set() if accepting else None),
                         is_unordered_catenation=False)
    st_start = mkstate(u'start', 894, True, False)
    st_end = mkstate(u'end', 895, False, True)
    st_start._set_transitionSet([fac.Transition(st_end, [])])
    st_end._set_transitionSet([])
    return fac.Automaton([st_start, st_end], set(), False, containing_state=None)
TAIMillisecsecDateTimeRange._Automaton = _BuildAutomaton_23()
# Register the start/end child elements of UTCDateTimeRange.
UTCDateTimeRange._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'start'), UTCDateTime, scope=UTCDateTimeRange, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 916, 3)))
UTCDateTimeRange._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'end'), UTCDateTime, scope=UTCDateTimeRange, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 917, 3)))
def _BuildAutomaton_24 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_24
    del _BuildAutomaton_24
    import pyxb.utils.fac as fac
    # Content model of UTCDateTimeRange: required 'start' followed by
    # required 'end'; only the 'end' state is accepting.
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    def mkstate(tag, line, initial, accepting):
        use = pyxb.binding.content.ElementUse(
            UTCDateTimeRange._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, line, 3))
        return fac.State(use, is_initial=initial,
                         final_update=(set() if accepting else None),
                         is_unordered_catenation=False)
    st_start = mkstate(u'start', 916, True, False)
    st_end = mkstate(u'end', 917, False, True)
    st_start._set_transitionSet([fac.Transition(st_end, [])])
    st_end._set_transitionSet([])
    return fac.Automaton([st_start, st_end], set(), False, containing_state=None)
UTCDateTimeRange._Automaton = _BuildAutomaton_24()
# Register the start/end child elements of UTCMicrosecDateTimeRange.
UTCMicrosecDateTimeRange._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'start'), UTCMicrosecDateTime, scope=UTCMicrosecDateTimeRange, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 937, 3)))
UTCMicrosecDateTimeRange._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'end'), UTCMicrosecDateTime, scope=UTCMicrosecDateTimeRange, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 938, 3)))
def _BuildAutomaton_25 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_25
    del _BuildAutomaton_25
    import pyxb.utils.fac as fac
    # Content model of UTCMicrosecDateTimeRange: required 'start' followed
    # by required 'end'; only the 'end' state is accepting.
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    def mkstate(tag, line, initial, accepting):
        use = pyxb.binding.content.ElementUse(
            UTCMicrosecDateTimeRange._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, line, 3))
        return fac.State(use, is_initial=initial,
                         final_update=(set() if accepting else None),
                         is_unordered_catenation=False)
    st_start = mkstate(u'start', 937, True, False)
    st_end = mkstate(u'end', 938, False, True)
    st_start._set_transitionSet([fac.Transition(st_end, [])])
    st_end._set_transitionSet([])
    return fac.Automaton([st_start, st_end], set(), False, containing_state=None)
UTCMicrosecDateTimeRange._Automaton = _BuildAutomaton_25()
# Register the start/end child elements of UTCMillisecDateTimeRange.
UTCMillisecDateTimeRange._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'start'), UTCMillisecDateTime, scope=UTCMillisecDateTimeRange, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 958, 3)))
UTCMillisecDateTimeRange._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'end'), UTCMillisecDateTime, scope=UTCMillisecDateTimeRange, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 959, 3)))
def _BuildAutomaton_26 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_26
    del _BuildAutomaton_26
    import pyxb.utils.fac as fac
    # Content model of UTCMillisecDateTimeRange: required 'start' followed
    # by required 'end'; only the 'end' state is accepting.
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    def mkstate(tag, line, initial, accepting):
        use = pyxb.binding.content.ElementUse(
            UTCMillisecDateTimeRange._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, line, 3))
        return fac.State(use, is_initial=initial,
                         final_update=(set() if accepting else None),
                         is_unordered_catenation=False)
    st_start = mkstate(u'start', 958, True, False)
    st_end = mkstate(u'end', 959, False, True)
    st_start._set_transitionSet([fac.Transition(st_end, [])])
    st_end._set_transitionSet([])
    return fac.Automaton([st_start, st_end], set(), False, containing_state=None)
UTCMillisecDateTimeRange._Automaton = _BuildAutomaton_26()
# Register the start/end child elements of UTCSecDateTimeRange.
UTCSecDateTimeRange._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'start'), UTCSecDateTime, scope=UTCSecDateTimeRange, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 979, 3)))
UTCSecDateTimeRange._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'end'), UTCSecDateTime, scope=UTCSecDateTimeRange, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 980, 3)))
def _BuildAutomaton_27 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_27
    del _BuildAutomaton_27
    import pyxb.utils.fac as fac
    # Content model of UTCSecDateTimeRange: required 'start' followed by
    # required 'end'; only the 'end' state is accepting.
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    def mkstate(tag, line, initial, accepting):
        use = pyxb.binding.content.ElementUse(
            UTCSecDateTimeRange._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, line, 3))
        return fac.State(use, is_initial=initial,
                         final_update=(set() if accepting else None),
                         is_unordered_catenation=False)
    st_start = mkstate(u'start', 979, True, False)
    st_end = mkstate(u'end', 980, False, True)
    st_start._set_transitionSet([fac.Transition(st_end, [])])
    st_end._set_transitionSet([])
    return fac.Automaton([st_start, st_end], set(), False, containing_state=None)
UTCSecDateTimeRange._Automaton = _BuildAutomaton_27()
# Register the start/end child elements of UTCTenthMicrosecDateTimeRange.
UTCTenthMicrosecDateTimeRange._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'start'), UTCTenthMicrosecDateTime, scope=UTCTenthMicrosecDateTimeRange, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 989, 3)))
UTCTenthMicrosecDateTimeRange._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'end'), UTCTenthMicrosecDateTime, scope=UTCTenthMicrosecDateTimeRange, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 990, 3)))
def _BuildAutomaton_28 ():
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_28
    del _BuildAutomaton_28
    import pyxb.utils.fac as fac
    # Content model of UTCTenthMicrosecDateTimeRange: required 'start'
    # followed by required 'end'; only the 'end' state is accepting.
    xsd = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    def mkstate(tag, line, initial, accepting):
        use = pyxb.binding.content.ElementUse(
            UTCTenthMicrosecDateTimeRange._UseForTag(pyxb.namespace.ExpandedName(None, tag)),
            pyxb.utils.utility.Location(xsd, line, 3))
        return fac.State(use, is_initial=initial,
                         final_update=(set() if accepting else None),
                         is_unordered_catenation=False)
    st_start = mkstate(u'start', 989, True, False)
    st_end = mkstate(u'end', 990, False, True)
    st_start._set_transitionSet([fac.Transition(st_end, [])])
    st_end._set_transitionSet([])
    return fac.Automaton([st_start, st_end], set(), False, containing_state=None)
UTCTenthMicrosecDateTimeRange._Automaton = _BuildAutomaton_28()
# Register the optional Area child element of the regionType binding.
regionType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Area'), regionAreaType, scope=regionType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 231, 5)))
def _BuildAutomaton_29 ():
# Remove this helper function from the namespace after it is invoked
global _BuildAutomaton_29
del _BuildAutomaton_29
import pyxb.utils.fac as fac
counters = set()
cc_0 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 127, 3))
counters.add(cc_0)
cc_1 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 132, 3))
counters.add(cc_1)
cc_2 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 231, 5))
counters.add(cc_2)
states = []
final_update = set()
final_update.add(fac.UpdateInstruction(cc_0, False))
symbol = pyxb.binding.content.ElementUse(regionType._UseForTag(pyxb.namespace.ExpandedName(None, u'LoLimit')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 127, 3))
st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_0)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_1, False))
symbol = pyxb.binding.content.ElementUse(regionType._UseForTag(pyxb.namespace.ExpandedName(None, u'HiLimit')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 132, 3))
st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_1)
final_update = set()
final_update.add(fac.UpdateInstruction(cc_2, False))
symbol = pyxb.binding.content.ElementUse(regionType._UseForTag(pyxb.namespace.ExpandedName(None, u'Area')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 231, 5))
st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
states.append(st_2)
transitions = []
transitions.append(fac.Transition(st_0, [
fac.UpdateInstruction(cc_0, True) ]))
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_0, False) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_0, False) ]))
st_0._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_1, [
fac.UpdateInstruction(cc_1, True) ]))
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_1, False) ]))
st_1._set_transitionSet(transitions)
transitions = []
transitions.append(fac.Transition(st_2, [
fac.UpdateInstruction(cc_2, True) ]))
st_2._set_transitionSet(transitions)
return fac.Automaton(states, counters, True, containing_state=None)
regionType._Automaton = _BuildAutomaton_29()
# Register the nillable StartTime/StopTime child elements of timeIntervalType.
timeIntervalType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'StartTime'), astronTimeType, nillable=pyxb.binding.datatypes.boolean(1), scope=timeIntervalType, documentation=u'astronTime may be expressed in ISO8601 or as a double relative to a reference time', location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 1034, 5)))
timeIntervalType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'StopTime'), astronTimeType, nillable=pyxb.binding.datatypes.boolean(1), scope=timeIntervalType, documentation=u'astronTime may be expressed in ISO8601 or as a double relative to a reference time', location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 1039, 5)))
def _BuildAutomaton_30 ():
    """Build the content-model automaton for ``timeIntervalType``.

    Visible model: an optional ``LoLimit`` and an optional ``HiLimit``
    (each at most once, per the counter conditions), followed by a
    mandatory ``StartTime`` and then a mandatory ``StopTime`` -- the
    ``StopTime`` state is the only one with a (non-None) final_update,
    and the automaton is built non-nullable (third argument False).

    Returns:
        pyxb.utils.fac.Automaton for timeIntervalType's content model.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_30
    del _BuildAutomaton_30
    import pyxb.utils.fac as fac
    # Schema document every Location below points into; hoisted from the
    # six repeated literals.
    _loc = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    counters = set()
    # Plain ints replace the Python-2-only 0L long literals; the values
    # are identical on Python 2 and no longer a SyntaxError on Python 3.
    cc_0 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(_loc, 127, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0, max=1, metadata=pyxb.utils.utility.Location(_loc, 132, 3))
    counters.add(cc_1)
    states = []
    # LoLimit: optional; cannot end the element (final_update is None).
    final_update = None
    symbol = pyxb.binding.content.ElementUse(timeIntervalType._UseForTag(pyxb.namespace.ExpandedName(None, u'LoLimit')), pyxb.utils.utility.Location(_loc, 127, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    # HiLimit: optional; cannot end the element either.
    final_update = None
    symbol = pyxb.binding.content.ElementUse(timeIntervalType._UseForTag(pyxb.namespace.ExpandedName(None, u'HiLimit')), pyxb.utils.utility.Location(_loc, 132, 3))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # StartTime: reachable from the start; must be followed by StopTime.
    final_update = None
    symbol = pyxb.binding.content.ElementUse(timeIntervalType._UseForTag(pyxb.namespace.ExpandedName(None, u'StartTime')), pyxb.utils.utility.Location(_loc, 1034, 5))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # StopTime: not an initial state; the only accepting state (empty
    # final_update set rather than None).
    final_update = set()
    symbol = pyxb.binding.content.ElementUse(timeIntervalType._UseForTag(pyxb.namespace.ExpandedName(None, u'StopTime')), pyxb.utils.utility.Location(_loc, 1039, 5))
    st_3 = fac.State(symbol, is_initial=False, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    # LoLimit may repeat within its counter, or advance to HiLimit/StartTime.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    # HiLimit may repeat within its counter, or advance to StartTime.
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_1._set_transitionSet(transitions)
    # StartTime leads unconditionally to StopTime (no counter updates).
    transitions = []
    transitions.append(fac.Transition(st_3, [
    ]))
    st_2._set_transitionSet(transitions)
    # StopTime is terminal: no outgoing transitions.
    transitions = []
    st_3._set_transitionSet(transitions)
    return fac.Automaton(states, counters, False, containing_state=None)
# Attach the finished content-model automaton to timeIntervalType.
timeIntervalType._Automaton = _BuildAutomaton_30()
# Register the child elements of basicCoordinateType: a string Name plus
# five double1Type quantities imported from the euclid_dm binding.
basicCoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name'), pyxb.binding.datatypes.string, scope=basicCoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 21, 3)))
basicCoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Value'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=basicCoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 22, 3)))
basicCoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Error'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=basicCoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 23, 3)))
basicCoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Resolution'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=basicCoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 24, 3)))
basicCoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Size'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=basicCoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 25, 3)))
basicCoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'PixSize'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=basicCoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 26, 3)))
def _BuildAutomaton_31 ():
    """Build the content-model automaton for ``basicCoordinateType``.

    Visible model: an ordered cascade of optional elements -- ``Name``
    and ``Value`` at most once, ``Error``/``Resolution``/``Size``/
    ``PixSize`` at most twice.  Every state is initial and accepting,
    and the automaton is nullable (third Automaton argument True).

    The original generated code spelled out six identical
    counter/state/transition groups; they follow one regular pattern
    (self-loop incrementing the state's counter, forward edges to every
    later state finalizing it), so the construction is table-driven
    here.  Plain ints replace the Python-2-only ``0L``/``2L`` long
    literals: identical values on Python 2, valid syntax on Python 3.

    Returns:
        pyxb.utils.fac.Automaton for basicCoordinateType's content model.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_31
    del _BuildAutomaton_31
    import pyxb.utils.fac as fac
    # Schema document every Location below points into.
    _loc = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    # (tag, max occurrences, schema line) per child element, in model order.
    _specs = [
        (u'Name', 1, 21),
        (u'Value', 1, 22),
        (u'Error', 2, 23),
        (u'Resolution', 2, 24),
        (u'Size', 2, 25),
        (u'PixSize', 2, 26),
    ]
    counters = set()
    states = []
    ccs = []
    for tag, occurs, line in _specs:
        cc = fac.CounterCondition(min=0, max=occurs, metadata=pyxb.utils.utility.Location(_loc, line, 3))
        counters.add(cc)
        ccs.append(cc)
        symbol = pyxb.binding.content.ElementUse(basicCoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, tag)), pyxb.utils.utility.Location(_loc, line, 3))
        # Each element may end the content (final_update finalizes its
        # own counter) and may begin it (is_initial=True).
        st = fac.State(symbol, is_initial=True, final_update=set([fac.UpdateInstruction(cc, False)]), is_unordered_catenation=False)
        states.append(st)
    # From state i: repeat itself while counter i permits, or advance to
    # any later state, finalizing counter i.
    for i, st in enumerate(states):
        transitions = [fac.Transition(states[i], [fac.UpdateInstruction(ccs[i], True)])]
        for nxt in states[i + 1:]:
            transitions.append(fac.Transition(nxt, [fac.UpdateInstruction(ccs[i], False)]))
        st._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the finished content-model automaton to basicCoordinateType.
basicCoordinateType._Automaton = _BuildAutomaton_31()
# Register the child elements of pixelVector1CoordinateType: a string
# Name and a double1Type Value from the euclid_dm binding.
pixelVector1CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name'), pyxb.binding.datatypes.string, scope=pixelVector1CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 36, 3)))
pixelVector1CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Value'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=pixelVector1CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 37, 3)))
def _BuildAutomaton_32 ():
    """Build the content-model automaton for ``pixelVector1CoordinateType``.

    Visible model: optional ``Name`` then optional ``Value``, each at
    most once; both states are initial and accepting, and the automaton
    is nullable (third Automaton argument True).

    Construction is table-driven over the regular pattern the generated
    code repeated (self-loop incrementing the counter; forward edges
    finalizing it).  Plain ints replace the Python-2-only ``0L`` long
    literals: identical values on Python 2, valid syntax on Python 3.

    Returns:
        pyxb.utils.fac.Automaton for pixelVector1CoordinateType.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_32
    del _BuildAutomaton_32
    import pyxb.utils.fac as fac
    # Schema document every Location below points into.
    _loc = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    # (tag, max occurrences, schema line) per child element, in model order.
    _specs = [
        (u'Name', 1, 36),
        (u'Value', 1, 37),
    ]
    counters = set()
    states = []
    ccs = []
    for tag, occurs, line in _specs:
        cc = fac.CounterCondition(min=0, max=occurs, metadata=pyxb.utils.utility.Location(_loc, line, 3))
        counters.add(cc)
        ccs.append(cc)
        symbol = pyxb.binding.content.ElementUse(pixelVector1CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, tag)), pyxb.utils.utility.Location(_loc, line, 3))
        st = fac.State(symbol, is_initial=True, final_update=set([fac.UpdateInstruction(cc, False)]), is_unordered_catenation=False)
        states.append(st)
    # From state i: repeat itself while counter i permits, or advance to
    # any later state, finalizing counter i.
    for i, st in enumerate(states):
        transitions = [fac.Transition(states[i], [fac.UpdateInstruction(ccs[i], True)])]
        for nxt in states[i + 1:]:
            transitions.append(fac.Transition(nxt, [fac.UpdateInstruction(ccs[i], False)]))
        st._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the finished content-model automaton to pixelVector1CoordinateType.
pixelVector1CoordinateType._Automaton = _BuildAutomaton_32()
# Register the child elements of timeCoordinateType: a string Name, an
# astronTimeType TimeInstant, and four double1Type quantities.
timeCoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name'), pyxb.binding.datatypes.string, scope=timeCoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 48, 3)))
timeCoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'TimeInstant'), astronTimeType, scope=timeCoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 49, 3)))
timeCoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Error'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=timeCoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 50, 3)))
timeCoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Resolution'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=timeCoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 51, 3)))
timeCoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Size'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=timeCoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 52, 3)))
timeCoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'PixSize'), _ImportedBinding_euclid_dm__dtd.double1Type, scope=timeCoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 53, 3)))
def _BuildAutomaton_33 ():
    """Build the content-model automaton for ``timeCoordinateType``.

    Visible model: an ordered cascade of optional elements -- ``Name``
    and ``TimeInstant`` at most once, ``Error``/``Resolution``/``Size``/
    ``PixSize`` at most twice.  Every state is initial and accepting,
    and the automaton is nullable (third Automaton argument True).

    Construction is table-driven over the regular pattern the generated
    code repeated (self-loop incrementing the state's counter; forward
    edges to every later state finalizing it).  Plain ints replace the
    Python-2-only ``0L``/``2L`` long literals: identical values on
    Python 2, valid syntax on Python 3.

    Returns:
        pyxb.utils.fac.Automaton for timeCoordinateType's content model.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_33
    del _BuildAutomaton_33
    import pyxb.utils.fac as fac
    # Schema document every Location below points into.
    _loc = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    # (tag, max occurrences, schema line) per child element, in model order.
    _specs = [
        (u'Name', 1, 48),
        (u'TimeInstant', 1, 49),
        (u'Error', 2, 50),
        (u'Resolution', 2, 51),
        (u'Size', 2, 52),
        (u'PixSize', 2, 53),
    ]
    counters = set()
    states = []
    ccs = []
    for tag, occurs, line in _specs:
        cc = fac.CounterCondition(min=0, max=occurs, metadata=pyxb.utils.utility.Location(_loc, line, 3))
        counters.add(cc)
        ccs.append(cc)
        symbol = pyxb.binding.content.ElementUse(timeCoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, tag)), pyxb.utils.utility.Location(_loc, line, 3))
        st = fac.State(symbol, is_initial=True, final_update=set([fac.UpdateInstruction(cc, False)]), is_unordered_catenation=False)
        states.append(st)
    # From state i: repeat itself while counter i permits, or advance to
    # any later state, finalizing counter i.
    for i, st in enumerate(states):
        transitions = [fac.Transition(states[i], [fac.UpdateInstruction(ccs[i], True)])]
        for nxt in states[i + 1:]:
            transitions.append(fac.Transition(nxt, [fac.UpdateInstruction(ccs[i], False)]))
        st._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the finished content-model automaton to timeCoordinateType.
timeCoordinateType._Automaton = _BuildAutomaton_33()
# Register the child elements of vector2CoordinateType: two string names
# and five 2-vector (double2Type) quantities from the euclid_dm binding.
vector2CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name1'), pyxb.binding.datatypes.string, scope=vector2CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 66, 3)))
vector2CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name2'), pyxb.binding.datatypes.string, scope=vector2CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 67, 3)))
vector2CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Value2'), _ImportedBinding_euclid_dm__dtd.double2Type, scope=vector2CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 68, 3)))
vector2CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Error2'), _ImportedBinding_euclid_dm__dtd.double2Type, scope=vector2CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 69, 3)))
vector2CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Resolution2'), _ImportedBinding_euclid_dm__dtd.double2Type, scope=vector2CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 70, 3)))
vector2CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Size2'), _ImportedBinding_euclid_dm__dtd.double2Type, scope=vector2CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 71, 3)))
vector2CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'PixSize2'), _ImportedBinding_euclid_dm__dtd.double2Type, scope=vector2CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 72, 3)))
def _BuildAutomaton_34 ():
    """Build the content-model automaton for ``vector2CoordinateType``.

    Visible model: an ordered cascade of optional elements -- ``Name1``,
    ``Name2`` and ``Value2`` at most once; ``Error2``/``Resolution2``/
    ``Size2``/``PixSize2`` at most twice.  Every state is initial and
    accepting, and the automaton is nullable (third Automaton argument
    True).

    Construction is table-driven over the regular pattern the generated
    code repeated (self-loop incrementing the state's counter; forward
    edges to every later state finalizing it).  Plain ints replace the
    Python-2-only ``0L``/``2L`` long literals: identical values on
    Python 2, valid syntax on Python 3.

    Returns:
        pyxb.utils.fac.Automaton for vector2CoordinateType's content model.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_34
    del _BuildAutomaton_34
    import pyxb.utils.fac as fac
    # Schema document every Location below points into.
    _loc = u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd'
    # (tag, max occurrences, schema line) per child element, in model order.
    _specs = [
        (u'Name1', 1, 66),
        (u'Name2', 1, 67),
        (u'Value2', 1, 68),
        (u'Error2', 2, 69),
        (u'Resolution2', 2, 70),
        (u'Size2', 2, 71),
        (u'PixSize2', 2, 72),
    ]
    counters = set()
    states = []
    ccs = []
    for tag, occurs, line in _specs:
        cc = fac.CounterCondition(min=0, max=occurs, metadata=pyxb.utils.utility.Location(_loc, line, 3))
        counters.add(cc)
        ccs.append(cc)
        symbol = pyxb.binding.content.ElementUse(vector2CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, tag)), pyxb.utils.utility.Location(_loc, line, 3))
        st = fac.State(symbol, is_initial=True, final_update=set([fac.UpdateInstruction(cc, False)]), is_unordered_catenation=False)
        states.append(st)
    # From state i: repeat itself while counter i permits, or advance to
    # any later state, finalizing counter i.
    for i, st in enumerate(states):
        transitions = [fac.Transition(states[i], [fac.UpdateInstruction(ccs[i], True)])]
        for nxt in states[i + 1:]:
            transitions.append(fac.Transition(nxt, [fac.UpdateInstruction(ccs[i], False)]))
        st._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the finished content-model automaton to vector2CoordinateType.
vector2CoordinateType._Automaton = _BuildAutomaton_34()
# Register the child elements of pixelVector2CoordinateType: two string
# names and a 2-vector (double2Type) value from the euclid_dm binding.
pixelVector2CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name1'), pyxb.binding.datatypes.string, scope=pixelVector2CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 82, 3)))
pixelVector2CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name2'), pyxb.binding.datatypes.string, scope=pixelVector2CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 83, 3)))
pixelVector2CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Value2'), _ImportedBinding_euclid_dm__dtd.double2Type, scope=pixelVector2CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 84, 3)))
def _BuildAutomaton_35 ():
    """Build the content-model automaton for pixelVector2CoordinateType.

    Auto-generated PyXB binding code: one counter and one state per child
    element (Name1, Name2, Value2), wired with ordered transitions.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_35
    del _BuildAutomaton_35
    import pyxb.utils.fac as fac
    # Counters: min=0L/max=1 presumably mirrors the schema's occurrence
    # constraints for each element -- confirm against the XSD if in doubt.
    counters = set()
    cc_0 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 82, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 83, 3))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 84, 3))
    counters.add(cc_2)
    # States: one ElementUse per element tag; every state is a legal initial
    # state, and its final_update retires the matching counter.
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(pixelVector2CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, u'Name1')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 82, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(pixelVector2CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, u'Name2')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 83, 3))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(pixelVector2CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, u'Value2')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 84, 3))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    # Transitions: self-loops pass True to the counter update, forward moves
    # to later elements pass False (ordered, each element seen at most once).
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_2, True) ]))
    st_2._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the automaton and let the builder delete itself (see `del` above).
pixelVector2CoordinateType._Automaton = _BuildAutomaton_35()
# Register the child elements of vector3CoordinateType (auto-generated PyXB
# binding code; locations point into the source XSD).
vector3CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name1'), pyxb.binding.datatypes.string, scope=vector3CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 96, 3)))
vector3CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name2'), pyxb.binding.datatypes.string, scope=vector3CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 97, 3)))
vector3CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name3'), pyxb.binding.datatypes.string, scope=vector3CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 98, 3)))
vector3CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Value3'), _ImportedBinding_euclid_dm__dtd.double3Type, scope=vector3CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 99, 3)))
vector3CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Error3'), _ImportedBinding_euclid_dm__dtd.double3Type, scope=vector3CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 100, 3)))
vector3CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Resolution3'), _ImportedBinding_euclid_dm__dtd.double3Type, scope=vector3CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 101, 3)))
vector3CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Size3'), _ImportedBinding_euclid_dm__dtd.double3Type, scope=vector3CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 102, 3)))
vector3CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'PixSize3'), _ImportedBinding_euclid_dm__dtd.double3Type, scope=vector3CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 103, 3)))
def _BuildAutomaton_36 ():
    """Build the content-model automaton for vector3CoordinateType.

    Auto-generated PyXB binding code: one counter and one state per child
    element (Name1..Name3, Value3, Error3, Resolution3, Size3, PixSize3),
    wired with ordered transitions.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_36
    del _BuildAutomaton_36
    import pyxb.utils.fac as fac
    # Counters: the name elements allow max=1; the double3Type elements allow
    # max=2L -- presumably mirroring the schema's occurrence constraints.
    counters = set()
    cc_0 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 96, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 97, 3))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 98, 3))
    counters.add(cc_2)
    cc_3 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 99, 3))
    counters.add(cc_3)
    cc_4 = fac.CounterCondition(min=0L, max=2L, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 100, 3))
    counters.add(cc_4)
    cc_5 = fac.CounterCondition(min=0L, max=2L, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 101, 3))
    counters.add(cc_5)
    cc_6 = fac.CounterCondition(min=0L, max=2L, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 102, 3))
    counters.add(cc_6)
    cc_7 = fac.CounterCondition(min=0L, max=2L, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 103, 3))
    counters.add(cc_7)
    # States: one ElementUse per element tag; every state is a legal initial
    # state, and its final_update retires the matching counter.
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(vector3CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, u'Name1')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 96, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(vector3CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, u'Name2')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 97, 3))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(vector3CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, u'Name3')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 98, 3))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_3, False))
    symbol = pyxb.binding.content.ElementUse(vector3CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, u'Value3')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 99, 3))
    st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_4, False))
    symbol = pyxb.binding.content.ElementUse(vector3CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, u'Error3')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 100, 3))
    st_4 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_4)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_5, False))
    symbol = pyxb.binding.content.ElementUse(vector3CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, u'Resolution3')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 101, 3))
    st_5 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_5)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_6, False))
    symbol = pyxb.binding.content.ElementUse(vector3CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, u'Size3')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 102, 3))
    st_6 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_6)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_7, False))
    symbol = pyxb.binding.content.ElementUse(vector3CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, u'PixSize3')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 103, 3))
    st_7 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_7)
    # Transitions: self-loops pass True to the counter update, forward moves
    # to later elements pass False (elements must appear in document order).
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_2, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_2, False) ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_3, True) ]))
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_3, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_3, False) ]))
    st_3._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_4, [
        fac.UpdateInstruction(cc_4, True) ]))
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_4, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_4, False) ]))
    st_4._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_5, [
        fac.UpdateInstruction(cc_5, True) ]))
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_5, False) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_5, False) ]))
    st_5._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_6, [
        fac.UpdateInstruction(cc_6, True) ]))
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_6, False) ]))
    st_6._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_7, [
        fac.UpdateInstruction(cc_7, True) ]))
    st_7._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the automaton and let the builder delete itself (see `del` above).
vector3CoordinateType._Automaton = _BuildAutomaton_36()
# Register the child elements of pixelVector3CoordinateType (auto-generated
# PyXB binding code; locations point into the source XSD).
pixelVector3CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name1'), pyxb.binding.datatypes.string, scope=pixelVector3CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 113, 3)))
pixelVector3CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name2'), pyxb.binding.datatypes.string, scope=pixelVector3CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 114, 3)))
pixelVector3CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Name3'), pyxb.binding.datatypes.string, scope=pixelVector3CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 115, 3)))
pixelVector3CoordinateType._AddElement(pyxb.binding.basis.element(pyxb.namespace.ExpandedName(None, u'Value3'), _ImportedBinding_euclid_dm__dtd.double3Type, scope=pixelVector3CoordinateType, location=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 116, 3)))
def _BuildAutomaton_37 ():
    """Build the content-model automaton for pixelVector3CoordinateType.

    Auto-generated PyXB binding code: one counter and one state per child
    element (Name1, Name2, Name3, Value3), wired with ordered transitions.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_37
    del _BuildAutomaton_37
    import pyxb.utils.fac as fac
    # Counters: min=0L/max=1 presumably mirrors the schema's occurrence
    # constraints for each element -- confirm against the XSD if in doubt.
    counters = set()
    cc_0 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 113, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 114, 3))
    counters.add(cc_1)
    cc_2 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 115, 3))
    counters.add(cc_2)
    cc_3 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 116, 3))
    counters.add(cc_3)
    # States: one ElementUse per element tag; every state is a legal initial
    # state, and its final_update retires the matching counter.
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(pixelVector3CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, u'Name1')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 113, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(pixelVector3CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, u'Name2')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 114, 3))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_2, False))
    symbol = pyxb.binding.content.ElementUse(pixelVector3CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, u'Name3')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 115, 3))
    st_2 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_2)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_3, False))
    symbol = pyxb.binding.content.ElementUse(pixelVector3CoordinateType._UseForTag(pyxb.namespace.ExpandedName(None, u'Value3')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 116, 3))
    st_3 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_3)
    # Transitions: self-loops pass True to the counter update, forward moves
    # to later elements pass False (elements must appear in document order).
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_0, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_1, False) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_1, False) ]))
    st_1._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_2, [
        fac.UpdateInstruction(cc_2, True) ]))
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_2, False) ]))
    st_2._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_3, [
        fac.UpdateInstruction(cc_3, True) ]))
    st_3._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the automaton and let the builder delete itself (see `del` above).
pixelVector3CoordinateType._Automaton = _BuildAutomaton_37()
def _BuildAutomaton_38 ():
    """Build the content-model automaton for spectralIntervalType.

    Auto-generated PyXB binding code: two optional ordered elements,
    LoLimit then HiLimit.
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_38
    del _BuildAutomaton_38
    import pyxb.utils.fac as fac
    # Counters: min=0L/max=1 presumably mirrors the schema's occurrence
    # constraints for each element -- confirm against the XSD if in doubt.
    counters = set()
    cc_0 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 127, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 132, 3))
    counters.add(cc_1)
    # States: one ElementUse per element tag; both are legal initial states.
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(spectralIntervalType._UseForTag(pyxb.namespace.ExpandedName(None, u'LoLimit')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 127, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(spectralIntervalType._UseForTag(pyxb.namespace.ExpandedName(None, u'HiLimit')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 132, 3))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # Transitions: self-loops pass True to the counter update, the forward
    # move from LoLimit to HiLimit passes False.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    st_1._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the automaton and let the builder delete itself (see `del` above).
spectralIntervalType._Automaton = _BuildAutomaton_38()
def _BuildAutomaton_39 ():
    """Build the content-model automaton for redshiftIntervalType.

    Auto-generated PyXB binding code: two optional ordered elements,
    LoLimit then HiLimit (same shape as _BuildAutomaton_38 but bound to
    redshiftIntervalType).
    """
    # Remove this helper function from the namespace after it is invoked
    global _BuildAutomaton_39
    del _BuildAutomaton_39
    import pyxb.utils.fac as fac
    # Counters: min=0L/max=1 presumably mirrors the schema's occurrence
    # constraints for each element -- confirm against the XSD if in doubt.
    counters = set()
    cc_0 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 127, 3))
    counters.add(cc_0)
    cc_1 = fac.CounterCondition(min=0L, max=1, metadata=pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 132, 3))
    counters.add(cc_1)
    # States: one ElementUse per element tag; both are legal initial states.
    states = []
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_0, False))
    symbol = pyxb.binding.content.ElementUse(redshiftIntervalType._UseForTag(pyxb.namespace.ExpandedName(None, u'LoLimit')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 127, 3))
    st_0 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_0)
    final_update = set()
    final_update.add(fac.UpdateInstruction(cc_1, False))
    symbol = pyxb.binding.content.ElementUse(redshiftIntervalType._UseForTag(pyxb.namespace.ExpandedName(None, u'HiLimit')), pyxb.utils.utility.Location(u'/home/sartor/workspace/EUCLID/svn_tot/schema/branches/challenge4/Dictionary/bas/imp/stc/euc-test-stc.xsd', 132, 3))
    st_1 = fac.State(symbol, is_initial=True, final_update=final_update, is_unordered_catenation=False)
    states.append(st_1)
    # Transitions: self-loops pass True to the counter update, the forward
    # move from LoLimit to HiLimit passes False.
    transitions = []
    transitions.append(fac.Transition(st_0, [
        fac.UpdateInstruction(cc_0, True) ]))
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_0, False) ]))
    st_0._set_transitionSet(transitions)
    transitions = []
    transitions.append(fac.Transition(st_1, [
        fac.UpdateInstruction(cc_1, True) ]))
    st_1._set_transitionSet(transitions)
    return fac.Automaton(states, counters, True, containing_state=None)
# Attach the automaton and let the builder delete itself (see `del` above).
redshiftIntervalType._Automaton = _BuildAutomaton_39()
| 67.642388
| 1,432
| 0.772787
| 39,425
| 309,261
| 5.884134
| 0.029474
| 0.016139
| 0.024209
| 0.054521
| 0.848195
| 0.839531
| 0.816697
| 0.793618
| 0.787915
| 0.759899
| 0
| 0.017427
| 0.106392
| 309,261
| 4,571
| 1,433
| 67.657187
| 0.821999
| 0.069336
| 0
| 0.591366
| 1
| 0.182408
| 0.295084
| 0.238796
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.033431
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
820d30b93ddfb7e5936faf6c501c7596f1cff558
| 19,669
|
py
|
Python
|
parser/fase2/team03/parse/expressions/expressions_math.py
|
Josue-Zea/tytus
|
f9e4be9a8c03eb698fade7a748972e4f52d46685
|
[
"MIT"
] | 35
|
2020-12-07T03:11:43.000Z
|
2021-04-15T17:38:16.000Z
|
parser/fase2/team03/parse/expressions/expressions_math.py
|
Josue-Zea/tytus
|
f9e4be9a8c03eb698fade7a748972e4f52d46685
|
[
"MIT"
] | 47
|
2020-12-09T01:29:09.000Z
|
2021-01-13T05:37:50.000Z
|
parser/fase2/team03/parse/expressions/expressions_math.py
|
Josue-Zea/tytus
|
f9e4be9a8c03eb698fade7a748972e4f52d46685
|
[
"MIT"
] | 556
|
2020-12-07T03:13:31.000Z
|
2021-06-17T17:41:10.000Z
|
import math
import random
import numpy as np
from parse.ast_node import ASTNode
from parse.errors import Error, ErrorType
# From here on, classes describing various mathematical operations
class Abs(ASTNode):
    """AST node for the SQL ABS(x) function: absolute value of an expression."""

    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # operand sub-expression, evaluated via .execute()
        self.graph_ref = graph_ref

    def execute(self, table, tree):
        # Evaluate the operand, then delegate to Python's built-in abs();
        # any failure is reported as a semantic error at this node.
        super().execute(table, tree)
        value = self.exp.execute(table, tree)
        try:
            result = abs(value)
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: must be real number, not ' + str(type(value))))
        return result

    def generate(self, table, tree):
        # Emit the SQL text form of this node.
        super().generate(table, tree)
        return f'ABS({self.exp.generate(table, tree)})'
class Cbrt(ASTNode):
    """AST node for the SQL CBRT(x) function: cube root of an expression."""

    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # operand sub-expression, evaluated via .execute()
        self.graph_ref = graph_ref

    def execute(self, table, tree):
        # Evaluate the operand and compute the cube root with numpy;
        # any failure is reported as a semantic error at this node.
        super().execute(table, tree)
        value = self.exp.execute(table, tree)
        try:
            result = np.cbrt(value)
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: must be real number, not ' + str(type(value))))
        return result

    def generate(self, table, tree):
        # Emit the SQL text form of this node.
        super().generate(table, tree)
        return f'CBRT({self.exp.generate(table, tree)})'
class Ceil(ASTNode):
    """AST node for the SQL CEIL(x) function: smallest integer >= x."""

    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # operand sub-expression, evaluated via .execute()
        self.graph_ref = graph_ref

    def execute(self, table, tree):
        # Evaluate the operand and round up with math.ceil; any failure is
        # reported as a semantic error at this node.
        super().execute(table, tree)
        value = self.exp.execute(table, tree)
        try:
            result = math.ceil(value)
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: must be real number, not ' + str(type(value))))
        return result

    def generate(self, table, tree):
        # Emit the SQL text form of this node.
        super().generate(table, tree)
        return f'CEIL({self.exp.generate(table, tree)})'
class Degrees(ASTNode):
    """AST node for the SQL DEGREES(x) function: radians-to-degrees conversion."""

    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # operand sub-expression, evaluated via .execute()
        self.graph_ref = graph_ref

    def execute(self, table, tree):
        # Evaluate the operand and convert with math.degrees; any failure is
        # reported as a semantic error at this node.
        super().execute(table, tree)
        value = self.exp.execute(table, tree)
        try:
            result = math.degrees(value)
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: must be real number, not ' + str(type(value))))
        return result

    def generate(self, table, tree):
        # Emit the SQL text form of this node.
        super().generate(table, tree)
        return f'DEGREES({self.exp.generate(table, tree)})'
class Div(ASTNode):
    """AST node for the SQL DIV(a, b) function: integer (floor) division."""

    def __init__(self, exp1, exp2, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp1 = exp1  # dividend sub-expression
        self.exp2 = exp2  # divisor sub-expression
        self.graph_ref = graph_ref

    def execute(self, table, tree):
        super().execute(table, tree)
        exp1 = self.exp1.execute(table, tree)
        exp2 = self.exp2.execute(table, tree)
        try:
            return exp1 // exp2
        except ZeroDivisionError:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'ZeroDivisionError: integer division or modulo by zero'))
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC, 'TypeError: Both arguments must be a real number'))

    def generate(self, table, tree):
        super().generate(table, tree)
        # BUG FIX: previously referenced the nonexistent attribute self.exp
        # (AttributeError at runtime); DIV takes two operands.
        return f'DIV({self.exp1.generate(table, tree)}, {self.exp2.generate(table, tree)})'
class Exp(ASTNode):
    """AST node for the SQL EXP(x) function: e raised to the power x."""

    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # operand sub-expression, evaluated via .execute()
        self.graph_ref = graph_ref

    def execute(self, table, tree):
        # Evaluate the operand and exponentiate with math.exp; any failure is
        # reported as a semantic error at this node.
        super().execute(table, tree)
        value = self.exp.execute(table, tree)
        try:
            result = math.exp(value)
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: must be real number, not ' + str(type(value))))
        return result

    def generate(self, table, tree):
        # Emit the SQL text form of this node.
        super().generate(table, tree)
        return f'EXP({self.exp.generate(table, tree)})'
class Factorial(ASTNode):
    """AST node for the SQL FACTORIAL(x) function: factorial of a non-negative integer."""

    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # operand sub-expression, evaluated via .execute()
        self.graph_ref = graph_ref

    def execute(self, table, tree):
        super().execute(table, tree)
        exp = self.exp.execute(table, tree)
        try:
            # BUG FIX: the argument was omitted (math.factorial()), which made
            # every call raise TypeError and always report a semantic error.
            return math.factorial(exp)
        except:
            raise (
                Error(self.line, self.column, ErrorType.SEMANTIC, 'TypeError: only accepts integral positive values'))

    def generate(self, table, tree):
        super().generate(table, tree)
        return f'FACTORIAL({self.exp.generate(table, tree)})'
class Floor(ASTNode):
    """AST node for the SQL FLOOR(x) function: largest integer <= x."""

    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # operand sub-expression, evaluated via .execute()
        self.graph_ref = graph_ref

    def execute(self, table, tree):
        # Evaluate the operand and round down with math.floor; any failure is
        # reported as a semantic error at this node.
        super().execute(table, tree)
        value = self.exp.execute(table, tree)
        try:
            result = math.floor(value)
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: must be real number, not ' + str(type(value))))
        return result

    def generate(self, table, tree):
        # Emit the SQL text form of this node.
        super().generate(table, tree)
        return f'FLOOR({self.exp.generate(table, tree)})'
class Gcd(ASTNode):
    """AST node for the SQL GCD(a, b) function: greatest common divisor."""

    def __init__(self, exp1, exp2, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp1 = exp1  # first operand sub-expression
        self.exp2 = exp2  # second operand sub-expression
        self.graph_ref = graph_ref

    def execute(self, table, tree):
        super().execute(table, tree)
        exp1 = self.exp1.execute(table, tree)
        exp2 = self.exp2.execute(table, tree)
        try:
            return math.gcd(exp1, exp2)
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: Both arguments must be a integral number'))

    def generate(self, table, tree):
        super().generate(table, tree)
        # BUG FIX: previously referenced the nonexistent attribute self.exp
        # (AttributeError at runtime); GCD takes two operands.
        return f'GCD({self.exp1.generate(table, tree)}, {self.exp2.generate(table, tree)})'
class Lcm(ASTNode):  # Only available on Python 3.9+, please update your python version
    """AST node for the SQL LCM(a, b) function: least common multiple.

    Relies on math.lcm, which exists only on Python 3.9+; on older
    interpreters the call raises and is reported as a semantic error.
    """

    def __init__(self, exp1, exp2, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp1 = exp1  # first operand sub-expression
        self.exp2 = exp2  # second operand sub-expression
        self.graph_ref = graph_ref

    def execute(self, table, tree):
        super().execute(table, tree)
        exp1 = self.exp1.execute(table, tree)
        exp2 = self.exp2.execute(table, tree)
        try:
            return math.lcm(exp1, exp2)
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: Both arguments must be a integral number'))

    def generate(self, table, tree):
        super().generate(table, tree)
        # BUG FIX: previously referenced the nonexistent attribute self.exp
        # (AttributeError at runtime); LCM takes two operands.
        return f'LCM({self.exp1.generate(table, tree)}, {self.exp2.generate(table, tree)})'
class Ln(ASTNode):
    """AST node for the SQL LN(x) function: natural (base-e) logarithm."""

    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # operand sub-expression, evaluated via .execute()
        self.graph_ref = graph_ref

    def execute(self, table, tree):
        super().execute(table, tree)
        exp = self.exp.execute(table, tree)
        try:
            # BUG FIX: previously used math.log2 (base-2 logarithm); LN is the
            # natural logarithm, so math.log with a single argument (base e)
            # is the correct function.
            return math.log(exp)
        except ValueError:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC, 'ValueError: math domain error'))
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: must be real number, not ' + str(type(exp))))

    def generate(self, table, tree):
        super().generate(table, tree)
        return f'LN({self.exp.generate(table, tree)})'
class Log(ASTNode):
    """AST node for the SQL LOG(x) function.

    NOTE(review): computes the natural logarithm (math.log); PostgreSQL's
    LOG(x) is base 10, and Log10 below already covers that -- confirm the
    intended base with the grammar before changing it.
    """

    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # operand sub-expression, evaluated via .execute()
        self.graph_ref = graph_ref

    def execute(self, table, tree):
        super().execute(table, tree)
        value = self.exp.execute(table, tree)
        try:
            result = math.log(value)
        except ValueError:
            # Negative or zero argument: out of the logarithm's domain.
            raise (Error(self.line, self.column, ErrorType.SEMANTIC, 'ValueError: math domain error'))
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: must be real number, not ' + str(type(value))))
        return result

    def generate(self, table, tree):
        # Emit the SQL text form of this node.
        super().generate(table, tree)
        return f'LOG({self.exp.generate(table, tree)})'
class Log10(ASTNode):
    """SQL LOG10(x) — base-10 logarithm of one numeric expression."""
    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # argument (AST node)
        self.graph_ref = graph_ref
    def execute(self, table, tree):
        """Evaluate the argument and apply math.log10, mapping failures to Errors."""
        super().execute(table, tree)
        exp = self.exp.execute(table, tree)
        try:
            result = math.log10(exp)
        except ValueError:
            # non-positive argument: out of the logarithm's domain
            raise (Error(self.line, self.column, ErrorType.SEMANTIC, 'ValueError: math domain error'))
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: must be real number, not ' + str(type(exp))))
        return result
    def generate(self, table, tree):
        """Emit the SQL text for this call."""
        super().generate(table, tree)
        inner = self.exp.generate(table, tree)
        return f'LOG10({inner})'
# TODO MINSCALE() function not implemented, only returns the value of the argument
class MinScale(ASTNode):
    """SQL MINSCALE() placeholder — validates the argument is numeric and echoes it."""
    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # argument (AST node)
        self.graph_ref = graph_ref
    def execute(self, table, tree):
        """Return the evaluated argument unchanged; reject non-numeric values."""
        super().execute(table, tree)
        value = self.exp.execute(table, tree)
        if not isinstance(value, (int, float)):
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: must be real number, not ' + str(type(value))))
        return value
    def generate(self, table, tree):
        """Emit the SQL text for this call."""
        super().generate(table, tree)
        inner = self.exp.generate(table, tree)
        return f'MINSCALE({inner})'
class Mod(ASTNode):
    """SQL MOD(a, b) — floating-point remainder of two numeric expressions."""
    def __init__(self, exp1, exp2, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp1 = exp1  # dividend (AST node)
        self.exp2 = exp2  # divisor (AST node)
        self.graph_ref = graph_ref
    def execute(self, table, tree):
        """Evaluate both operands and return math.fmod of the results."""
        super().execute(table, tree)
        dividend = self.exp1.execute(table, tree)
        divisor = self.exp2.execute(table, tree)
        try:
            return math.fmod(dividend, divisor)
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC, 'TypeError: Both arguments must be a number'))
    def generate(self, table, tree):
        """Emit the SQL text for this call."""
        super().generate(table, tree)
        left = self.exp1.generate(table, tree)
        right = self.exp2.generate(table, tree)
        return f'MOD({left}, {right})'
class PI(ASTNode):
    """SQL PI() — the mathematical constant pi; takes no arguments."""
    def __init__(self, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.graph_ref = graph_ref
    def execute(self, table, tree):
        """Return math.pi."""
        super().execute(table, tree)
        return math.pi
    def generate(self, table, tree):
        """Emit the SQL text for this call."""
        super().generate(table, tree)
        return 'PI()'
class Power(ASTNode):
    """SQL POWER(a, b) — a raised to the power b for two numeric expressions."""
    def __init__(self, exp1, exp2, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp1 = exp1  # base (AST node)
        self.exp2 = exp2  # exponent (AST node)
        self.graph_ref = graph_ref
    def execute(self, table, tree):
        """Evaluate both operands and return math.pow of the results.

        Raises a SEMANTIC Error when either operand is not numeric.
        """
        super().execute(table, tree)
        exp1 = self.exp1.execute(table, tree)
        exp2 = self.exp2.execute(table, tree)
        try:
            return math.pow(exp1, exp2)
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC, 'TypeError: Both arguments must be a real number'))
    def generate(self, table, tree):
        """Emit the SQL text for this call.

        Bug fix: the original referenced the nonexistent attribute
        ``self.exp`` for both arguments; it must use exp1 and exp2.
        """
        super().generate(table, tree)
        return f'POWER({self.exp1.generate(table, tree)}, {self.exp2.generate(table, tree)})'
class Radians(ASTNode):
    """SQL RADIANS(x) — convert degrees to radians."""
    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # argument in degrees (AST node)
        self.graph_ref = graph_ref
    def execute(self, table, tree):
        """Evaluate the argument and convert it with math.radians."""
        super().execute(table, tree)
        degrees = self.exp.execute(table, tree)
        try:
            return math.radians(degrees)
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: must be real number, not ' + str(type(degrees))))
    def generate(self, table, tree):
        """Emit the SQL text for this call."""
        super().generate(table, tree)
        inner = self.exp.generate(table, tree)
        return f'RADIANS({inner})'
class Random(ASTNode):
    """SQL RANDOM() — uniform pseudo-random float in [0, 1); no arguments."""
    def __init__(self, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.graph_ref = graph_ref
    def execute(self, table, tree):
        """Return a fresh random.random() sample."""
        super().execute(table, tree)
        return random.random()
    def generate(self, table, tree):
        """Emit the SQL text for this call."""
        super().generate(table, tree)
        return 'RANDOM()'
class Round(ASTNode):
    """SQL ROUND(x[, d]) — round x, optionally to d decimal places.

    When the parser supplies no second argument it passes the integer
    sentinel 0 as ``exp2`` instead of an AST node.
    """
    def __init__(self, exp1, exp2, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp1 = exp1  # value to round (AST node)
        self.exp2 = exp2  # decimal places (AST node) or the sentinel 0 when omitted
        self.graph_ref = graph_ref
    def execute(self, table, tree):
        """Round the evaluated first operand, honoring the optional precision."""
        super().execute(table, tree)
        exp1 = self.exp1.execute(table, tree)
        if self.exp2 == 0:
            # one-argument form: round to the nearest integer
            return round(exp1)
        exp2 = self.exp2.execute(table, tree)
        return round(exp1, exp2)
    def generate(self, table, tree):
        """Emit the SQL text for this call.

        Bug fix: the original always called ``self.exp2.generate`` even when
        exp2 is the integer sentinel 0 (no second argument), which would
        raise AttributeError; the one-argument form is now emitted instead.
        """
        super().generate(table, tree)
        if self.exp2 == 0:
            return f'ROUND({self.exp1.generate(table, tree)})'
        return f'ROUND({self.exp1.generate(table, tree)}, {self.exp2.generate(table, tree)})'
class Scale(ASTNode):
    """SQL SCALE(x) — number of digits after the decimal point of x."""
    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # argument (AST node)
        self.graph_ref = graph_ref
    def execute(self, table, tree):
        """Count fractional digits of the evaluated argument.

        Integers have scale 0; floats are inspected via their str() form.
        """
        super().execute(table, tree)
        value = self.exp.execute(table, tree)
        if not isinstance(value, (int, float)):
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: must be real number, not ' + str(type(value))))
        if not isinstance(value, float):
            return 0  # integral values carry no fractional digits
        parts = str(value).split(".")
        return len(parts[1]) if len(parts) > 1 else 0
    def generate(self, table, tree):
        """Emit the SQL text for this call."""
        super().generate(table, tree)
        inner = self.exp.generate(table, tree)
        return f'SCALE({inner})'
class SetSeed(ASTNode):
    """SQL SETSEED(x) — seed the pseudo-random generator; returns None."""
    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # seed value (AST node)
        self.graph_ref = graph_ref
    def execute(self, table, tree):
        """Evaluate the argument and pass it to random.seed."""
        super().execute(table, tree)
        seed_value = self.exp.execute(table, tree)
        try:
            return random.seed(seed_value)
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC, 'ValueError: Math domain error'))
    def generate(self, table, tree):
        """Emit the SQL text for this call."""
        super().generate(table, tree)
        inner = self.exp.generate(table, tree)
        return f'SETSEED({inner})'
class Sign(ASTNode):
    """SQL SIGN(x) — -1, 0, or 1 according to the sign of x."""
    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # argument (AST node)
        self.graph_ref = graph_ref
    def execute(self, table, tree):
        """Return the sign of the evaluated argument as an int.

        Bug fix: the original raised a 'ValueError: must be real number'
        Error inside a try whose bare except immediately swallowed it and
        re-raised with the TypeError message — that branch was unreachable.
        The type check is now performed outside the try so exactly one
        Error (the TypeError-message one) covers non-numeric input, which
        is the behavior callers actually observed.
        """
        super().execute(table, tree)
        exp = self.exp.execute(table, tree)
        if isinstance(exp, (int, float)):
            try:
                return int(np.sign(exp))
            except:
                # e.g. NaN: np.sign(nan) is nan and int(nan) raises
                raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                             'TypeError: must be real number, not ' + str(type(exp))))
        raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                     'TypeError: must be real number, not ' + str(type(exp))))
    def generate(self, table, tree):
        """Emit the SQL text for this call."""
        super().generate(table, tree)
        return f'SIGN({self.exp.generate(table, tree)})'
class Sqrt(ASTNode):
    """SQL SQRT(x) — square root of one numeric expression."""
    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # argument (AST node)
        self.graph_ref = graph_ref
    def execute(self, table, tree):
        """Evaluate the argument and apply math.sqrt, mapping failures to Errors."""
        super().execute(table, tree)
        exp = self.exp.execute(table, tree)
        try:
            result = math.sqrt(exp)
        except ValueError:
            # negative argument
            raise (
                Error(self.line, self.column, ErrorType.SEMANTIC, 'ValueError: only accepts integral positive values'))
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: must be real number, not ' + str(type(exp))))
        return result
    def generate(self, table, tree):
        """Emit the SQL text for this call."""
        super().generate(table, tree)
        inner = self.exp.generate(table, tree)
        return f'SQRT({inner})'
# TODO TRIMSCALE() function not implemented, only returns the value of the argument
class TrimScale(ASTNode):
    """SQL TRIMSCALE() placeholder — validates the argument is numeric and echoes it."""
    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # argument (AST node)
        self.graph_ref = graph_ref
    def execute(self, table, tree):
        """Return the evaluated argument unchanged; reject non-numeric values."""
        super().execute(table, tree)
        value = self.exp.execute(table, tree)
        if not isinstance(value, (int, float)):
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: must be real number, not ' + str(type(value))))
        return value
    def generate(self, table, tree):
        """Emit the SQL text for this call."""
        super().generate(table, tree)
        inner = self.exp.generate(table, tree)
        return f'TRIMSCALE({inner})'
class Trunc(ASTNode):
    """SQL TRUNC(x) — truncate one numeric expression toward zero."""
    def __init__(self, exp, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp = exp  # argument (AST node)
        self.graph_ref = graph_ref
    def execute(self, table, tree):
        """Evaluate the argument and apply math.trunc."""
        super().execute(table, tree)
        value = self.exp.execute(table, tree)
        try:
            return math.trunc(value)
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC,
                         'TypeError: must be real number, not ' + str(type(value))))
    def generate(self, table, tree):
        """Emit the SQL text for this call."""
        super().generate(table, tree)
        inner = self.exp.generate(table, tree)
        return f'TRUNC({inner})'
# TODO WIDTHBUCKET()function not implemented, only returns the sum of the arguments
class WidthBucket(ASTNode):
    """SQL WIDTH_BUCKET() placeholder — approximates a bucket index from four operands."""
    def __init__(self, exp1, exp2, exp3, exp4, line, column, graph_ref):
        ASTNode.__init__(self, line, column)
        self.exp1 = exp1  # operand value (AST node)
        self.exp2 = exp2  # lower bound (AST node)
        self.exp3 = exp3  # upper bound (AST node)
        self.exp4 = exp4  # bucket count (AST node)
        self.graph_ref = graph_ref
    def execute(self, table, tree):
        """Evaluate all four operands and compute the placeholder bucket value.

        Returns 0 when the bounds coincide (avoids division by zero);
        otherwise ceil((count * value) / (upper - lower)).
        """
        super().execute(table, tree)
        value = self.exp1.execute(table, tree)
        lower = self.exp2.execute(table, tree)
        upper = self.exp3.execute(table, tree)
        count = self.exp4.execute(table, tree)
        try:
            if upper == lower:
                return 0
            return math.ceil((count * value) / (upper - lower))
        except ValueError:
            raise (
                Error(self.line, self.column, ErrorType.SEMANTIC, 'ValueError: only accepts integral positive values'))
        except:
            raise (Error(self.line, self.column, ErrorType.SEMANTIC, 'TypeError:all arguments must be integers'))
    def generate(self, table, tree):
        """No SQL emission is produced for this placeholder."""
        super().generate(table, tree)
        return ''
| 33.622222
| 120
| 0.59215
| 2,389
| 19,669
| 4.748849
| 0.053997
| 0.134068
| 0.086029
| 0.085677
| 0.904716
| 0.897488
| 0.836051
| 0.830234
| 0.826267
| 0.825386
| 0
| 0.008632
| 0.287356
| 19,669
| 584
| 121
| 33.679795
| 0.800742
| 0.02537
| 0
| 0.770878
| 0
| 0.006424
| 0.11753
| 0.042743
| 0
| 0
| 0
| 0.001712
| 0
| 1
| 0.173448
| false
| 0
| 0.010707
| 0
| 0.366167
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
41b64c0683df4d03aea1b1d74decc922147854b9
| 424
|
py
|
Python
|
class_wrapping/test.py
|
shinsumicco/pybind11-tutorials
|
b2f544653035172f1a7e489942dc8b796e7df72b
|
[
"MIT"
] | null | null | null |
class_wrapping/test.py
|
shinsumicco/pybind11-tutorials
|
b2f544653035172f1a7e489942dc8b796e7df72b
|
[
"MIT"
] | null | null | null |
class_wrapping/test.py
|
shinsumicco/pybind11-tutorials
|
b2f544653035172f1a7e489942dc8b796e7df72b
|
[
"MIT"
] | null | null | null |
# Demo script for the pybind11-wrapped C++ stack: pushes values and prints
# the reported size and contents after each batch of operations.
import stack  # project-local extension module; exposes a `stack` class — not stdlib
st = stack.stack()
# Empty stack: expect size 0 and an empty contents listing.
print("size: {}".format(st.get_size()))
print("{}\n".format(st.get_stacked()))
st.push(1)
# One element pushed.
print("size: {}".format(st.get_size()))
print("{}\n".format(st.get_stacked()))
st.push(5)
st.push(24)
# Three elements total after two more pushes.
print("size: {}".format(st.get_size()))
print("{}\n".format(st.get_stacked()))
# Bulk-push 0, 3, 6, ..., 27 (ten multiples of 3).
for i in range(10):
    st.push(i * 3)
print("size: {}".format(st.get_size()))
print("{}\n".format(st.get_stacked()))
| 22.315789
| 39
| 0.627358
| 71
| 424
| 3.633803
| 0.267606
| 0.248062
| 0.341085
| 0.263566
| 0.790698
| 0.790698
| 0.790698
| 0.790698
| 0.790698
| 0.790698
| 0
| 0.017949
| 0.080189
| 424
| 18
| 40
| 23.555556
| 0.64359
| 0
| 0
| 0.533333
| 0
| 0
| 0.113208
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.066667
| 0
| 0.066667
| 0.533333
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 9
|
68cddb377ca5949dd00274d33326936b12e9e36a
| 59,719
|
py
|
Python
|
treads/recovery_trajectory.py
|
galvisf/treads
|
89e287dd1541c103f62d73479b14119166bb29ae
|
[
"MIT"
] | null | null | null |
treads/recovery_trajectory.py
|
galvisf/treads
|
89e287dd1541c103f62d73479b14119166bb29ae
|
[
"MIT"
] | null | null | null |
treads/recovery_trajectory.py
|
galvisf/treads
|
89e287dd1541c103f62d73479b14119166bb29ae
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2021 Pouria Kourehpaz.
#
# This file is part of downtime assessment framework.
#
# This code is developed to support the framework to estimate earthquake induced downtime
# and recovery trajectory of residential buildings
# proposed by Molina Hutt et al. (2021).
#
# The proposed framework can be found in the manuscript entitled
# Molina Hutt, C., Vahanvaty, T., and Kourehpaz, P. (2021)
# "an analytical framework to assess earthquake induced downtime and model recovery of buildings", Earthquake Spectra.
#
#
# Contributor(s):
# Pouria Kourehpaz
"""
This module calculates recovery trajectories
"""
def RecTr_calc(story, rep_phases, Qt_facade, reconst_time, indx_repairable, indx_irreparable, \
indx_collapse, IF_output, RT_RC2_RS_days, RT_RC3_RS_days, RT_RC4_RS_days, RCmax_RS, N_DMG_RC3_RS, output_path):
import numpy as np
import pandas as pd
import os
np.seterr(divide='ignore', invalid='ignore')
IF_inspection = IF_output[0]
IF_eng = IF_output[1]
IF_permit = IF_output[2]
IF_finance = IF_output[3]
IF_cm_rs1 = IF_output[4]
IF_cm_rs2 = IF_output[5]
IF_cm_rs3 = IF_output[6]
IF_cm_rs4 = IF_output[7]
IF_cm_rs5 = IF_output[8]
IF_cm_rs6 = IF_output[9]
IF_cm_rs7 = IF_output[10]
IF_stab = IF_output[11]
IF_reconst = IF_output[12]
story_bm = rep_phases[len(rep_phases)-1] #number of basement stories
story_gr = sum(rep_phases) - story_bm #number of above grade stories
usability_repairable = np.append([1,0],np.linspace(0, 1, story_gr+1))
usability_irreparable_1 = np.append([1], np.zeros(len(usability_repairable)-2))
usability_irreparable = np.append(usability_irreparable_1, [1])
usability = np.vstack((usability_repairable.T,usability_irreparable.T))
#downtime for irreparable and collapse scenarios
DT_final_irreparable = np.zeros((len(indx_irreparable)+len(indx_collapse), len(usability_repairable)))
DT_irr_tot = IF_reconst + np.ones(len(indx_irreparable)+len(indx_collapse))*reconst_time*sum(rep_phases)
DT_final_irreparable[:,-1] = DT_irr_tot
DT_final_irreparable[:,-2] = DT_irr_tot
##downtime to functional recovery
RT_RC_RS_days = RT_RC2_RS_days
#downtime calculation for repair path
DT_A1 = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_A2 = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_A4 = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_A5 = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_B = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_C = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_D = np.zeros((len(indx_repairable), len(usability_repairable)))
max_RTbm_2_4_5 = np.maximum.reduce([RT_RC_RS_days[:,-6-7*(story_bm-1):-5:7], RT_RC_RS_days[:,-4-7*(story_bm-1):-3:7], RT_RC_RS_days[:,-3-7*(story_bm-1):-2:7]])
DT_A1[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs1, IF_eng+IF_permit]) + np.amax(RT_RC_RS_days[:,-7-7*(story_bm-1):-6:7]+max_RTbm_2_4_5, axis=1)
DT_A2[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs2, IF_eng+IF_permit]) + np.amax(RT_RC_RS_days[:,-6-7*(story_bm-1):-5:7], axis=1)
DT_A4[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs4, IF_eng+IF_permit]) + np.amax(RT_RC_RS_days[:,-4-7*(story_bm-1):-3:7], axis=1)
DT_A5[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs5, IF_eng+IF_permit]) + np.amax(RT_RC_RS_days[:,-3-7*(story_bm-1):-2:7], axis=1)
DT_A = np.maximum.reduce([DT_A1, DT_A2, DT_A4, DT_A5])
DT_B[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs3, IF_eng+IF_permit]) + np.amax(RT_RC_RS_days[:,-5-7*(story_bm-1):-4:7], axis=1)
DT_C[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs6, IF_eng+IF_permit]) + sum(RT_RC_RS_days[:,-2-7*(story_bm-1):-1:7].T) #2 workers per elevator for the entire bld
DT_D[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs7, IF_eng+IF_permit]) + np.amax(RT_RC_RS_days[:,-1-7*(story_bm-1):len(RT_RC_RS_days.T):7], axis=1)
    #downtime calculation for each repair phase assuming repair is performed every 1, 2, or 3 stories
RT_RS1 = np.zeros((len(indx_repairable), story_gr))
RT_A1 = np.zeros((len(indx_repairable), story_gr))
RT_A2 = np.zeros((len(indx_repairable), story_gr))
RT_A4 = np.zeros((len(indx_repairable), story_gr))
RT_A5 = np.zeros((len(indx_repairable), story_gr))
RT_B = np.zeros((len(indx_repairable), story_gr))
RT_C = np.zeros((len(indx_repairable), story_gr))
RT_D = np.zeros((len(indx_repairable), story_gr))
for i in range(len(indx_repairable)):
n=0
m=0
for j in range(len(rep_phases)-1):
if rep_phases[j]==1:
max_RTgr_2_4_5 = np.maximum.reduce([RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7],RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7],RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7]])
max_RT_A1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7]+max_RTgr_2_4_5)
max_RT_RS1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7])
max_RT_A2 = np.amax(RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7])
max_RT_A4 = np.amax(RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7])
max_RT_A5 = np.amax(RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7])
max_RT_B = np.amax(RT_RC_RS_days[i,2+n:2+7*rep_phases[j]+n:7])
max_RT_D = np.amax(RT_RC_RS_days[i,6+n:6+7*rep_phases[j]+n:7])
RT_A1[i,m] = min(RT_RC_RS_days[i,0+n]+max_RTgr_2_4_5[0],max_RT_A1)
RT_RS1[i,m] = min(RT_RC_RS_days[i,0+n],max_RT_A1)
RT_A2[i,m] = min(RT_RC_RS_days[i,1+n],max_RT_A2)
RT_A4[i,m] = min(RT_RC_RS_days[i,3+n],max_RT_A4)
RT_A5[i,m] = min(RT_RC_RS_days[i,4+n],max_RT_A5)
RT_B[i,m] = min(RT_RC_RS_days[i,2+n],max_RT_B)
RT_C[i,m] = RT_RC_RS_days[i,5+n]
RT_D[i,m] = min(RT_RC_RS_days[i,6+n],max_RT_D)
m=m+1
n=n+rep_phases[j]*7
elif rep_phases[j]==2:
max_RTgr_2_4_5 = np.maximum.reduce([RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7],RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7],RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7]])
max_RT_A1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7]+max_RTgr_2_4_5)
max_RT_RS1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7])
max_RT_A2 = np.amax(RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7])
max_RT_A4 = np.amax(RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7])
max_RT_A5 = np.amax(RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7])
max_RT_B = np.amax(RT_RC_RS_days[i,2+n:2+7*rep_phases[j]+n:7])
max_RT_D = np.amax(RT_RC_RS_days[i,6+n:6+7*rep_phases[j]+n:7])
RT_RS1[i,m] = min(RT_RC_RS_days[i,0+n],max_RT_RS1)
RT_A1[i,m] = min(RT_RC_RS_days[i,0+n]+max_RTgr_2_4_5[0],max_RT_A1)
RT_A2[i,m] = min(RT_RC_RS_days[i,1+n],max_RT_A2)
RT_A4[i,m] = min(RT_RC_RS_days[i,3+n],max_RT_A4)
RT_A5[i,m] = min(RT_RC_RS_days[i,4+n],max_RT_A5)
RT_B[i,m] = min(RT_RC_RS_days[i,2+n],max_RT_B)
RT_C[i,m] = RT_RC_RS_days[i,5+n]
RT_D[i,m] = min(RT_RC_RS_days[i,6+n],max_RT_D)
RT_RS1[i,m+1] = min(RT_RS1[i,m] + min(RT_RC_RS_days[i,7+n], max_RT_RS1-RT_RC_RS_days[i,0+n]),max_RT_RS1)
RT_A1[i,m+1] = max(min(RT_RS1[i,m] + min(RT_RC_RS_days[i,7+n]+max_RTgr_2_4_5[1], max_RT_A1-RT_RC_RS_days[i,0+n]-max_RTgr_2_4_5[0]),max_RT_A1), RT_A1[i,m])
RT_A2[i,m+1] = min(RT_A2[i,m] + min(RT_RC_RS_days[i,8+n], max_RT_A2-RT_RC_RS_days[i,1+n]),max_RT_A2)
RT_A4[i,m+1] = min(RT_A4[i,m] + min(RT_RC_RS_days[i,10+n], max_RT_A4-RT_RC_RS_days[i,3+n]),max_RT_A4)
RT_A5[i,m+1] = min(RT_A5[i,m] + min(RT_RC_RS_days[i,11+n], max_RT_A5-RT_RC_RS_days[i,4+n]),max_RT_A5)
RT_B[i,m+1] = min(RT_B[i,m] + min(RT_RC_RS_days[i,9+n], max_RT_B-RT_RC_RS_days[i,2+n]),max_RT_B)
RT_C[i,m+1] = RT_C[i,m] + RT_RC_RS_days[i,12+n]
RT_D[i,m+1] = min(RT_D[i,m] + min(RT_RC_RS_days[i,13+n], max_RT_D-RT_RC_RS_days[i,6+n]),max_RT_D)
m=m+2
n=n+rep_phases[j]*7
elif rep_phases[j]==3:
max_RTgr_2_4_5 = np.maximum.reduce([RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7],RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7],RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7]])
max_RT_A1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7]+max_RTgr_2_4_5)
max_RT_RS1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7])
max_RT_A2 = np.amax(RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7])
max_RT_A4 = np.amax(RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7])
max_RT_A5 = np.amax(RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7])
max_RT_B = np.amax(RT_RC_RS_days[i,2+n:2+7*rep_phases[j]+n:7])
max_RT_D = np.amax(RT_RC_RS_days[i,6+n:6+7*rep_phases[j]+n:7])
RT_RS1[i,m] = min(RT_RC_RS_days[i,0+n],max_RT_RS1)
RT_A1[i,m] = min(RT_RC_RS_days[i,0+n]+max_RTgr_2_4_5[0],max_RT_A1)
RT_A2[i,m] = min(RT_RC_RS_days[i,1+n],max_RT_A2)
RT_A4[i,m] = min(RT_RC_RS_days[i,3+n],max_RT_A4)
RT_A5[i,m] = min(RT_RC_RS_days[i,4+n],max_RT_A5)
RT_B[i,m] = min(RT_RC_RS_days[i,2+n],max_RT_B)
RT_C[i,m] = RT_RC_RS_days[i,5+n]
RT_D[i,m] = min(RT_RC_RS_days[i,6+n],max_RT_D)
RT_RS1[i,m+1] = min(RT_RS1[i,m] + min(RT_RC_RS_days[i,7+n], max_RT_RS1-RT_RC_RS_days[i,0+n]),max_RT_RS1)
RT_A1[i,m+1] = max(min(RT_RS1[i,m] + min(RT_RC_RS_days[i,7+n]+max_RTgr_2_4_5[1], max_RT_A1-RT_RC_RS_days[i,0+n]-max_RTgr_2_4_5[0]),max_RT_A1), RT_A1[i,m])
RT_A2[i,m+1] = min(RT_A2[i,m] + min(RT_RC_RS_days[i,8+n], max_RT_A2-RT_RC_RS_days[i,1+n]),max_RT_A2)
RT_A4[i,m+1] = min(RT_A4[i,m] + min(RT_RC_RS_days[i,10+n], max_RT_A4-RT_RC_RS_days[i,3+n]),max_RT_A4)
RT_A5[i,m+1] = min(RT_A5[i,m] + min(RT_RC_RS_days[i,11+n], max_RT_A5-RT_RC_RS_days[i,4+n]),max_RT_A5)
RT_B[i,m+1] = min(RT_B[i,m] + min(RT_RC_RS_days[i,9+n], max_RT_B-RT_RC_RS_days[i,2+n]),max_RT_B)
RT_C[i,m+1] = RT_C[i,m] + RT_RC_RS_days[i,12+n]
RT_D[i,m+1] = min(RT_D[i,m] + min(RT_RC_RS_days[i,13+n], max_RT_D-RT_RC_RS_days[i,6+n]),max_RT_D)
RT_RS1[i,m+2] = min(RT_RS1[i,m+1] + min(RT_RC_RS_days[i,14+n], max_RT_RS1-RT_RC_RS_days[i,7+n]),max_RT_RS1)
RT_A1[i,m+2] = max(min(RT_RS1[i,m+1] + min(RT_RC_RS_days[i,14+n]+max_RTgr_2_4_5[2], max_RT_A1-RT_RC_RS_days[i,7+n]-max_RTgr_2_4_5[1]),max_RT_A1), RT_A1[i,m+1])
RT_A2[i,m+2] = min(RT_A2[i,m+1] + min(RT_RC_RS_days[i,15+n], max_RT_A2-RT_RC_RS_days[i,8+n]),max_RT_A2)
RT_A4[i,m+2] = min(RT_A4[i,m+1] + min(RT_RC_RS_days[i,17+n], max_RT_A4-RT_RC_RS_days[i,10+n]),max_RT_A4)
RT_A5[i,m+2] = min(RT_A5[i,m+1] + min(RT_RC_RS_days[i,18+n], max_RT_A5-RT_RC_RS_days[i,11+n]),max_RT_A5)
RT_B[i,m+2] = min(RT_B[i,m+1] + min(RT_RC_RS_days[i,16+n], max_RT_B-RT_RC_RS_days[i,9+n]),max_RT_B)
RT_C[i,m+2] = RT_C[i,m+1] + RT_RC_RS_days[i,19+n]
RT_D[i,m+2] = min(RT_D[i,m+1] + min(RT_RC_RS_days[i,20+n], max_RT_D-RT_RC_RS_days[i,13+n]),max_RT_D)
m=m+3
n=n+rep_phases[j]*7
for i in range(len(indx_repairable)):
for j in range(len(rep_phases)-1):
RT_RS1[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_RS1[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_RS1[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_A1[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_A1[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_A1[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_A2[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_A2[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_A2[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_A4[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_A4[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_A4[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_A5[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_A5[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_A5[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_B[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_B[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_B[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_C[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_C[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_C[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_D[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_D[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_D[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_A = np.maximum.reduce([RT_A1, RT_A2, RT_A4, RT_A5])
RT_RS2 = RT_A2
RT_RS4 = RT_A4
RT_RS5 = RT_A5
RT_RS3 = RT_B
RT_RS6 = RT_C
RT_RS7 = RT_D
    #generate repair time stepping functions for functional recovery
with pd.ExcelWriter(os.path.join(output_path,r'RT_stepfunc_FR.xlsx')) as writer:
pd.DataFrame(RT_RS1).to_excel(writer, sheet_name='RSeq1', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS2).to_excel(writer, sheet_name='RSeq2', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS3).to_excel(writer, sheet_name='RSeq3', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS4).to_excel(writer, sheet_name='RSeq4', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS5).to_excel(writer, sheet_name='RSeq5', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS6).to_excel(writer, sheet_name='RSeq6', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS7).to_excel(writer, sheet_name='RSeq7', header=story[0:story_gr], index_label='#Num')
# remove IF if repair time for the repair path is zero
a=np.zeros(len(indx_repairable))
b=np.zeros(len(indx_repairable))
c=np.zeros(len(indx_repairable))
d=np.zeros(len(indx_repairable))
for i in range(len(indx_repairable)):
if max(RT_A[i,:]) != 0:
a[i]=1
if max(RT_B[i,:]) != 0:
b[i]=1
if max(RT_C[i,:]) != 0:
c[i]=1
if max(RT_D[i,:]) != 0:
d[i]=1
aa=np.tile(a,(len(usability_repairable),1)).T
bb=np.tile(b,(len(usability_repairable),1)).T
cc=np.tile(c,(len(usability_repairable),1)).T
dd=np.tile(d,(len(usability_repairable),1)).T
RT_A = RT_A + np.tile(DT_A[:,2],(story_gr,1)).T
RT_B = RT_B + np.tile(DT_B[:,2],(story_gr,1)).T
RT_C = RT_C + np.tile(DT_C[:,2],(story_gr,1)).T
RT_D = RT_D + np.tile(DT_D[:,2],(story_gr,1)).T
DT_A[:,3:]=RT_A
DT_B[:,3:]=RT_B
DT_C[:,3:]=RT_C
DT_D[:,3:]=RT_D
DT_final_repairable = np.maximum.reduce([DT_A*aa, DT_B*bb, DT_C*cc, DT_D*dd])
#utility time consideration for downtime to functional recovery calculation
DT_utility = np.zeros((len(DT_final_repairable),len(DT_final_repairable.T)))
k = np.maximum(np.random.lognormal(np.log(10),1,len(DT_final_repairable)),np.random.lognormal(np.log(4),.55,len(DT_final_repairable)),np.random.lognormal(np.log(3),1.2,len(DT_final_repairable)))
for i in range(len(DT_final_repairable)):
DT_utility[i,2:]=k[i]
DT_A = DT_A*aa
DT_B = DT_B*bb
DT_C = DT_C*cc
DT_D = DT_D*dd
mat_adj_A = np.where(np.divide(DT_A,np.transpose(np.repeat([DT_A[:,2]],story_gr+3,axis=0)))[:,3:]==1,0,1)
mat_adj_B = np.where(np.divide(DT_B,np.transpose(np.repeat([DT_B[:,2]],story_gr+3,axis=0)))[:,3:]==1,0,1)
mat_adj_C = np.where(np.divide(DT_C,np.transpose(np.repeat([DT_C[:,2]],story_gr+3,axis=0)))[:,3:]==1,0,1)
mat_adj_D = np.where(np.divide(DT_D,np.transpose(np.repeat([DT_D[:,2]],story_gr+3,axis=0)))[:,3:]==1,0,1)
mat_adj = np.where(np.divide(DT_final_repairable,np.transpose(np.repeat([DT_final_repairable[:,2]],story_gr+3,axis=0)))[:,3:]==1,0,1)
mat_adj_A2 = np.concatenate((np.ones((len(DT_A),3)), mat_adj_A), axis=1)
mat_adj_B2 = np.concatenate((np.ones((len(DT_B),3)), mat_adj_B), axis=1)
mat_adj_C2 = np.concatenate((np.ones((len(DT_C),3)), mat_adj_C), axis=1)
mat_adj_D2 = np.concatenate((np.ones((len(DT_D),3)), mat_adj_D), axis=1)
mat_adj2 = np.concatenate((np.ones((len(DT_final_repairable),3)), mat_adj), axis=1)
DT_A = DT_A * mat_adj_A2
DT_B = DT_B * mat_adj_B2
DT_C = DT_C * mat_adj_C2
DT_D = DT_D * mat_adj_D2
DT_final_repairable = DT_final_repairable * mat_adj2
# adjustment for downtime stepping functions to ensure the usability can be restored if no repair is required in lower stories
for i in range(len(DT_final_repairable)):
if DT_final_repairable[i,3]==0:
indx = np.asarray(np.where(DT_final_repairable[i,:]==0))
indx_max = max(np.squeeze(indx))
DT_final_repairable[i,indx_max]=DT_final_repairable[i,2]
DT_final_repairable[i,2]=0
#for i in range(len(DT_A)):
if DT_A[i,3]==0:
indx = np.asarray(np.where(DT_A[i,:]==0))
indx_max = max(np.squeeze(indx))
DT_A[i,indx_max]=DT_A[i,2]
DT_A[i,2]=0
#for i in range(len(DT_B)):
if DT_B[i,3]==0:
indx = np.asarray(np.where(DT_B[i,:]==0))
indx_max = max(np.squeeze(indx))
DT_B[i,indx_max]=DT_B[i,2]
DT_B[i,2]=0
#for i in range(len(DT_C)):
if DT_C[i,3]==0:
indx = np.asarray(np.where(DT_C[i,:]==0))
indx_max = max(np.squeeze(indx))
DT_C[i,indx_max]=DT_C[i,2]
DT_C[i,2]=0
#for i in range(len(DT_D)):
if DT_D[i,3]==0:
indx = np.asarray(np.where(DT_D[i,:]==0))
indx_max = max(np.squeeze(indx))
DT_D[i,indx_max]=DT_D[i,2]
DT_D[i,2]=0
#ensure that the downtime is not less than the inspection time + stability time in each repair phase
for i in range(len(DT_final_repairable)):
if DT_final_repairable[i,2]==0:
DT_final_repairable[i,2:][DT_final_repairable[i,2:]==0]=IF_inspection[i] + IF_stab[i]
#for i in range(len(DT_A)):
if DT_A[i,2]==0:
DT_A[i,2:][DT_A[i,2:]==0]=IF_inspection[i]
#for i in range(len(DT_B)):
if DT_B[i,2]==0:
DT_B[i,2:][DT_B[i,2:]==0]=IF_inspection[i]
#for i in range(len(DT_C)):
if DT_C[i,2]==0:
DT_C[i,2:][DT_C[i,2:]==0]=IF_inspection[i]
#for i in range(len(DT_D)):
if DT_D[i,2]==0:
DT_D[i,2:][DT_D[i,2:]==0]=IF_inspection[i]
#compare the utility repair time vs the total downtime only if FR is triggered
for i in range(len(DT_final_repairable)):
if (DT_final_repairable[i,-1]<DT_utility[i,-1]) and (DT_final_repairable[i,-1]!=DT_final_repairable[i,2]):
DT_final_repairable[i,:]=DT_utility[i,:]
row_id_rep=[]
for i in range(len(indx_repairable)):
row_id_rep.append('real_'+str(i)+'_repairable')
row_id_irr=[]
for i in range(len(indx_irreparable) + len(indx_collapse)):
row_id_irr.append('real_'+str(i)+'_irreparable')
row_id_all = row_id_rep + row_id_irr
row_id_all=['usability_repairable','usability_irreparable']+row_id_all
#generate downtime to functional recovery stepping functions
DT_final_RC2 = np.concatenate((DT_final_repairable, DT_final_irreparable), axis=0)
DT_final_RC2_use = np.concatenate((usability, DT_final_RC2), axis=0)
DT_final_RC2_use = np.c_[np.squeeze(row_id_all).T, DT_final_RC2_use]
pd.DataFrame(DT_final_RC2_use).to_csv(os.path.join(output_path,r'DT_stepfunc_FR.csv'), header=None, index=None)
DT_A_RC2 = np.concatenate((DT_A, DT_final_irreparable), axis=0)
DT_B_RC2 = np.concatenate((DT_B, DT_final_irreparable), axis=0)
DT_C_RC2 = np.concatenate((DT_C, DT_final_irreparable), axis=0)
DT_D_RC2 = np.concatenate((DT_D, DT_final_irreparable), axis=0)
DT_utility = np.concatenate((DT_utility, DT_final_irreparable), axis=0)
DT_A_RC2_use = np.concatenate((usability, DT_A_RC2), axis=0)
DT_B_RC2_use = np.concatenate((usability, DT_B_RC2), axis=0)
DT_C_RC2_use = np.concatenate((usability, DT_C_RC2), axis=0)
DT_D_RC2_use = np.concatenate((usability, DT_D_RC2), axis=0)
DT_utility_use = np.concatenate((usability, DT_utility), axis=0)
DT_A_RC2_use = np.c_[np.squeeze(row_id_all).T, DT_A_RC2_use]
DT_B_RC2_use = np.c_[np.squeeze(row_id_all).T, DT_B_RC2_use]
DT_C_RC2_use = np.c_[np.squeeze(row_id_all).T, DT_C_RC2_use]
DT_D_RC2_use = np.c_[np.squeeze(row_id_all).T, DT_D_RC2_use]
DT_utility_use = np.c_[np.squeeze(row_id_all).T, DT_utility_use]
with pd.ExcelWriter(os.path.join(output_path,r'DT_path_FR.xlsx'), options= {'strings_to_numbers': True}) as writer:
pd.DataFrame(DT_A_RC2_use).to_excel(writer, sheet_name='A', header=None, index_label=None, index=False)
pd.DataFrame(DT_B_RC2_use).to_excel(writer, sheet_name='B', header=None, index_label=None, index=False)
pd.DataFrame(DT_C_RC2_use).to_excel(writer, sheet_name='C', header=None, index_label=None, index=False)
pd.DataFrame(DT_D_RC2_use).to_excel(writer, sheet_name='D', header=None, index_label=None, index=False)
pd.DataFrame(DT_utility_use).to_excel(writer, sheet_name='utility', header=None, index_label=None, index=False)
##downtime to reoccupancy
RT_RC_RS_days = RT_RC3_RS_days
#downtime calculation for repair path
# matrices per repair path
# cols = time to reach each usability percentage (increasing is steps of 1/story_bm since are each repair phase)
# rows = simulations
DT_A1 = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_A2 = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_A4 = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_A5 = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_B = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_C = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_D = np.zeros((len(indx_repairable), len(usability_repairable)))
# Calculating the downtime for the first increase in usability from zero starting with the top repair phase
max_RTbm_2_4_5 = np.maximum.reduce([RT_RC_RS_days[:,-6-7*(story_bm-1):-5:7], RT_RC_RS_days[:,-4-7*(story_bm-1):-3:7], RT_RC_RS_days[:,-3-7*(story_bm-1):-2:7]])
DT_A1[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs1, IF_eng+IF_permit]) + np.amax(RT_RC_RS_days[:,-7-7*(story_bm-1):-6:7]+max_RTbm_2_4_5, axis=1)
DT_A2[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs2, IF_eng+IF_permit]) + np.amax(RT_RC_RS_days[:,-6-7*(story_bm-1):-5:7], axis=1)
DT_A4[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs4, IF_eng+IF_permit]) + np.amax(RT_RC_RS_days[:,-4-7*(story_bm-1):-3:7], axis=1)
DT_A5[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs5, IF_eng+IF_permit]) + np.amax(RT_RC_RS_days[:,-3-7*(story_bm-1):-2:7], axis=1)
DT_A = np.maximum.reduce([DT_A1, DT_A2, DT_A4, DT_A5])
DT_B[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs3, IF_eng+IF_permit]) + np.amax(RT_RC_RS_days[:,-5-7*(story_bm-1):-4:7], axis=1)
DT_C[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs6, IF_eng+IF_permit]) + sum(RT_RC_RS_days[:,-2-7*(story_bm-1):-1:7].T) #2 workers per elevator for the entire bld
DT_D[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs7, IF_eng+IF_permit]) + np.amax(RT_RC_RS_days[:,-1-7*(story_bm-1):len(RT_RC_RS_days.T):7], axis=1)
RT_RS1 = np.zeros((len(indx_repairable), story_gr))
RT_A1 = np.zeros((len(indx_repairable), story_gr))
RT_A2 = np.zeros((len(indx_repairable), story_gr))
RT_A4 = np.zeros((len(indx_repairable), story_gr))
RT_A5 = np.zeros((len(indx_repairable), story_gr))
RT_B = np.zeros((len(indx_repairable), story_gr))
RT_C = np.zeros((len(indx_repairable), story_gr))
RT_D = np.zeros((len(indx_repairable), story_gr))
for i in range(len(indx_repairable)):
n=0
m=0
for j in range(len(rep_phases)-1):
if rep_phases[j]==1:
max_RTgr_2_4_5 = np.maximum.reduce([RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7],RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7],RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7]])
max_RT_A1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7]+max_RTgr_2_4_5)
max_RT_RS1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7])
max_RT_A2 = np.amax(RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7])
max_RT_A4 = np.amax(RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7])
max_RT_A5 = np.amax(RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7])
max_RT_B = np.amax(RT_RC_RS_days[i,2+n:2+7*rep_phases[j]+n:7])
max_RT_D = np.amax(RT_RC_RS_days[i,6+n:6+7*rep_phases[j]+n:7])
RT_A1[i,m] = min(RT_RC_RS_days[i,0+n]+max_RTgr_2_4_5[0],max_RT_A1)
RT_RS1[i,m] = min(RT_RC_RS_days[i,0+n],max_RT_RS1)
RT_A2[i,m] = min(RT_RC_RS_days[i,1+n],max_RT_A2)
RT_A4[i,m] = min(RT_RC_RS_days[i,3+n],max_RT_A4)
RT_A5[i,m] = min(RT_RC_RS_days[i,4+n],max_RT_A5)
RT_B[i,m] = min(RT_RC_RS_days[i,2+n],max_RT_B)
RT_C[i,m] = RT_RC_RS_days[i,5+n]
RT_D[i,m] = min(RT_RC_RS_days[i,6+n],max_RT_D)
m=m+1
n=n+rep_phases[j]*7
elif rep_phases[j]==2:
max_RTgr_2_4_5 = np.maximum.reduce([RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7],RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7],RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7]])
max_RT_A1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7]+max_RTgr_2_4_5)
max_RT_RS1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7])
max_RT_A2 = np.amax(RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7])
max_RT_A4 = np.amax(RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7])
max_RT_A5 = np.amax(RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7])
max_RT_B = np.amax(RT_RC_RS_days[i,2+n:2+7*rep_phases[j]+n:7])
max_RT_D = np.amax(RT_RC_RS_days[i,6+n:6+7*rep_phases[j]+n:7])
RT_RS1[i,m] = min(RT_RC_RS_days[i,0+n],max_RT_RS1)
RT_A1[i,m] = min(RT_RC_RS_days[i,0+n]+max_RTgr_2_4_5[0],max_RT_A1)
RT_A2[i,m] = min(RT_RC_RS_days[i,1+n],max_RT_A2)
RT_A4[i,m] = min(RT_RC_RS_days[i,3+n],max_RT_A4)
RT_A5[i,m] = min(RT_RC_RS_days[i,4+n],max_RT_A5)
RT_B[i,m] = min(RT_RC_RS_days[i,2+n],max_RT_B)
RT_C[i,m] = RT_RC_RS_days[i,5+n]
RT_D[i,m] = min(RT_RC_RS_days[i,6+n],max_RT_D)
RT_RS1[i,m+1] = min(RT_RS1[i,m] + min(RT_RC_RS_days[i,7+n], max_RT_RS1-RT_RC_RS_days[i,0+n]),max_RT_RS1)
RT_A1[i,m+1] = max(min(RT_RS1[i,m] + min(RT_RC_RS_days[i,7+n]+max_RTgr_2_4_5[1], max_RT_A1-RT_RC_RS_days[i,0+n]-max_RTgr_2_4_5[0]),max_RT_A1), RT_A1[i,m])
RT_A2[i,m+1] = min(RT_A2[i,m] + min(RT_RC_RS_days[i,8+n], max_RT_A2-RT_RC_RS_days[i,1+n]),max_RT_A2)
RT_A4[i,m+1] = min(RT_A4[i,m] + min(RT_RC_RS_days[i,10+n], max_RT_A4-RT_RC_RS_days[i,3+n]),max_RT_A4)
RT_A5[i,m+1] = min(RT_A5[i,m] + min(RT_RC_RS_days[i,11+n], max_RT_A5-RT_RC_RS_days[i,4+n]),max_RT_A5)
RT_B[i,m+1] = min(RT_B[i,m] + min(RT_RC_RS_days[i,9+n], max_RT_B-RT_RC_RS_days[i,2+n]),max_RT_B)
RT_C[i,m+1] = RT_C[i,m] + RT_RC_RS_days[i,12+n]
RT_D[i,m+1] = min(RT_D[i,m] + min(RT_RC_RS_days[i,13+n], max_RT_D-RT_RC_RS_days[i,6+n]),max_RT_D)
m=m+2
n=n+rep_phases[j]*7
elif rep_phases[j]==3:
max_RTgr_2_4_5 = np.maximum.reduce([RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7],RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7],RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7]])
max_RT_A1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7]+max_RTgr_2_4_5)
max_RT_RS1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7])
max_RT_A2 = np.amax(RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7])
max_RT_A4 = np.amax(RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7])
max_RT_A5 = np.amax(RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7])
max_RT_B = np.amax(RT_RC_RS_days[i,2+n:2+7*rep_phases[j]+n:7])
max_RT_D = np.amax(RT_RC_RS_days[i,6+n:6+7*rep_phases[j]+n:7])
RT_RS1[i,m] = min(RT_RC_RS_days[i,0+n],max_RT_RS1)
RT_A1[i,m] = min(RT_RC_RS_days[i,0+n]+max_RTgr_2_4_5[0],max_RT_A1)
RT_A2[i,m] = min(RT_RC_RS_days[i,1+n],max_RT_A2)
RT_A4[i,m] = min(RT_RC_RS_days[i,3+n],max_RT_A4)
RT_A5[i,m] = min(RT_RC_RS_days[i,4+n],max_RT_A5)
RT_B[i,m] = min(RT_RC_RS_days[i,2+n],max_RT_B)
RT_C[i,m] = RT_RC_RS_days[i,5+n]
RT_D[i,m] = min(RT_RC_RS_days[i,6+n],max_RT_D)
RT_RS1[i,m+1] = min(RT_RS1[i,m] + min(RT_RC_RS_days[i,7+n], max_RT_A1-RT_RC_RS_days[i,0+n]),max_RT_RS1)
RT_A1[i,m+1] = max(min(RT_RS1[i,m] + min(RT_RC_RS_days[i,7+n]+max_RTgr_2_4_5[1], max_RT_A1-RT_RC_RS_days[i,0+n]-max_RTgr_2_4_5[0]),max_RT_A1), RT_A1[i,m])
RT_A2[i,m+1] = min(RT_A2[i,m] + min(RT_RC_RS_days[i,8+n], max_RT_A2-RT_RC_RS_days[i,1+n]),max_RT_A2)
RT_A4[i,m+1] = min(RT_A4[i,m] + min(RT_RC_RS_days[i,10+n], max_RT_A4-RT_RC_RS_days[i,3+n]),max_RT_A4)
RT_A5[i,m+1] = min(RT_A5[i,m] + min(RT_RC_RS_days[i,11+n], max_RT_A5-RT_RC_RS_days[i,4+n]),max_RT_A5)
RT_B[i,m+1] = min(RT_B[i,m] + min(RT_RC_RS_days[i,9+n], max_RT_B-RT_RC_RS_days[i,2+n]),max_RT_B)
RT_C[i,m+1] = RT_C[i,m] + RT_RC_RS_days[i,12+n]
RT_D[i,m+1] = min(RT_D[i,m] + min(RT_RC_RS_days[i,13+n], max_RT_D-RT_RC_RS_days[i,6+n]),max_RT_D)
RT_RS1[i,m+2] = min(RT_A1[i,m+1] + min(RT_RC_RS_days[i,14+n], max_RT_RS1-RT_RC_RS_days[i,7+n]),max_RT_RS1)
RT_A1[i,m+2] = max(min(RT_RS1[i,m+1] + min(RT_RC_RS_days[i,14+n]+max_RTgr_2_4_5[2], max_RT_A1-RT_RC_RS_days[i,7+n]-max_RTgr_2_4_5[1]),max_RT_A1), RT_A1[i,m+1])
RT_A2[i,m+2] = min(RT_A2[i,m+1] + min(RT_RC_RS_days[i,15+n], max_RT_A2-RT_RC_RS_days[i,8+n]),max_RT_A2)
RT_A4[i,m+2] = min(RT_A4[i,m+1] + min(RT_RC_RS_days[i,17+n], max_RT_A4-RT_RC_RS_days[i,10+n]),max_RT_A4)
RT_A5[i,m+2] = min(RT_A5[i,m+1] + min(RT_RC_RS_days[i,18+n], max_RT_A5-RT_RC_RS_days[i,11+n]),max_RT_A5)
RT_B[i,m+2] = min(RT_B[i,m+1] + min(RT_RC_RS_days[i,16+n], max_RT_B-RT_RC_RS_days[i,9+n]),max_RT_B)
RT_C[i,m+2] = RT_C[i,m+1] + RT_RC_RS_days[i,19+n]
RT_D[i,m+2] = min(RT_D[i,m+1] + min(RT_RC_RS_days[i,20+n], max_RT_D-RT_RC_RS_days[i,13+n]),max_RT_D)
m=m+3
n=n+rep_phases[j]*7
for i in range(len(indx_repairable)):
for j in range(len(rep_phases)-1):
RT_RS1[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_RS1[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_RS1[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_A1[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_A1[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_A1[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_A2[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_A2[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_A2[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_A4[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_A4[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_A4[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_A5[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_A5[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_A5[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_B[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_B[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_B[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_C[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_C[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_C[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_D[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_D[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_D[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_A = np.maximum.reduce([RT_A1, RT_A2, RT_A4, RT_A5])
RT_RS2 = RT_A2
RT_RS4 = RT_A4
RT_RS5 = RT_A5
RT_RS3 = RT_B
RT_RS6 = RT_C
RT_RS7 = RT_D
with pd.ExcelWriter(os.path.join(output_path,r'RT_stepfunc_RO.xlsx')) as writer:
pd.DataFrame(RT_RS1).to_excel(writer, sheet_name='RSeq1', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS2).to_excel(writer, sheet_name='RSeq2', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS3).to_excel(writer, sheet_name='RSeq3', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS4).to_excel(writer, sheet_name='RSeq4', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS5).to_excel(writer, sheet_name='RSeq5', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS6).to_excel(writer, sheet_name='RSeq6', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS7).to_excel(writer, sheet_name='RSeq7', header=story[0:story_gr], index_label='#Num')
a=np.zeros(len(indx_repairable))
b=np.zeros(len(indx_repairable))
c=np.zeros(len(indx_repairable))
d=np.zeros(len(indx_repairable))
for i in range(len(indx_repairable)):
if max(RT_A[i,:]) != 0:
a[i]=1
if max(RT_B[i,:]) != 0:
b[i]=1
if max(RT_C[i,:]) != 0:
c[i]=1
if max(RT_D[i,:]) != 0:
d[i]=1
aa=np.tile(a,(len(usability_repairable),1)).T
bb=np.tile(b,(len(usability_repairable),1)).T
cc=np.tile(c,(len(usability_repairable),1)).T
dd=np.tile(d,(len(usability_repairable),1)).T
RT_A = RT_A + np.tile(DT_A[:,2],(story_gr,1)).T
RT_B = RT_B + np.tile(DT_B[:,2],(story_gr,1)).T
RT_C = RT_C + np.tile(DT_C[:,2],(story_gr,1)).T
RT_D = RT_D + np.tile(DT_D[:,2],(story_gr,1)).T
DT_A[:,3:]=RT_A
DT_B[:,3:]=RT_B
DT_C[:,3:]=RT_C
DT_D[:,3:]=RT_D
DT_final_repairable = np.maximum.reduce([DT_A*aa, DT_B*bb, DT_D*dd])
DT_A = DT_A*aa
DT_B = DT_B*bb
DT_D = DT_D*dd
mat_adj_A = np.where(np.divide(DT_A,np.transpose(np.repeat([DT_A[:,2]],story_gr+3,axis=0)))[:,3:]==1,0,1)
mat_adj_B = np.where(np.divide(DT_B,np.transpose(np.repeat([DT_B[:,2]],story_gr+3,axis=0)))[:,3:]==1,0,1)
mat_adj_D = np.where(np.divide(DT_D,np.transpose(np.repeat([DT_D[:,2]],story_gr+3,axis=0)))[:,3:]==1,0,1)
mat_adj = np.where(np.divide(DT_final_repairable,np.transpose(np.repeat([DT_final_repairable[:,2]],story_gr+3,axis=0)))[:,3:]==1,0,1)
mat_adj_A2 = np.concatenate((np.ones((len(DT_A),3)), mat_adj_A), axis=1)
mat_adj_B2 = np.concatenate((np.ones((len(DT_B),3)), mat_adj_B), axis=1)
mat_adj_D2 = np.concatenate((np.ones((len(DT_D),3)), mat_adj_D), axis=1)
mat_adj2 = np.concatenate((np.ones((len(DT_final_repairable),3)), mat_adj), axis=1)
DT_A = DT_A * mat_adj_A2
DT_B = DT_B * mat_adj_B2
DT_D = DT_D * mat_adj_D2
DT_final_repairable = DT_final_repairable * mat_adj2
for i in range(len(DT_final_repairable)):
if DT_final_repairable[i,3]==0:
indx = np.asarray(np.where(DT_final_repairable[i,:]==0))
indx_max = max(np.squeeze(indx))
DT_final_repairable[i,indx_max]=DT_final_repairable[i,2]
DT_final_repairable[i,2]=0
#for i in range(len(DT_A)):
if DT_A[i,3]==0:
indx = np.asarray(np.where(DT_A[i,:]==0))
indx_max = max(np.squeeze(indx))
DT_A[i,indx_max]=DT_A[i,2]
DT_A[i,2]=0
#for i in range(len(DT_B)):
if DT_B[i,3]==0:
indx = np.asarray(np.where(DT_B[i,:]==0))
indx_max = max(np.squeeze(indx))
DT_B[i,indx_max]=DT_B[i,2]
DT_B[i,2]=0
#for i in range(len(DT_D)):
if DT_D[i,3]==0:
indx = np.asarray(np.where(DT_D[i,:]==0))
indx_max = max(np.squeeze(indx))
DT_D[i,indx_max]=DT_D[i,2]
DT_D[i,2]=0
# ensure that the downtime is not less than the inspection time + stability time in each repair phase
for i in range(len(DT_final_repairable)):
if DT_final_repairable[i,2]==0:
DT_final_repairable[i,2:][DT_final_repairable[i,2:]==0]=IF_inspection[i] + IF_stab[i]
#for i in range(len(DT_A)):
if DT_A[i,2]==0:
DT_A[i,2:][DT_A[i,2:]==0]=IF_inspection[i]
#for i in range(len(DT_B)):
if DT_B[i,2]==0:
DT_B[i,2:][DT_B[i,2:]==0]=IF_inspection[i]
#for i in range(len(DT_D)):
if DT_D[i,2]==0:
DT_D[i,2:][DT_D[i,2:]==0]=IF_inspection[i]
DT_final_RC3 = np.concatenate((DT_final_repairable, DT_final_irreparable), axis=0)
DT_final_RC3_use = np.concatenate((usability, DT_final_RC3), axis=0)
DT_final_RC3_use = np.c_[np.squeeze(row_id_all).T, DT_final_RC3_use]
pd.DataFrame(DT_final_RC3_use).to_csv(os.path.join(output_path,r'DT_stepfunc_RO.csv'), header=None, index=None)
DT_A_RC2 = np.concatenate((DT_A, DT_final_irreparable), axis=0)
DT_B_RC2 = np.concatenate((DT_B, DT_final_irreparable), axis=0)
DT_D_RC2 = np.concatenate((DT_D, DT_final_irreparable), axis=0)
DT_A_RC2_use = np.concatenate((usability, DT_A_RC2), axis=0)
DT_B_RC2_use = np.concatenate((usability, DT_B_RC2), axis=0)
DT_D_RC2_use = np.concatenate((usability, DT_D_RC2), axis=0)
#elevator (path C) is not required to be repaired to achieve reoccupancy
DT_A_RC2_use = np.c_[np.squeeze(row_id_all).T, DT_A_RC2_use]
DT_B_RC2_use = np.c_[np.squeeze(row_id_all).T, DT_B_RC2_use]
DT_D_RC2_use = np.c_[np.squeeze(row_id_all).T, DT_D_RC2_use]
with pd.ExcelWriter(os.path.join(output_path,r'DT_path_RO.xlsx'), options= {'strings_to_numbers': True}) as writer:
pd.DataFrame(DT_A_RC2_use).to_excel(writer, sheet_name='A', header=None, index_label=None, index=False)
pd.DataFrame(DT_B_RC2_use).to_excel(writer, sheet_name='B', header=None, index_label=None, index=False)
pd.DataFrame(DT_D_RC2_use).to_excel(writer, sheet_name='D', header=None, index_label=None, index=False)
##downtime to shelter-in-place
RT_RC_RS_days = RT_RC4_RS_days
#repair paths:
DT_A1 = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_A2 = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_A4 = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_A5 = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_B = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_C = np.zeros((len(indx_repairable), len(usability_repairable)))
DT_D = np.zeros((len(indx_repairable), len(usability_repairable)))
max_RTbm_2_4_5 = np.maximum(RT_RC_RS_days[:,-6-7*(story_bm-1):-5:7], RT_RC_RS_days[:,-4-7*(story_bm-1):-3:7], RT_RC_RS_days[:,-3-7*(story_bm-1):-2:7])
DT_A1[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs1, IF_eng+IF_permit]) + np.amax(RT_RC_RS_days[:,-7-7*(story_bm-1):-6:7]+max_RTbm_2_4_5, axis=1)
DT_A = DT_A1
DT_B[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs3, IF_eng+IF_permit]) + np.amax(RT_RC_RS_days[:,-5-7*(story_bm-1):-4:7], axis=1)
DT_C[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs6, IF_eng+IF_permit]) + sum(RT_RC_RS_days[:,-2-7*(story_bm-1):-1:7].T) #2 workers per elevator for the entire bld
DT_D[:,2] = IF_inspection + np.maximum.reduce([IF_stab, IF_finance, IF_cm_rs7, IF_eng+IF_permit]) + np.amax(RT_RC_RS_days[:,-1-7*(story_bm-1):len(RT_RC_RS_days.T):7], axis=1)
RT_RS1 = np.zeros((len(indx_repairable), story_gr))
RT_A1 = np.zeros((len(indx_repairable), story_gr))
RT_A2 = np.zeros((len(indx_repairable), story_gr))
RT_A4 = np.zeros((len(indx_repairable), story_gr))
RT_A5 = np.zeros((len(indx_repairable), story_gr))
RT_B = np.zeros((len(indx_repairable), story_gr))
RT_C = np.zeros((len(indx_repairable), story_gr))
RT_D = np.zeros((len(indx_repairable), story_gr))
for i in range(len(indx_repairable)):
n=0
m=0
for j in range(len(rep_phases)-1):
if rep_phases[j]==1:
max_RTgr_2_4_5 = np.maximum.reduce([RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7],RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7],RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7]])
max_RT_A1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7]+max_RTgr_2_4_5)
max_RT_RS1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7])
max_RT_A2 = np.amax(RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7])
max_RT_A4 = np.amax(RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7])
max_RT_A5 = np.amax(RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7])
max_RT_B = np.amax(RT_RC_RS_days[i,2+n:2+7*rep_phases[j]+n:7])
max_RT_D = np.amax(RT_RC_RS_days[i,6+n:6+7*rep_phases[j]+n:7])
RT_A1[i,m] = min(RT_RC_RS_days[i,0+n]+max_RTgr_2_4_5[0],max_RT_A1)
RT_RS1[i,m] = min(RT_RC_RS_days[i,0+n],max_RT_RS1)
RT_A2[i,m] = min(RT_RC_RS_days[i,1+n],max_RT_A2)
RT_A4[i,m] = min(RT_RC_RS_days[i,3+n],max_RT_A4)
RT_A5[i,m] = min(RT_RC_RS_days[i,4+n],max_RT_A5)
RT_B[i,m] = min(RT_RC_RS_days[i,2+n],max_RT_B)
RT_C[i,m] = RT_RC_RS_days[i,5+n]
RT_D[i,m] = min(RT_RC_RS_days[i,6+n],max_RT_D)
m=m+1
n=n+rep_phases[j]*7
elif rep_phases[j]==2:
max_RTgr_2_4_5 = np.maximum.reduce([RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7],RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7],RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7]])
max_RT_A1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7]+max_RTgr_2_4_5)
max_RT_RS1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7])
max_RT_A2 = np.amax(RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7])
max_RT_A4 = np.amax(RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7])
max_RT_A5 = np.amax(RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7])
max_RT_B = np.amax(RT_RC_RS_days[i,2+n:2+7*rep_phases[j]+n:7])
max_RT_D = np.amax(RT_RC_RS_days[i,6+n:6+7*rep_phases[j]+n:7])
RT_RS1[i,m] = min(RT_RC_RS_days[i,0+n],max_RT_RS1)
RT_A1[i,m] = min(RT_RC_RS_days[i,0+n]+max_RTgr_2_4_5[0],max_RT_A1)
RT_A2[i,m] = min(RT_RC_RS_days[i,1+n],max_RT_A2)
RT_A4[i,m] = min(RT_RC_RS_days[i,3+n],max_RT_A4)
RT_A5[i,m] = min(RT_RC_RS_days[i,4+n],max_RT_A5)
RT_B[i,m] = min(RT_RC_RS_days[i,2+n],max_RT_B)
RT_C[i,m] = RT_RC_RS_days[i,5+n]
RT_D[i,m] = min(RT_RC_RS_days[i,6+n],max_RT_D)
RT_RS1[i,m+1] = min(RT_RS1[i,m] + min(RT_RC_RS_days[i,7+n], max_RT_RS1-RT_RC_RS_days[i,0+n]),max_RT_RS1)
RT_A1[i,m+1] = max(min(RT_RS1[i,m] + min(RT_RC_RS_days[i,7+n]+max_RTgr_2_4_5[1], max_RT_A1-RT_RC_RS_days[i,0+n]-max_RTgr_2_4_5[0]),max_RT_A1), RT_A1[i,m])
RT_A2[i,m+1] = min(RT_A2[i,m] + min(RT_RC_RS_days[i,8+n], max_RT_A2-RT_RC_RS_days[i,1+n]),max_RT_A2)
RT_A4[i,m+1] = min(RT_A4[i,m] + min(RT_RC_RS_days[i,10+n], max_RT_A4-RT_RC_RS_days[i,3+n]),max_RT_A4)
RT_A5[i,m+1] = min(RT_A5[i,m] + min(RT_RC_RS_days[i,11+n], max_RT_A5-RT_RC_RS_days[i,4+n]),max_RT_A5)
RT_B[i,m+1] = min(RT_B[i,m] + min(RT_RC_RS_days[i,9+n], max_RT_B-RT_RC_RS_days[i,2+n]),max_RT_B)
RT_C[i,m+1] = RT_C[i,m] + RT_RC_RS_days[i,12+n]
RT_D[i,m+1] = min(RT_D[i,m] + min(RT_RC_RS_days[i,13+n], max_RT_D-RT_RC_RS_days[i,6+n]),max_RT_D)
m=m+2
n=n+rep_phases[j]*7
elif rep_phases[j]==3:
max_RTgr_2_4_5 = np.maximum.reduce([RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7],RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7],RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7]])
max_RT_A1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7]+max_RTgr_2_4_5)
max_RT_RS1 = np.amax(RT_RC_RS_days[i,0+n:0+7*rep_phases[j]+n:7])
max_RT_A2 = np.amax(RT_RC_RS_days[i,1+n:1+7*rep_phases[j]+n:7])
max_RT_A4 = np.amax(RT_RC_RS_days[i,3+n:3+7*rep_phases[j]+n:7])
max_RT_A5 = np.amax(RT_RC_RS_days[i,4+n:4+7*rep_phases[j]+n:7])
max_RT_B = np.amax(RT_RC_RS_days[i,2+n:2+7*rep_phases[j]+n:7])
max_RT_D = np.amax(RT_RC_RS_days[i,6+n:6+7*rep_phases[j]+n:7])
RT_RS1[i,m] = min(RT_RC_RS_days[i,0+n],max_RT_RS1)
RT_A1[i,m] = min(RT_RC_RS_days[i,0+n]+max_RTgr_2_4_5[0],max_RT_A1)
RT_A2[i,m] = min(RT_RC_RS_days[i,1+n],max_RT_A2)
RT_A4[i,m] = min(RT_RC_RS_days[i,3+n],max_RT_A4)
RT_A5[i,m] = min(RT_RC_RS_days[i,4+n],max_RT_A5)
RT_B[i,m] = min(RT_RC_RS_days[i,2+n],max_RT_B)
RT_C[i,m] = RT_RC_RS_days[i,5+n]
RT_D[i,m] = min(RT_RC_RS_days[i,6+n],max_RT_D)
RT_RS1[i,m+1] = min(RT_RS1[i,m] + min(RT_RC_RS_days[i,7+n], max_RT_A1-RT_RC_RS_days[i,0+n]),max_RT_RS1)
RT_A1[i,m+1] = max(min(RT_RS1[i,m] + min(RT_RC_RS_days[i,7+n]+max_RTgr_2_4_5[1], max_RT_A1-RT_RC_RS_days[i,0+n]-max_RTgr_2_4_5[0]),max_RT_A1), RT_A1[i,m])
RT_A2[i,m+1] = min(RT_A2[i,m] + min(RT_RC_RS_days[i,8+n], max_RT_A2-RT_RC_RS_days[i,1+n]),max_RT_A2)
RT_A4[i,m+1] = min(RT_A4[i,m] + min(RT_RC_RS_days[i,10+n], max_RT_A4-RT_RC_RS_days[i,3+n]),max_RT_A4)
RT_A5[i,m+1] = min(RT_A5[i,m] + min(RT_RC_RS_days[i,11+n], max_RT_A5-RT_RC_RS_days[i,4+n]),max_RT_A5)
RT_B[i,m+1] = min(RT_B[i,m] + min(RT_RC_RS_days[i,9+n], max_RT_B-RT_RC_RS_days[i,2+n]),max_RT_B)
RT_C[i,m+1] = RT_C[i,m] + RT_RC_RS_days[i,12+n]
RT_D[i,m+1] = min(RT_D[i,m] + min(RT_RC_RS_days[i,13+n], max_RT_D-RT_RC_RS_days[i,6+n]),max_RT_D)
RT_RS1[i,m+2] = min(RT_A1[i,m+1] + min(RT_RC_RS_days[i,14+n], max_RT_RS1-RT_RC_RS_days[i,7+n]),max_RT_RS1)
RT_A1[i,m+2] = max(min(RT_RS1[i,m+1] + min(RT_RC_RS_days[i,14+n]+max_RTgr_2_4_5[2], max_RT_A1-RT_RC_RS_days[i,7+n]-max_RTgr_2_4_5[1]),max_RT_A1), RT_A1[i,m+1])
RT_A2[i,m+2] = min(RT_A2[i,m+1] + min(RT_RC_RS_days[i,15+n], max_RT_A2-RT_RC_RS_days[i,8+n]),max_RT_A2)
RT_A4[i,m+2] = min(RT_A4[i,m+1] + min(RT_RC_RS_days[i,17+n], max_RT_A4-RT_RC_RS_days[i,10+n]),max_RT_A4)
RT_A5[i,m+2] = min(RT_A5[i,m+1] + min(RT_RC_RS_days[i,18+n], max_RT_A5-RT_RC_RS_days[i,11+n]),max_RT_A5)
RT_B[i,m+2] = min(RT_B[i,m+1] + min(RT_RC_RS_days[i,16+n], max_RT_B-RT_RC_RS_days[i,9+n]),max_RT_B)
RT_C[i,m+2] = RT_C[i,m+1] + RT_RC_RS_days[i,19+n]
RT_D[i,m+2] = min(RT_D[i,m+1] + min(RT_RC_RS_days[i,20+n], max_RT_D-RT_RC_RS_days[i,13+n]),max_RT_D)
m=m+3
n=n+rep_phases[j]*7
for i in range(len(indx_repairable)):
for j in range(len(rep_phases)-1):
RT_RS1[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_RS1[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_RS1[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_A1[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_A1[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_A1[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_A2[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_A2[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_A2[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_A4[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_A4[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_A4[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_A5[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_A5[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_A5[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_B[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_B[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_B[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_C[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_C[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_C[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_D[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])] = np.amax(RT_D[i,sum(rep_phases[:j]):sum(rep_phases[:j+1])]) + RT_D[i,sum(rep_phases[:j+1]):sum(rep_phases[:j+2])]
RT_A = RT_A1
RT_RS2 = RT_A2
RT_RS4 = RT_A4
RT_RS5 = RT_A5
RT_RS3 = RT_B
RT_RS6 = RT_C
RT_RS7 = RT_D
with pd.ExcelWriter(os.path.join(output_path,r'RT_stepfunc_SiP.xlsx'), options= {'strings_to_numbers': True}) as writer:
pd.DataFrame(RT_RS1).to_excel(writer, sheet_name='RSeq1', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS2).to_excel(writer, sheet_name='RSeq2', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS3).to_excel(writer, sheet_name='RSeq3', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS4).to_excel(writer, sheet_name='RSeq4', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS5).to_excel(writer, sheet_name='RSeq5', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS6).to_excel(writer, sheet_name='RSeq6', header=story[0:story_gr], index_label='#Num')
pd.DataFrame(RT_RS7).to_excel(writer, sheet_name='RSeq7', header=story[0:story_gr], index_label='#Num')
a=np.zeros(len(indx_repairable))
b=np.zeros(len(indx_repairable))
c=np.zeros(len(indx_repairable))
d=np.zeros(len(indx_repairable))
for i in range(len(indx_repairable)):
if max(RT_A[i,:]) != 0:
a[i]=1
if max(RT_B[i,:]) != 0:
b[i]=1
if max(RT_C[i,:]) != 0:
c[i]=1
if max(RT_D[i,:]) != 0:
d[i]=1
aa=np.tile(a,(len(usability_repairable),1)).T
bb=np.tile(b,(len(usability_repairable),1)).T
cc=np.tile(c,(len(usability_repairable),1)).T
dd=np.tile(d,(len(usability_repairable),1)).T
RT_A = RT_A + np.tile(DT_A[:,2],(story_gr,1)).T
RT_B = RT_B + np.tile(DT_B[:,2],(story_gr,1)).T
RT_C = RT_C + np.tile(DT_C[:,2],(story_gr,1)).T
RT_D = RT_D + np.tile(DT_D[:,2],(story_gr,1)).T
DT_A[:,3:]=RT_A
DT_B[:,3:]=RT_B
DT_C[:,3:]=RT_C
DT_D[:,3:]=RT_D
DT_final_repairable = np.maximum.reduce([DT_A*aa, DT_D*dd])
DT_A = DT_A*aa
DT_D = DT_D*dd
mat_adj_A = np.where(np.divide(DT_A,np.transpose(np.repeat([DT_A[:,2]],story_gr+3,axis=0)))[:,3:]==1,0,1)
mat_adj_D = np.where(np.divide(DT_D,np.transpose(np.repeat([DT_D[:,2]],story_gr+3,axis=0)))[:,3:]==1,0,1)
mat_adj = np.where(np.divide(DT_final_repairable,np.transpose(np.repeat([DT_final_repairable[:,2]],story_gr+3,axis=0)))[:,3:]==1,0,1)
mat_adj_A2 = np.concatenate((np.ones((len(DT_A),3)), mat_adj_A), axis=1)
mat_adj_D2 = np.concatenate((np.ones((len(DT_D),3)), mat_adj_D), axis=1)
mat_adj2 = np.concatenate((np.ones((len(DT_final_repairable),3)), mat_adj), axis=1)
DT_A = DT_A * mat_adj_A2
DT_D = DT_D * mat_adj_D2
DT_final_repairable = DT_final_repairable * mat_adj2
for i in range(len(DT_final_repairable)):
if DT_final_repairable[i,3]==0:
indx = np.asarray(np.where(DT_final_repairable[i,:]==0))
indx_max = max(np.squeeze(indx))
DT_final_repairable[i,indx_max]=DT_final_repairable[i,2]
DT_final_repairable[i,2]=0
#for i in range(len(DT_A)):
if DT_A[i,3]==0:
indx = np.asarray(np.where(DT_A[i,:]==0))
indx_max = max(np.squeeze(indx))
DT_A[i,indx_max]=DT_A[i,2]
DT_A[i,2]=0
#for i in range(len(DT_D)):
if DT_D[i,3]==0:
indx = np.asarray(np.where(DT_D[i,:]==0))
indx_max = max(np.squeeze(indx))
DT_D[i,indx_max]=DT_D[i,2]
DT_D[i,2]=0
for i in range(len(DT_final_repairable)):
if DT_final_repairable[i,2]==0:
DT_final_repairable[i,2:][DT_final_repairable[i,2:]==0]=IF_inspection[i] + IF_stab[i]
#for i in range(len(DT_A)):
if DT_A[i,2]==0:
DT_A[i,2:][DT_A[i,2:]==0]=IF_inspection[i]
#for i in range(len(DT_D)):
if DT_D[i,2]==0:
DT_D[i,2:][DT_D[i,2:]==0]=IF_inspection[i]
DT_final_RC4 = np.concatenate((DT_final_repairable, DT_final_irreparable), axis=0)
DT_final_RC4_use = np.concatenate((usability, DT_final_RC4), axis=0)
DT_final_RC4_use = np.c_[np.squeeze(row_id_all).T, DT_final_RC4_use]
pd.DataFrame(DT_final_RC4_use).to_csv(os.path.join(output_path,r'DT_stepfunc_SiP.csv'), header=None, index=None)
DT_A_RC2 = np.concatenate((DT_A, DT_final_irreparable), axis=0)
DT_D_RC2 = np.concatenate((DT_D, DT_final_irreparable), axis=0)
DT_A_RC2_use = np.concatenate((usability, DT_A_RC2), axis=0)
DT_D_RC2_use = np.concatenate((usability, DT_D_RC2), axis=0)
#structural (path A) and staricase (path D) repairs are only required to achieve sheltering capacity
DT_A_RC2_use = np.c_[np.squeeze(row_id_all).T, DT_A_RC2_use]
DT_D_RC2_use = np.c_[np.squeeze(row_id_all).T, DT_D_RC2_use]
with pd.ExcelWriter(os.path.join(output_path,r'DT_path_SiP.xlsx'), options= {'strings_to_numbers': True}) as writer:
pd.DataFrame(DT_A_RC2_use).to_excel(writer, sheet_name='A', header=None, index_label=None, index=False)
pd.DataFrame(DT_D_RC2_use).to_excel(writer, sheet_name='D', header=None, index_label=None, index=False)
#%% summary stats
zero_DT_RC2 = np.percentile(DT_final_RC2[:,-1],0)
tenth_DT_RC2 = np.percentile(DT_final_RC2[:,-1],10)
med_DT_RC2 = np.median(DT_final_RC2[:,-1])
mean_DT_RC2 = np.mean(DT_final_RC2[:,-1])
ninety_DT_RC2 = np.percentile(DT_final_RC2[:,-1],90)
hundred_DT_RC2 = np.percentile(DT_final_RC2[:,-1],100)
zero_DT_RC3 = np.percentile(DT_final_RC3[:,-1],0)
tenth_DT_RC3 = np.percentile(DT_final_RC3[:,-1],10)
med_DT_RC3 = np.median(DT_final_RC3[:,-1])
mean_DT_RC3 = np.mean(DT_final_RC3[:,-1])
ninety_DT_RC3 = np.percentile(DT_final_RC3[:,-1],90)
hundred_DT_RC3 = np.percentile(DT_final_RC3[:,-1],100)
zero_DT_RC4 = np.percentile(DT_final_RC4[:,-1],0)
tenth_DT_RC4 = np.percentile(DT_final_RC4[:,-1],10)
med_DT_RC4 = np.median(DT_final_RC4[:,-1])
mean_DT_RC4 = np.mean(DT_final_RC4[:,-1])
ninety_DT_RC4 = np.percentile(DT_final_RC4[:,-1],90)
hundred_DT_RC4 = np.percentile(DT_final_RC4[:,-1],100)
DT_summary_RC2 = [zero_DT_RC2, tenth_DT_RC2, med_DT_RC2, mean_DT_RC2, ninety_DT_RC2, hundred_DT_RC2]
DT_summary_RC3 = [zero_DT_RC3, tenth_DT_RC3, med_DT_RC3, mean_DT_RC3, ninety_DT_RC3, hundred_DT_RC3]
DT_summary_RC4 = [zero_DT_RC4, tenth_DT_RC4, med_DT_RC4, mean_DT_RC4, ninety_DT_RC4, hundred_DT_RC4]
row_id = ["Minimum", "10th Percentile", "Median", "Mean", "90th Percentile", "Maximum"]
col_id = ["Downtime","Functional Recovery", "Re-Occupancy", "Shelter-in-Place"]
DT_summ_1 = np.c_[(row_id), np.array(DT_summary_RC2), np.array(DT_summary_RC3), np.array(DT_summary_RC4)]
DT_summ = np.vstack((col_id,DT_summ_1))
pd.DataFrame(DT_summ).to_csv(os.path.join(output_path,r'DT_summary.csv'), header=False, index=False)
#determine the proability of hindering a recovery state based on the max repair class & damaged facade components for the stability recovery state
RCmax_repairable = np.max(np.squeeze(RCmax_RS), axis=1)
N_DMG_RC3_RS_mat = np.squeeze(N_DMG_RC3_RS)[:,np.arange(2,len(np.squeeze(N_DMG_RC3_RS).transpose()),7)]
N_DMG_RC3_RS3 = sum(N_DMG_RC3_RS_mat.transpose())
#stability is hindered if damage facade components exceed 50% of the total
for i in range(len(indx_repairable)):
if N_DMG_RC3_RS3[i] > 0.5*Qt_facade:
RCmax_repairable[i]=5
RCmax = np.append(RCmax_repairable, 5*np.ones(len(indx_irreparable)+len(indx_collapse)))
RS_stats = ['prob (RS not achieved)', len(RCmax[RCmax>=2])/len(RCmax), len(RCmax[RCmax>=3])/len(RCmax), len(RCmax[RCmax>=4])/len(RCmax)]
index_label = ['Recovery State','Functional Recovery','Reoccupancy','Shelter-in-Place']
pd.DataFrame(np.c_[index_label, RS_stats]).to_csv(os.path.join(output_path,r'RS_stats.csv'), header=False, index=False)
print('Downtime calculations for "Functional Recovery", "Re-Occupancy", and "Shelter-in-Place" recovery states are completed')
return DT_final_RC2, DT_final_RC3, DT_final_RC4, DT_summary_RC2, DT_summary_RC3, DT_summary_RC4, RCmax
| 64.145005
| 199
| 0.616052
| 12,131
| 59,719
| 2.709505
| 0.02745
| 0.061334
| 0.060239
| 0.100399
| 0.874258
| 0.864888
| 0.853905
| 0.847729
| 0.832487
| 0.823846
| 0
| 0.050644
| 0.205462
| 59,719
| 931
| 200
| 64.145005
| 0.642079
| 0.048259
| 0
| 0.833777
| 0
| 0.00133
| 0.015332
| 0.000376
| 0
| 0
| 0
| 0
| 0
| 1
| 0.00133
| false
| 0
| 0.003989
| 0
| 0.006649
| 0.00133
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6bab2817a9da02598c809001f74b35987409e2c6
| 208
|
py
|
Python
|
app/home/views.py
|
sunshineinwater/flask-Purchase_and_sale
|
6fb845da59e4b25737b67d344cbcb4185e93958c
|
[
"MIT"
] | 122
|
2019-04-09T03:21:31.000Z
|
2022-03-27T13:56:08.000Z
|
app/home/views.py
|
zhuhaiv5/flask-Purchase_and_sale
|
6fb845da59e4b25737b67d344cbcb4185e93958c
|
[
"MIT"
] | 15
|
2019-04-25T02:52:48.000Z
|
2021-12-19T09:35:45.000Z
|
app/home/views.py
|
zhuhaiv5/flask-Purchase_and_sale
|
6fb845da59e4b25737b67d344cbcb4185e93958c
|
[
"MIT"
] | 63
|
2019-04-08T08:25:48.000Z
|
2022-03-27T13:56:11.000Z
|
#-*- coding:utf-8 -*-
# author:Agam
# datetime:2018-11-05
from app.home import home
from flask import render_template
@home.route("/")
def index():
return render_template("home/index.html")
| 17.333333
| 46
| 0.663462
| 29
| 208
| 4.689655
| 0.724138
| 0.205882
| 0.264706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052941
| 0.182692
| 208
| 11
| 47
| 18.909091
| 0.747059
| 0.25
| 0
| 0
| 0
| 0
| 0.112676
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
6bd0fb396443947b041a6bddf52890b8746af370
| 158
|
py
|
Python
|
backend/config/settings/production.py
|
r0tii/process-status-viewer
|
6c94a7a6f5e37f37f63d6140c806a0b6fc49ae1c
|
[
"MIT"
] | null | null | null |
backend/config/settings/production.py
|
r0tii/process-status-viewer
|
6c94a7a6f5e37f37f63d6140c806a0b6fc49ae1c
|
[
"MIT"
] | null | null | null |
backend/config/settings/production.py
|
r0tii/process-status-viewer
|
6c94a7a6f5e37f37f63d6140c806a0b6fc49ae1c
|
[
"MIT"
] | null | null | null |
from .base import * # noqa
from .base import env
# GENERAL
# -------------------------------------------------------------------------
# DEBUG is read at import time via `env` (imported from .base — presumably a
# django-environ Env instance; verify in base.py) so production can toggle it
# through the environment without a code change.
DEBUG = env("DEBUG")
| 22.571429
| 75
| 0.348101
| 12
| 158
| 4.583333
| 0.583333
| 0.290909
| 0.509091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120253
| 158
| 6
| 76
| 26.333333
| 0.395683
| 0.544304
| 0
| 0
| 0
| 0
| 0.073529
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6bd383d4d463adbae25feee2fc5688bdf68cdeed
| 123
|
py
|
Python
|
gennav/controllers/__init__.py
|
threewisemonkeys-as/gennav
|
41e86b841a0ce44402f31debc65d5c82109b13a3
|
[
"MIT"
] | null | null | null |
gennav/controllers/__init__.py
|
threewisemonkeys-as/gennav
|
41e86b841a0ce44402f31debc65d5c82109b13a3
|
[
"MIT"
] | null | null | null |
gennav/controllers/__init__.py
|
threewisemonkeys-as/gennav
|
41e86b841a0ce44402f31debc65d5c82109b13a3
|
[
"MIT"
] | null | null | null |
from gennav.controllers.base import Controller # noqa: F401
from gennav.controllers.PID import OmniWheelPID # noqa: F401
| 41
| 61
| 0.804878
| 16
| 123
| 6.1875
| 0.625
| 0.20202
| 0.424242
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056075
| 0.130081
| 123
| 2
| 62
| 61.5
| 0.869159
| 0.170732
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2e11eaaf24ec9e349fa9533a375fe9fb671c1bf4
| 155
|
py
|
Python
|
runway/tests/handlers/__init__.py
|
paul-duffy/runway
|
a0c22eb7ca7b55df5317bdda92c08c4bb39569d2
|
[
"Apache-2.0"
] | 1
|
2020-02-25T21:08:00.000Z
|
2020-02-25T21:08:00.000Z
|
runway/tests/handlers/__init__.py
|
paul-duffy/runway
|
a0c22eb7ca7b55df5317bdda92c08c4bb39569d2
|
[
"Apache-2.0"
] | 2
|
2020-01-07T15:00:55.000Z
|
2020-01-07T15:03:25.000Z
|
runway/tests/handlers/__init__.py
|
voodooGQ/runway
|
8a744f33b39f1342022f1b57db996bb843e4556c
|
[
"Apache-2.0"
] | null | null | null |
"""Import classes."""
# pylint: disable = wildcard-import
from .cfn_lint import * # noqa
from .script import * # noqa
from .yaml_lint import * # noqa
| 19.375
| 35
| 0.677419
| 20
| 155
| 5.15
| 0.55
| 0.291262
| 0.271845
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.193548
| 155
| 7
| 36
| 22.142857
| 0.824
| 0.419355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2e1dc349c5ba1c2829ac7de04be9334323678e6e
| 32,827
|
py
|
Python
|
remodet_repository_wdh_part/Projects/PyLib/NetLib/PoseNet.py
|
UrwLee/Remo_experience
|
a59d5b9d6d009524672e415c77d056bc9dd88c72
|
[
"MIT"
] | null | null | null |
remodet_repository_wdh_part/Projects/PyLib/NetLib/PoseNet.py
|
UrwLee/Remo_experience
|
a59d5b9d6d009524672e415c77d056bc9dd88c72
|
[
"MIT"
] | null | null | null |
remodet_repository_wdh_part/Projects/PyLib/NetLib/PoseNet.py
|
UrwLee/Remo_experience
|
a59d5b9d6d009524672e415c77d056bc9dd88c72
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import caffe
from caffe import layers as L
from caffe import params as P
from caffe.proto import caffe_pb2
import sys
sys.dont_write_bytecode = True
from VggNet import *
def Pose_Stage1_COCO(net, from_layer="relu4_4_CPM", out_layer="concat_stage2", lr=1, decay=1):
    """Attach the two-branch stage-1 CPM head (inference flavour) to `net`.

    Branch L1 ends in 38 output channels and branch L2 in 19.  Both
    predictions are concatenated (along channels) with the backbone feature
    `from_layer` into `out_layer` for the next stage.  Returns the net.
    """
    conv_kwargs = {
        'param': [dict(lr_mult=lr, decay_mult=decay), dict(lr_mult=2*lr, decay_mult=0)],
        'weight_filler': dict(type='gaussian', std=0.01),
        'bias_filler': dict(type='constant', value=0)}
    assert from_layer in net.keys()
    # convs 1-3: 3x3/128 per branch; conv4: 1x1/512 — each followed by an
    # in-place ReLU.  `tip` tracks the current top blob of each branch.
    tip = {"L1": from_layer, "L2": from_layer}
    for idx, (nout, pad, ksize) in enumerate(
            [(128, 1, 3), (128, 1, 3), (128, 1, 3), (512, 0, 1)], start=1):
        for branch in ("L1", "L2"):
            conv_name = "conv5_{}_CPM_{}".format(idx, branch)
            net[conv_name] = L.Convolution(net[tip[branch]], num_output=nout, pad=pad, kernel_size=ksize, **conv_kwargs)
            relu_name = "relu5_{}_CPM_{}".format(idx, branch)
            net[relu_name] = L.ReLU(net[conv_name], in_place=True)
            tip[branch] = relu_name
    # conv5: linear 1x1 prediction layers (no ReLU)
    net["conv5_5_CPM_L1"] = L.Convolution(net[tip["L1"]], num_output=38, pad=0, kernel_size=1, **conv_kwargs)
    net["conv5_5_CPM_L2"] = L.Convolution(net[tip["L2"]], num_output=19, pad=0, kernel_size=1, **conv_kwargs)
    # stack both predictions on top of the backbone feature
    net[out_layer] = L.Concat(net["conv5_5_CPM_L1"], net["conv5_5_CPM_L2"], net[from_layer], axis=1)
    return net
def Pose_Stage1_COCO_train(net, from_layer="relu4_4_CPM", out_layer="concat_stage2",
                           mask_L1="vec_mask", mask_L2="heat_mask",
                           label_L1="vec_label", label_L2="heat_label", lr=1, decay=1):
    """Training flavour of the stage-1 CPM head.

    Same trunk as Pose_Stage1_COCO, plus a masked Euclidean loss per branch:
    each prediction is multiplied by its mask blob (mask_L1/mask_L2) before
    being regressed onto its label blob (label_L1/label_L2).  Returns the net.
    """
    conv_kwargs = {
        'param': [dict(lr_mult=lr, decay_mult=decay), dict(lr_mult=2*lr, decay_mult=0)],
        'weight_filler': dict(type='gaussian', std=0.01),
        'bias_filler': dict(type='constant', value=0)}
    assert from_layer in net.keys()
    # convs 1-4 on both branches, ReLU after each; `tip` is each branch's top
    tip = {"L1": from_layer, "L2": from_layer}
    for idx, (nout, pad, ksize) in enumerate(
            [(128, 1, 3), (128, 1, 3), (128, 1, 3), (512, 0, 1)], start=1):
        for branch in ("L1", "L2"):
            conv_name = "conv5_{}_CPM_{}".format(idx, branch)
            net[conv_name] = L.Convolution(net[tip[branch]], num_output=nout, pad=pad, kernel_size=ksize, **conv_kwargs)
            relu_name = "relu5_{}_CPM_{}".format(idx, branch)
            net[relu_name] = L.ReLU(net[conv_name], in_place=True)
            tip[branch] = relu_name
    # conv5: linear 1x1 prediction layers (no ReLU)
    net["conv5_5_CPM_L1"] = L.Convolution(net[tip["L1"]], num_output=38, pad=0, kernel_size=1, **conv_kwargs)
    net["conv5_5_CPM_L2"] = L.Convolution(net[tip["L2"]], num_output=19, pad=0, kernel_size=1, **conv_kwargs)
    # masked Euclidean losses: (prediction * mask) vs. label
    net.weight_stage1_L1 = L.Eltwise(net["conv5_5_CPM_L1"], net[mask_L1], eltwise_param=dict(operation=P.Eltwise.PROD))
    net.loss_stage1_L1 = L.EuclideanLoss(net.weight_stage1_L1, net[label_L1], loss_weight=1)
    net.weight_stage1_L2 = L.Eltwise(net["conv5_5_CPM_L2"], net[mask_L2], eltwise_param=dict(operation=P.Eltwise.PROD))
    net.loss_stage1_L2 = L.EuclideanLoss(net.weight_stage1_L2, net[label_L2], loss_weight=1)
    # hand both predictions plus the backbone feature to the next stage
    net[out_layer] = L.Concat(net["conv5_5_CPM_L1"], net["conv5_5_CPM_L2"], net[from_layer], concat_param=dict(axis=1))
    return net
def Pose_StageX_COCO(net, from_layer="concat_stage2", out_layer="concat_stage3", stage=2,
                     short_cut=True, base_layer="conv4_4_CPM", lr=4, decay=1):
    """Attach one CPM refinement stage (inference flavour) to `net`.

    Two parallel branches (L1/L2) of seven convs each: convs 1-5 are
    7x7/128 pad-3, conv6 is a 1x1/128 bottleneck, conv7 is the linear
    prediction (38 channels for L1, 19 for L2).  When `short_cut` is set,
    both predictions are concatenated with `base_layer` into `out_layer`
    to feed the next stage.  Returns the net.
    """
    conv_kwargs = {
        'param': [dict(lr_mult=lr, decay_mult=decay), dict(lr_mult=2*lr, decay_mult=0)],
        'weight_filler': dict(type='gaussian', std=0.01),
        'bias_filler': dict(type='constant', value=0)}
    assert from_layer in net.keys()
    # convs 1-6 on both branches; `tip` tracks each branch's current top blob
    tip = {"L1": from_layer, "L2": from_layer}
    for idx in range(1, 7):
        pad, ksize = (3, 7) if idx <= 5 else (0, 1)
        for branch in ("L1", "L2"):
            conv_name = "Mconv{}_stage{}_{}".format(idx, stage, branch)
            net[conv_name] = L.Convolution(net[tip[branch]], num_output=128, pad=pad, kernel_size=ksize, **conv_kwargs)
            relu_name = "Mrelu{}_stage{}_{}".format(idx, stage, branch)
            net[relu_name] = L.ReLU(net[conv_name], in_place=True)
            tip[branch] = relu_name
    # conv7: linear prediction layers (no ReLU)
    pred_L1 = "Mconv7_stage{}_L1".format(stage)
    net[pred_L1] = L.Convolution(net[tip["L1"]], num_output=38, pad=0, kernel_size=1, **conv_kwargs)
    pred_L2 = "Mconv7_stage{}_L2".format(stage)
    net[pred_L2] = L.Convolution(net[tip["L2"]], num_output=19, pad=0, kernel_size=1, **conv_kwargs)
    # feature concatenation feeding the next stage
    if short_cut:
        assert base_layer in net.keys()
        net[out_layer] = L.Concat(net[pred_L1], net[pred_L2], net[base_layer], axis=1)
    return net
def Pose_StageX_COCO_train(net, from_layer="concat_stage2", out_layer="concat_stage3", stage=2,
                           mask_L1="vec_mask", mask_L2="heat_mask",
                           label_L1="vec_label", label_L2="heat_label",
                           short_cut=True, base_layer="conv4_4_CPM", lr=4, decay=1):
    """Training flavour of a CPM refinement stage.

    Same trunk as Pose_StageX_COCO (two branches of 7 convs), plus a masked
    Euclidean loss per branch: the conv7 prediction is multiplied by its mask
    blob and regressed onto its label blob.  When `short_cut` is set, both
    predictions are concatenated with `base_layer` into `out_layer`.
    Returns the net.
    """
    conv_kwargs = {
        'param': [dict(lr_mult=lr, decay_mult=decay), dict(lr_mult=2*lr, decay_mult=0)],
        'weight_filler': dict(type='gaussian', std=0.01),
        'bias_filler': dict(type='constant', value=0)}
    assert from_layer in net.keys()
    # convs 1-5: 7x7/128 pad-3; conv6: 1x1/128 — ReLU after each
    tip = {"L1": from_layer, "L2": from_layer}
    for idx in range(1, 7):
        pad, ksize = (3, 7) if idx <= 5 else (0, 1)
        for branch in ("L1", "L2"):
            conv_name = "Mconv{}_stage{}_{}".format(idx, stage, branch)
            net[conv_name] = L.Convolution(net[tip[branch]], num_output=128, pad=pad, kernel_size=ksize, **conv_kwargs)
            relu_name = "Mrelu{}_stage{}_{}".format(idx, stage, branch)
            net[relu_name] = L.ReLU(net[conv_name], in_place=True)
            tip[branch] = relu_name
    # conv7: linear prediction layers (no ReLU)
    pred_L1 = "Mconv7_stage{}_L1".format(stage)
    net[pred_L1] = L.Convolution(net[tip["L1"]], num_output=38, pad=0, kernel_size=1, **conv_kwargs)
    pred_L2 = "Mconv7_stage{}_L2".format(stage)
    net[pred_L2] = L.Convolution(net[tip["L2"]], num_output=19, pad=0, kernel_size=1, **conv_kwargs)
    # masked Euclidean losses: (prediction * mask) vs. label
    weight_L1 = "weight_stage{}_L1".format(stage)
    net[weight_L1] = L.Eltwise(net[pred_L1], net[mask_L1], eltwise_param=dict(operation=P.Eltwise.PROD))
    loss_L1 = "loss_stage{}_L1".format(stage)
    net[loss_L1] = L.EuclideanLoss(net[weight_L1], net[label_L1], loss_weight=1)
    weight_L2 = "weight_stage{}_L2".format(stage)
    net[weight_L2] = L.Eltwise(net[pred_L2], net[mask_L2], eltwise_param=dict(operation=P.Eltwise.PROD))
    loss_L2 = "loss_stage{}_L2".format(stage)
    net[loss_L2] = L.EuclideanLoss(net[weight_L2], net[label_L2], loss_weight=1)
    # feature concatenation feeding the next stage
    if short_cut:
        assert base_layer in net.keys()
        net[out_layer] = L.Concat(net[pred_L1], net[pred_L2], net[base_layer], axis=1)
    return net
# Full 6-stage pose network (COCO) for inference, VGG19 pre-10 backbone.
def VGG19_PoseNet_COCO_Test(net, from_layer="data", frame_layer="orig_data", **pose_kwargs):
    """Build the complete 6-stage CPM-style COCO pose net for test time.

    Pipeline: VGG19 first-10 backbone -> CPM adaptation convs -> stage 1 ->
    refinement stages 2-6 -> ImResize -> Nms -> Connectlimb -> Visualizepose
    (the last four are custom layers, not stock Caffe; their parameters come
    from `pose_kwargs` with the defaults below).  `frame_layer` names the
    original image blob used for visualization.  Returns the modified net.
    """
    # baseNet-VGG19 (first 10 layers)
    assert from_layer in net.keys()
    # BUGFIX: honour the `from_layer` argument (it was hard-coded to "data",
    # which silently ignored a non-default caller value)
    net = VGG19Net_Pre10(net, from_layer=from_layer)
    # conv4_3_CPM & conv4_4_CPM: adapt the VGG features for the CPM stages
    kwargs = {
        'param': [dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)],
        'weight_filler': dict(type='gaussian', std=0.01),
        'bias_filler': dict(type='constant', value=0)}
    # conv4_3_CPM
    net.conv4_3_CPM = L.Convolution(net.relu4_2, num_output=256, pad=1, kernel_size=3, **kwargs)
    net.relu4_3_CPM = L.ReLU(net.conv4_3_CPM, in_place=True)
    net.conv4_4_CPM = L.Convolution(net.relu4_3_CPM, num_output=128, pad=1, kernel_size=3, **kwargs)
    net.relu4_4_CPM = L.ReLU(net.conv4_4_CPM, in_place=True)
    # Stage1
    net = Pose_Stage1_COCO(net, from_layer="relu4_4_CPM", out_layer="concat_stage2", lr=1, decay=1)
    # Stage2-6 (the last stage builds no shortcut concat of its own)
    net = Pose_StageX_COCO(net, from_layer="concat_stage2", out_layer="concat_stage3", stage=2, short_cut=True, base_layer="relu4_4_CPM", lr=4, decay=1)
    net = Pose_StageX_COCO(net, from_layer="concat_stage3", out_layer="concat_stage4", stage=3, short_cut=True, base_layer="relu4_4_CPM", lr=4, decay=1)
    net = Pose_StageX_COCO(net, from_layer="concat_stage4", out_layer="concat_stage5", stage=4, short_cut=True, base_layer="relu4_4_CPM", lr=4, decay=1)
    net = Pose_StageX_COCO(net, from_layer="concat_stage5", out_layer="concat_stage6", stage=5, short_cut=True, base_layer="relu4_4_CPM", lr=4, decay=1)
    net = Pose_StageX_COCO(net, from_layer="concat_stage6", out_layer="concat_stage7", stage=6, short_cut=False, lr=4, decay=1)
    # concat the final stage-6 outputs (heatmaps first, then PAFs)
    feaLayers = []
    feaLayers.append(net["Mconv7_stage6_L2"])
    feaLayers.append(net["Mconv7_stage6_L1"])
    net["concat_stage7"] = L.Concat(*feaLayers, axis=1)
    # Resize: upsample the score maps (custom ImResize layer)
    resize_kwargs = {
        'factor': pose_kwargs.get("resize_factor", 8),
        'scale_gap': pose_kwargs.get("resize_scale_gap", 0.3),
        'start_scale': pose_kwargs.get("resize_start_scale", 1.0),
    }
    net.resized_map = L.ImResize(net.concat_stage7, name="resize", imresize_param=resize_kwargs)
    # Nms: extract keypoint peaks from the resized maps (custom layer)
    nms_kwargs = {
        'threshold': pose_kwargs.get("nms_threshold", 0.05),
        'max_peaks': pose_kwargs.get("nms_max_peaks", 64),
        'num_parts': pose_kwargs.get("nms_num_parts", 18),
    }
    net.joints = L.Nms(net.resized_map, name="nms", nms_param=nms_kwargs)
    # ConnectLimbs: group peaks into per-person skeletons (custom layer)
    connect_kwargs = {
        'is_type_coco': pose_kwargs.get("conn_is_type_coco", True),
        'max_person': pose_kwargs.get("conn_max_person", 20),
        'max_peaks_use': pose_kwargs.get("conn_max_peaks_use", 32),
        'iters_pa_cal': pose_kwargs.get("conn_iters_pa_cal", 10),
        'connect_inter_threshold': pose_kwargs.get("conn_connect_inter_threshold", 0.05),
        'connect_inter_min_nums': pose_kwargs.get("conn_connect_inter_min_nums", 8),
        'connect_min_subset_cnt': pose_kwargs.get("conn_connect_min_subset_cnt", 3),
        'connect_min_subset_score': pose_kwargs.get("conn_connect_min_subset_score", 0.3),
    }
    net.limbs = L.Connectlimb(net.resized_map, net.joints, connect_limb_param=connect_kwargs)
    # VisualizePose: draw the result on the original frame (custom layer)
    visual_kwargs = {
        'is_type_coco': pose_kwargs.get("conn_is_type_coco", True),
        'visualize': pose_kwargs.get("visual_visualize", True),
        'draw_skeleton': pose_kwargs.get("visual_draw_skeleton", True),
        'print_score': pose_kwargs.get("visual_print_score", False),
        'type': pose_kwargs.get("visual_type", P.Visualizepose.POSE),
        'part_id': pose_kwargs.get("visual_part_id", 0),
        'from_part': pose_kwargs.get("visual_from_part", 0),
        'vec_id': pose_kwargs.get("visual_vec_id", 0),
        'from_vec': pose_kwargs.get("visual_from_vec", 0),
        'pose_threshold': pose_kwargs.get("visual_pose_threshold", 0.05),
        'write_frames': pose_kwargs.get("visual_write_frames", False),
        'output_directory': pose_kwargs.get("visual_output_directory", ""),
    }
    net.finished = L.Visualizepose(net[frame_layer], net.resized_map, net.limbs, visualize_pose_param=visual_kwargs)
    return net
def VGG19_PoseNet_Stage3_COCO_Test(net, from_layer="data", frame_layer="orig_data", **pose_kwargs):
    """Build a lightweight 3-stage variant of the COCO pose net for test time.

    Identical to VGG19_PoseNet_COCO_Test but with only stages 1-3, so the
    final predictions are Mconv7_stage3_L1/L2 and the post-processing reads
    "concat_stage4".  Custom layer parameters come from `pose_kwargs`.
    Returns the modified net.
    """
    # baseNet-VGG19 (first 10 layers)
    assert from_layer in net.keys()
    # BUGFIX: honour the `from_layer` argument (it was hard-coded to "data",
    # which silently ignored a non-default caller value)
    net = VGG19Net_Pre10(net, from_layer=from_layer)
    # conv4_3_CPM & conv4_4_CPM: adapt the VGG features for the CPM stages
    kwargs = {
        'param': [dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)],
        'weight_filler': dict(type='gaussian', std=0.01),
        'bias_filler': dict(type='constant', value=0)}
    # conv4_3_CPM
    net.conv4_3_CPM = L.Convolution(net.relu4_2, num_output=256, pad=1, kernel_size=3, **kwargs)
    net.relu4_3_CPM = L.ReLU(net.conv4_3_CPM, in_place=True)
    net.conv4_4_CPM = L.Convolution(net.relu4_3_CPM, num_output=128, pad=1, kernel_size=3, **kwargs)
    net.relu4_4_CPM = L.ReLU(net.conv4_4_CPM, in_place=True)
    # Stage1
    net = Pose_Stage1_COCO(net, from_layer="relu4_4_CPM", out_layer="concat_stage2", lr=1, decay=1)
    # Stage2-3 (stage 3 is terminal: no shortcut concat)
    net = Pose_StageX_COCO(net, from_layer="concat_stage2", out_layer="concat_stage3", stage=2, short_cut=True, base_layer="relu4_4_CPM", lr=4, decay=1)
    net = Pose_StageX_COCO(net, from_layer="concat_stage3", out_layer="concat_stage4", stage=3, short_cut=False, lr=4, decay=1)
    # concat the final stage-3 outputs (heatmaps first, then PAFs)
    feaLayers = []
    feaLayers.append(net["Mconv7_stage3_L2"])
    feaLayers.append(net["Mconv7_stage3_L1"])
    net["concat_stage4"] = L.Concat(*feaLayers, axis=1)
    # Resize: upsample the score maps (custom ImResize layer)
    resize_kwargs = {
        'factor': pose_kwargs.get("resize_factor", 8),
        'scale_gap': pose_kwargs.get("resize_scale_gap", 0.3),
        'start_scale': pose_kwargs.get("resize_start_scale", 1.0),
    }
    net.resized_map = L.ImResize(net.concat_stage4, name="resize", imresize_param=resize_kwargs)
    # Nms: extract keypoint peaks from the resized maps (custom layer)
    nms_kwargs = {
        'threshold': pose_kwargs.get("nms_threshold", 0.05),
        'max_peaks': pose_kwargs.get("nms_max_peaks", 64),
        'num_parts': pose_kwargs.get("nms_num_parts", 18),
    }
    net.joints = L.Nms(net.resized_map, name="nms", nms_param=nms_kwargs)
    # ConnectLimbs: group peaks into per-person skeletons (custom layer)
    connect_kwargs = {
        'is_type_coco': pose_kwargs.get("conn_is_type_coco", True),
        'max_person': pose_kwargs.get("conn_max_person", 20),
        'max_peaks_use': pose_kwargs.get("conn_max_peaks_use", 32),
        'iters_pa_cal': pose_kwargs.get("conn_iters_pa_cal", 10),
        'connect_inter_threshold': pose_kwargs.get("conn_connect_inter_threshold", 0.05),
        'connect_inter_min_nums': pose_kwargs.get("conn_connect_inter_min_nums", 8),
        'connect_min_subset_cnt': pose_kwargs.get("conn_connect_min_subset_cnt", 3),
        'connect_min_subset_score': pose_kwargs.get("conn_connect_min_subset_score", 0.3),
    }
    net.limbs = L.Connectlimb(net.resized_map, net.joints, connect_limb_param=connect_kwargs)
    # VisualizePose: draw the result on the original frame (custom layer)
    visual_kwargs = {
        'is_type_coco': pose_kwargs.get("conn_is_type_coco", True),
        'type': pose_kwargs.get("visual_type", P.Visualizepose.POSE),
        'visualize': pose_kwargs.get("visual_visualize", True),
        'draw_skeleton': pose_kwargs.get("visual_draw_skeleton", True),
        'print_score': pose_kwargs.get("visual_print_score", False),
        'part_id': pose_kwargs.get("visual_part_id", 0),
        'from_part': pose_kwargs.get("visual_from_part", 0),
        'vec_id': pose_kwargs.get("visual_vec_id", 0),
        'from_vec': pose_kwargs.get("visual_from_vec", 0),
        'pose_threshold': pose_kwargs.get("visual_pose_threshold", 0.05),
        'write_frames': pose_kwargs.get("visual_write_frames", False),
        'output_directory': pose_kwargs.get("visual_output_directory", ""),
    }
    net.finished = L.Visualizepose(net[frame_layer], net.resized_map, net.limbs, visualize_pose_param=visual_kwargs)
    return net
def VGG19_PoseNet_COCO_6S_Train(net, data_layer="data", label_layer="label", train=True, **pose_test_kwargs):
    """Build the 6-stage COCO pose net for training (train=True) or for
    masked evaluation (train=False).

    The label blob is sliced into mask/target blobs (plus a ground-truth
    blob `gt` when train=False), targets are pre-masked, and all six stages
    are attached with per-stage masked losses.  In eval mode the custom
    ImResize/Nms/Connectlimb/PoseEval layers are appended, parameterized via
    `pose_test_kwargs`.  Returns the modified net.
    """
    # Slice for label and mask
    # channel split points 38/57/95(/114) -> vec_mask, heat_mask, vec_temp,
    # heat_temp(, gt) -- layout presumably matches the data layer; TODO confirm
    if train:
        net.vec_mask, net.heat_mask, net.vec_temp, net.heat_temp = \
            L.Slice(net[label_layer], ntop=4, slice_param=dict(slice_point=[38,57,95], axis=1))
    else:
        net.vec_mask, net.heat_mask, net.vec_temp, net.heat_temp, net.gt = \
            L.Slice(net[label_layer], ntop=5, slice_param=dict(slice_point=[38,57,95,114], axis=1))
    # Label: pre-mask the regression targets
    net.vec_label = L.Eltwise(net.vec_mask, net.vec_temp, eltwise_param=dict(operation=P.Eltwise.PROD))
    net.heat_label = L.Eltwise(net.heat_mask, net.heat_temp, eltwise_param=dict(operation=P.Eltwise.PROD))
    # baseNet-VGG19 (first 10 layers)
    net = VGG19Net_Pre10(net, from_layer=data_layer)
    # conv4_3_CPM & conv4_4_CPM: adapt the VGG features for the CPM stages
    kwargs = {
        'param': [dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)],
        'weight_filler': dict(type='gaussian', std=0.01),
        'bias_filler': dict(type='constant', value=0)}
    # conv4_3_CPM
    net.conv4_3_CPM = L.Convolution(net.relu4_2, num_output=256, pad=1, kernel_size=3, **kwargs)
    net.relu4_3_CPM = L.ReLU(net.conv4_3_CPM, in_place=True)
    net.conv4_4_CPM = L.Convolution(net.relu4_3_CPM, num_output=128, pad=1, kernel_size=3, **kwargs)
    net.relu4_4_CPM = L.ReLU(net.conv4_4_CPM, in_place=True)
    # Stage1 (with losses)
    net = Pose_Stage1_COCO_train(net, from_layer="relu4_4_CPM", out_layer="concat_stage2", \
        mask_L1="vec_mask", mask_L2="heat_mask", \
        label_L1="vec_label", label_L2="heat_label", lr=1, decay=1)
    # Stage2-6 (each with losses; the last stage has no shortcut concat)
    net = Pose_StageX_COCO_train(net, from_layer="concat_stage2", out_layer="concat_stage3", stage=2, \
        mask_L1="vec_mask", mask_L2="heat_mask", \
        label_L1="vec_label", label_L2="heat_label", \
        short_cut=True, base_layer="relu4_4_CPM", lr=4, decay=1)
    net = Pose_StageX_COCO_train(net, from_layer="concat_stage3", out_layer="concat_stage4", stage=3, \
        mask_L1="vec_mask", mask_L2="heat_mask", \
        label_L1="vec_label", label_L2="heat_label", \
        short_cut=True, base_layer="relu4_4_CPM", lr=4, decay=1)
    net = Pose_StageX_COCO_train(net, from_layer="concat_stage4", out_layer="concat_stage5", stage=4, \
        mask_L1="vec_mask", mask_L2="heat_mask", \
        label_L1="vec_label", label_L2="heat_label", \
        short_cut=True, base_layer="relu4_4_CPM", lr=4, decay=1)
    net = Pose_StageX_COCO_train(net, from_layer="concat_stage5", out_layer="concat_stage6", stage=5, \
        mask_L1="vec_mask", mask_L2="heat_mask", \
        label_L1="vec_label", label_L2="heat_label", \
        short_cut=True, base_layer="relu4_4_CPM", lr=4, decay=1)
    net = Pose_StageX_COCO_train(net, from_layer="concat_stage6", out_layer="concat_stage7", stage=6, \
        mask_L1="vec_mask", mask_L2="heat_mask", \
        label_L1="vec_label", label_L2="heat_label", \
        short_cut=False, lr=4, decay=1)
    # for Test: mask the final predictions and run the evaluation pipeline
    if not train:
        net.vec_out = L.Eltwise(net.vec_mask, net.Mconv7_stage6_L1, eltwise_param=dict(operation=P.Eltwise.PROD))
        net.heat_out = L.Eltwise(net.heat_mask, net.Mconv7_stage6_L2, eltwise_param=dict(operation=P.Eltwise.PROD))
        # heatmaps first, then PAF vectors
        feaLayers = []
        feaLayers.append(net.heat_out)
        feaLayers.append(net.vec_out)
        net["concat_stage7"] = L.Concat(*feaLayers, axis=1)
        # Resize: upsample the score maps (custom ImResize layer)
        resize_kwargs = {
            'factor': pose_test_kwargs.get("resize_factor", 8),
            'scale_gap': pose_test_kwargs.get("resize_scale_gap", 0.3),
            'start_scale': pose_test_kwargs.get("resize_start_scale", 1.0),
        }
        net.resized_map = L.ImResize(net.concat_stage7, name="resize", imresize_param=resize_kwargs)
        # Nms: extract keypoint peaks (custom layer)
        nms_kwargs = {
            'threshold': pose_test_kwargs.get("nms_threshold", 0.05),
            'max_peaks': pose_test_kwargs.get("nms_max_peaks", 64),
            'num_parts': pose_test_kwargs.get("nms_num_parts", 18),
        }
        net.joints = L.Nms(net.resized_map, name="nms", nms_param=nms_kwargs)
        # ConnectLimbs: group peaks into per-person skeletons (custom layer)
        connect_kwargs = {
            'is_type_coco': pose_test_kwargs.get("conn_is_type_coco", True),
            'max_person': pose_test_kwargs.get("conn_max_person", 20),
            'max_peaks_use': pose_test_kwargs.get("conn_max_peaks_use", 32),
            'iters_pa_cal': pose_test_kwargs.get("conn_iters_pa_cal", 10),
            'connect_inter_threshold': pose_test_kwargs.get("conn_connect_inter_threshold", 0.05),
            'connect_inter_min_nums': pose_test_kwargs.get("conn_connect_inter_min_nums", 8),
            'connect_min_subset_cnt': pose_test_kwargs.get("conn_connect_min_subset_cnt", 3),
            'connect_min_subset_score': pose_test_kwargs.get("conn_connect_min_subset_score", 0.3),
        }
        net.limbs = L.Connectlimb(net.resized_map, net.joints, connect_limb_param=connect_kwargs)
        # Eval: compare assembled limbs against ground truth (custom PoseEval
        # layer; oks_thre presumably OKS thresholds -- verify in layer impl)
        eval_kwargs = {
            'stride': 8,
            'area_thre': pose_test_kwargs.get("eval_area_thre", 96*96),
            'eval_iters': pose_test_kwargs.get("eval_test_iters", 10000),
            'oks_thre': pose_test_kwargs.get("eval_oks_thre", [0.5,0.55,0.6,0.65,0.7,0.75,0.8,0.85,0.9]),
        }
        net.eval = L.PoseEval(net.limbs, net.gt, pose_eval_param=eval_kwargs)
    return net
def VGG19_PoseNet_COCO_3S_Train(net, data_layer="data", label_layer="label", train=True, **pose_test_kwargs):
    """Build the 3-stage VGG19-based COCO pose network spec.

    Args:
        net: caffe NetSpec to append layers to.
        data_layer: name of the input data layer.
        label_layer: name of the packed label blob; sliced below into
            PAF/heatmap masks and targets (plus ground truth when train=False).
        train: if True, build the training net only; if False, also append
            the test-time decode pipeline (resize -> NMS -> limb
            connection -> evaluation).
        **pose_test_kwargs: optional overrides for the test-time layer
            parameters (resize_*, nms_*, conn_*, eval_* keys).

    Returns:
        The same NetSpec with all layers appended.
    """
    # Slice the packed label blob along the channel axis: [0,38) PAF mask,
    # [38,57) heatmap mask, [57,95) PAF target, [95,114) heatmap target;
    # the test net carries an extra ground-truth slice after 114.
    if train:
        net.vec_mask, net.heat_mask, net.vec_temp, net.heat_temp = \
            L.Slice(net[label_layer], ntop=4, slice_param=dict(slice_point=[38,57,95], axis=1))
    else:
        net.vec_mask, net.heat_mask, net.vec_temp, net.heat_temp, net.gt = \
            L.Slice(net[label_layer], ntop=5, slice_param=dict(slice_point=[38,57,95,114], axis=1))
    # Zero out regression targets in unlabeled regions via elementwise product.
    net.vec_label = L.Eltwise(net.vec_mask, net.vec_temp, eltwise_param=dict(operation=P.Eltwise.PROD))
    net.heat_label = L.Eltwise(net.heat_mask, net.heat_temp, eltwise_param=dict(operation=P.Eltwise.PROD))
    # Backbone: first 10 convolutional layers of VGG19.
    net = VGG19Net_Pre10(net, from_layer=data_layer)
    # Shared settings for the two CPM adaptation convolutions below.
    kwargs = {
        'param': [dict(lr_mult=1, decay_mult=1), dict(lr_mult=2, decay_mult=0)],
        'weight_filler': dict(type='gaussian', std=0.01),
        'bias_filler': dict(type='constant', value=0)}
    # conv4_3_CPM / conv4_4_CPM adapt VGG features for the pose stages.
    net.conv4_3_CPM = L.Convolution(net.relu4_2, num_output=256, pad=1, kernel_size=3, **kwargs)
    net.relu4_3_CPM = L.ReLU(net.conv4_3_CPM, in_place=True)
    net.conv4_4_CPM = L.Convolution(net.relu4_3_CPM, num_output=128, pad=1, kernel_size=3, **kwargs)
    net.relu4_4_CPM = L.ReLU(net.conv4_4_CPM, in_place=True)
    # Stage 1 predicts directly from the adapted backbone features.
    net = Pose_Stage1_COCO_train(net, from_layer="relu4_4_CPM", out_layer="concat_stage2", \
        mask_L1="vec_mask", mask_L2="heat_mask", \
        label_L1="vec_label", label_L2="heat_label", lr=1, decay=1)
    # Stages 2-3 refine; stage 2 concatenates the backbone features (short_cut).
    net = Pose_StageX_COCO_train(net, from_layer="concat_stage2", out_layer="concat_stage3", stage=2, \
        mask_L1="vec_mask", mask_L2="heat_mask", \
        label_L1="vec_label", label_L2="heat_label", \
        short_cut=True, base_layer="relu4_4_CPM", lr=4, decay=1)
    net = Pose_StageX_COCO_train(net, from_layer="concat_stage3", out_layer="concat_stage4", stage=3, \
        mask_L1="vec_mask", mask_L2="heat_mask", \
        label_L1="vec_label", label_L2="heat_label", \
        short_cut=False, lr=4, decay=1)
    # Test-time decode pipeline appended only when train=False.
    if not train:
        # Mask the final stage-3 outputs before decoding.
        net.vec_out = L.Eltwise(net.vec_mask, net.Mconv7_stage3_L1, eltwise_param=dict(operation=P.Eltwise.PROD))
        net.heat_out = L.Eltwise(net.heat_mask, net.Mconv7_stage3_L2, eltwise_param=dict(operation=P.Eltwise.PROD))
        feaLayers = []
        feaLayers.append(net.heat_out)
        feaLayers.append(net.vec_out)
        net["concat_stage4"] = L.Concat(*feaLayers, axis=1)
        # Upsample the score maps (factor 8 undoes the network stride).
        resize_kwargs = {
            'factor': pose_test_kwargs.get("resize_factor", 8),
            'scale_gap': pose_test_kwargs.get("resize_scale_gap", 0.3),
            'start_scale': pose_test_kwargs.get("resize_start_scale", 1.0),
        }
        net.resized_map = L.ImResize(net.concat_stage4, name="resize", imresize_param=resize_kwargs)
        # Peak (keypoint candidate) detection on the heatmaps.
        nms_kwargs = {
            'threshold': pose_test_kwargs.get("nms_threshold", 0.05),
            'max_peaks': pose_test_kwargs.get("nms_max_peaks", 64),
            'num_parts': pose_test_kwargs.get("nms_num_parts", 18),
        }
        net.joints = L.Nms(net.resized_map, name="nms", nms_param=nms_kwargs)
        # Group keypoints into per-person skeletons using the PAF maps.
        connect_kwargs = {
            'is_type_coco': pose_test_kwargs.get("conn_is_type_coco", True),
            'max_person': pose_test_kwargs.get("conn_max_person", 20),
            'max_peaks_use': pose_test_kwargs.get("conn_max_peaks_use", 32),
            'iters_pa_cal': pose_test_kwargs.get("conn_iters_pa_cal", 10),
            'connect_inter_threshold': pose_test_kwargs.get("conn_connect_inter_threshold", 0.05),
            'connect_inter_min_nums': pose_test_kwargs.get("conn_connect_inter_min_nums", 8),
            'connect_min_subset_cnt': pose_test_kwargs.get("conn_connect_min_subset_cnt", 3),
            'connect_min_subset_score': pose_test_kwargs.get("conn_connect_min_subset_score", 0.3),
        }
        net.limbs = L.Connectlimb(net.resized_map, net.joints, connect_limb_param=connect_kwargs)
        # OKS-thresholded evaluation against the ground-truth slice.
        # NOTE(review): unlike the 6-stage builder earlier in this file, no
        # 'eval_iters' key is supplied here -- confirm the layer default is OK.
        eval_kwargs = {
            'stride': 8,
            'area_thre': pose_test_kwargs.get("eval_area_thre", 96*96),
            'oks_thre': pose_test_kwargs.get("eval_oks_thre", [0.5,0.55,0.6,0.65,0.7,0.75,0.8,0.85,0.9]),
        }
        net.eval = L.PoseEval(net.limbs, net.gt, pose_eval_param=eval_kwargs)
    return net
| 57.794014
| 152
| 0.671063
| 5,211
| 32,827
| 3.883324
| 0.041067
| 0.037804
| 0.04151
| 0.02965
| 0.977861
| 0.966495
| 0.964519
| 0.961109
| 0.960269
| 0.952659
| 0
| 0.056064
| 0.18497
| 32,827
| 567
| 153
| 57.895944
| 0.70028
| 0.0329
| 0
| 0.86875
| 0
| 0
| 0.163972
| 0.028292
| 0
| 0
| 0
| 0
| 0.016667
| 1
| 0.016667
| false
| 0
| 0.014583
| 0
| 0.047917
| 0.004167
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2e9028cb534c7494f90383884ebc8428e472835a
| 8,114
|
py
|
Python
|
symphony/cli/tests/pyinventory_tests/grpc/rpc_pb2_grpc.py
|
idoshveki/magma
|
8022267bd8b8d94913fbb9a0836880361d785446
|
[
"BSD-3-Clause"
] | 2
|
2020-11-05T18:58:26.000Z
|
2021-02-09T06:42:49.000Z
|
symphony/cli/tests/pyinventory_tests/grpc/rpc_pb2_grpc.py
|
idoshveki/magma
|
8022267bd8b8d94913fbb9a0836880361d785446
|
[
"BSD-3-Clause"
] | 10
|
2021-03-31T20:19:00.000Z
|
2022-02-19T07:09:57.000Z
|
symphony/cli/tests/pyinventory_tests/grpc/rpc_pb2_grpc.py
|
idoshveki/magma
|
8022267bd8b8d94913fbb9a0836880361d785446
|
[
"BSD-3-Clause"
] | 3
|
2020-08-20T18:45:34.000Z
|
2020-08-20T20:18:42.000Z
|
#!/usr/bin/env python3
# pyre-ignore-all-errors
# @generated AUTOGENERATED file. Do not Change!
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
import rpc_pb2 as rpc__pb2
class TenantServiceStub(object):
    """Client-side stub for the graph.TenantService RPC service."""

    def __init__(self, channel):
        """Build one unary-unary callable per service method.

        Args:
            channel: A grpc.Channel.
        """
        # Shorthand for the two well-known protobuf wrapper types used below.
        _string_value = google_dot_protobuf_dot_wrappers__pb2.StringValue
        _empty = google_dot_protobuf_dot_empty__pb2.Empty
        self.Create = channel.unary_unary(
            '/graph.TenantService/Create',
            request_serializer=_string_value.SerializeToString,
            response_deserializer=rpc__pb2.Tenant.FromString,
        )
        self.List = channel.unary_unary(
            '/graph.TenantService/List',
            request_serializer=_empty.SerializeToString,
            response_deserializer=rpc__pb2.TenantList.FromString,
        )
        self.Get = channel.unary_unary(
            '/graph.TenantService/Get',
            request_serializer=_string_value.SerializeToString,
            response_deserializer=rpc__pb2.Tenant.FromString,
        )
        self.Truncate = channel.unary_unary(
            '/graph.TenantService/Truncate',
            request_serializer=_string_value.SerializeToString,
            response_deserializer=_empty.FromString,
        )
        self.Delete = channel.unary_unary(
            '/graph.TenantService/Delete',
            request_serializer=_string_value.SerializeToString,
            response_deserializer=_empty.FromString,
        )
class TenantServiceServicer(object):
    """Server-side interface for graph.TenantService; subclass and override."""

    def _unimplemented(self, context):
        # Shared body for every stubbed-out handler below: report
        # UNIMPLEMENTED to the caller, then raise locally.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Create(self, request, context):
        self._unimplemented(context)

    def List(self, request, context):
        self._unimplemented(context)

    def Get(self, request, context):
        self._unimplemented(context)

    def Truncate(self, request, context):
        self._unimplemented(context)

    def Delete(self, request, context):
        self._unimplemented(context)
def add_TenantServiceServicer_to_server(servicer, server):
    """Register `servicer`'s graph.TenantService handlers on `server`."""
    _string_value = google_dot_protobuf_dot_wrappers__pb2.StringValue
    _empty = google_dot_protobuf_dot_empty__pb2.Empty
    handlers = {
        'Create': grpc.unary_unary_rpc_method_handler(
            servicer.Create,
            request_deserializer=_string_value.FromString,
            response_serializer=rpc__pb2.Tenant.SerializeToString,
        ),
        'List': grpc.unary_unary_rpc_method_handler(
            servicer.List,
            request_deserializer=_empty.FromString,
            response_serializer=rpc__pb2.TenantList.SerializeToString,
        ),
        'Get': grpc.unary_unary_rpc_method_handler(
            servicer.Get,
            request_deserializer=_string_value.FromString,
            response_serializer=rpc__pb2.Tenant.SerializeToString,
        ),
        'Truncate': grpc.unary_unary_rpc_method_handler(
            servicer.Truncate,
            request_deserializer=_string_value.FromString,
            response_serializer=_empty.SerializeToString,
        ),
        'Delete': grpc.unary_unary_rpc_method_handler(
            servicer.Delete,
            request_deserializer=_string_value.FromString,
            response_serializer=_empty.SerializeToString,
        ),
    }
    server.add_generic_rpc_handlers(
        (grpc.method_handlers_generic_handler('graph.TenantService', handlers),)
    )
class UserServiceStub(object):
    """Client-side stub for the graph.UserService RPC service."""

    def __init__(self, channel):
        """Build one unary-unary callable per service method.

        Args:
            channel: A grpc.Channel.
        """
        self.Create = channel.unary_unary(
            '/graph.UserService/Create',
            request_serializer=rpc__pb2.AddUserInput.SerializeToString,
            response_deserializer=rpc__pb2.User.FromString,
        )
        self.Delete = channel.unary_unary(
            '/graph.UserService/Delete',
            request_serializer=rpc__pb2.UserInput.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
class UserServiceServicer(object):
    """Server-side interface for graph.UserService; subclass and override."""

    def _unimplemented(self, context):
        # Shared body for the stubbed-out handlers below.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Create(self, request, context):
        self._unimplemented(context)

    def Delete(self, request, context):
        self._unimplemented(context)
def add_UserServiceServicer_to_server(servicer, server):
    """Register `servicer`'s graph.UserService handlers on `server`."""
    handlers = {
        'Create': grpc.unary_unary_rpc_method_handler(
            servicer.Create,
            request_deserializer=rpc__pb2.AddUserInput.FromString,
            response_serializer=rpc__pb2.User.SerializeToString,
        ),
        'Delete': grpc.unary_unary_rpc_method_handler(
            servicer.Delete,
            request_deserializer=rpc__pb2.UserInput.FromString,
            response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
        ),
    }
    server.add_generic_rpc_handlers(
        (grpc.method_handlers_generic_handler('graph.UserService', handlers),)
    )
class ActionsAlertServiceStub(object):
    """Client-side stub for the graph.ActionsAlertService RPC service."""

    def __init__(self, channel):
        """Build the single Trigger unary-unary callable.

        Args:
            channel: A grpc.Channel.
        """
        self.Trigger = channel.unary_unary(
            '/graph.ActionsAlertService/Trigger',
            request_serializer=rpc__pb2.AlertPayload.SerializeToString,
            response_deserializer=rpc__pb2.ExecutionResult.FromString,
        )
class ActionsAlertServiceServicer(object):
    """Server-side interface for graph.ActionsAlertService; subclass and override."""

    def Trigger(self, request, context):
        """Default handler: report UNIMPLEMENTED to the caller and raise."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_ActionsAlertServiceServicer_to_server(servicer, server):
    """Register `servicer`'s graph.ActionsAlertService handlers on `server`."""
    handlers = {
        'Trigger': grpc.unary_unary_rpc_method_handler(
            servicer.Trigger,
            request_deserializer=rpc__pb2.AlertPayload.FromString,
            response_serializer=rpc__pb2.ExecutionResult.SerializeToString,
        ),
    }
    server.add_generic_rpc_handlers(
        (grpc.method_handlers_generic_handler('graph.ActionsAlertService', handlers),)
    )
| 36.54955
| 95
| 0.749445
| 874
| 8,114
| 6.625858
| 0.10984
| 0.027974
| 0.052841
| 0.062165
| 0.84355
| 0.781903
| 0.76757
| 0.72181
| 0.72181
| 0.690209
| 0
| 0.005813
| 0.173158
| 8,114
| 221
| 96
| 36.714932
| 0.857356
| 0.133473
| 0
| 0.585987
| 1
| 0
| 0.099296
| 0.034631
| 0
| 0
| 0
| 0
| 0
| 1
| 0.089172
| false
| 0.089172
| 0.025478
| 0
| 0.152866
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
cf01fca2d9349ce6aab8f7e6b714d39c337e8ef4
| 168
|
py
|
Python
|
mogp_emulator/linalg/__init__.py
|
EXAUQ/mogp-emulator
|
9d5772135498bdf5b95b44b4afb065c2c266f899
|
[
"MIT"
] | null | null | null |
mogp_emulator/linalg/__init__.py
|
EXAUQ/mogp-emulator
|
9d5772135498bdf5b95b44b4afb065c2c266f899
|
[
"MIT"
] | null | null | null |
mogp_emulator/linalg/__init__.py
|
EXAUQ/mogp-emulator
|
9d5772135498bdf5b95b44b4afb065c2c266f899
|
[
"MIT"
] | null | null | null |
from mogp_emulator.linalg.cholesky import cholesky_factor
from mogp_emulator.linalg.linalg_utils import calc_Ainv, calc_A_deriv, calc_mean_params, calc_R, logdet_deriv
| 56
| 109
| 0.880952
| 27
| 168
| 5.074074
| 0.592593
| 0.116788
| 0.233577
| 0.321168
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 168
| 2
| 110
| 84
| 0.878205
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cf37bcfd987f5e4b93151e9df21128ba5b1e0486
| 221
|
py
|
Python
|
codewars/7kyu/amrlotfy77/Sum of Triangular Numbers/test_bench.py
|
ictcubeMENA/Training_one
|
dff6bee96ba42babe4888e5cf9a9448a6fd93fc3
|
[
"MIT"
] | null | null | null |
codewars/7kyu/amrlotfy77/Sum of Triangular Numbers/test_bench.py
|
ictcubeMENA/Training_one
|
dff6bee96ba42babe4888e5cf9a9448a6fd93fc3
|
[
"MIT"
] | 2
|
2019-01-22T10:53:42.000Z
|
2019-01-31T08:02:48.000Z
|
codewars/7kyu/amrlotfy77/Sum of Triangular Numbers/test_bench.py
|
ictcubeMENA/Training_one
|
dff6bee96ba42babe4888e5cf9a9448a6fd93fc3
|
[
"MIT"
] | 13
|
2019-01-22T10:37:42.000Z
|
2019-01-25T13:30:43.000Z
|
from main import sum_triangular_numbers, sum_triangular_numbers1
def test1(benchmark):
    """Benchmark sum_triangular_numbers(6) and pin its known result."""
    total = benchmark(sum_triangular_numbers, 6)
    assert total == 56
def test(benchmark):
    """Benchmark the alternative implementation against the same expected value."""
    total = benchmark(sum_triangular_numbers1, 6)
    assert total == 56
| 22.1
| 64
| 0.782805
| 29
| 221
| 5.689655
| 0.482759
| 0.315152
| 0.242424
| 0.327273
| 0.448485
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047368
| 0.140271
| 221
| 9
| 65
| 24.555556
| 0.821053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.4
| false
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
cf7d159f30cbb57dd196bf0e53457d87edd76e02
| 4,399
|
py
|
Python
|
short-read-mngs/test/non_host_alignment/test_RunAlignment.py
|
truwl/idseq-workflows
|
d9b8f5af8a285b4bfed6bf8f7dc4b3ccf40c8a6d
|
[
"MIT"
] | 30
|
2020-05-23T21:23:38.000Z
|
2022-03-24T17:18:47.000Z
|
short-read-mngs/test/non_host_alignment/test_RunAlignment.py
|
grunwaldlab/idseq-workflows
|
cacfaa02f014ba06b8fb69e62911ab7fd5d88d9a
|
[
"MIT"
] | 65
|
2020-05-27T14:21:26.000Z
|
2021-11-18T17:58:56.000Z
|
short-read-mngs/test/non_host_alignment/test_RunAlignment.py
|
grunwaldlab/idseq-workflows
|
cacfaa02f014ba06b8fb69e62911ab7fd5d88d9a
|
[
"MIT"
] | 12
|
2020-08-24T12:00:28.000Z
|
2022-02-03T08:28:02.000Z
|
import os
import json
import csv
import tempfile
def test_RunAlignmentBlacklist(util, short_read_mngs_bench3_viral_outputs):
    """RunAlignment_gsnap_out drops taxids listed in the taxon blacklist.

    First runs the task unmodified and checks two control taxids appear in
    the hitsummary; then reruns with those taxids blacklisted and checks
    they are filtered out and that hitsummary/deduped rows stay aligned.
    """
    task_name = "RunAlignment_gsnap_out"
    # load the task's inputs from the end-to-end workflow test
    inputs, _ = util.miniwdl_inputs_outputs(
        os.path.join(
            short_read_mngs_bench3_viral_outputs["dir"],
            "call-non_host_alignment",
            f"call-{task_name}",
        )
    )
    # Control run: no blacklist supplied.
    outp = util.miniwdl_run(
        util.repo_dir() / "short-read-mngs/non_host_alignment.wdl",
        "--task",
        task_name,
        "-i",
        json.dumps(inputs),
    )
    with open(os.path.join(outp["dir"], outp["outputs"][f"{task_name}.gsnap_hitsummary_tab"])) as f:
        taxids = set(row[2] for row in csv.reader(f, delimiter="\t"))
        assert "37124" in taxids, "taxid should be in hitsummary unless filtered out"
        assert "1273712" in taxids, "taxid should be in hitsummary unless filtered out"
    with tempfile.NamedTemporaryFile(prefix=os.path.dirname(__file__), mode="w") as blacklist_file:
        blacklist_file.writelines(["37124\n", "1273712\n"])
        # seek(0) flushes the text buffer so the task can read the file by name.
        blacklist_file.seek(0)
        # Fixed: removed a stray no-op `blacklist_file.writelines` statement
        # (bare attribute access, never called) left over from editing.
        inputs["taxon_blacklist"] = blacklist_file.name
        outp = util.miniwdl_run(
            util.repo_dir() / "short-read-mngs/non_host_alignment.wdl",
            "--task",
            task_name,
            "-i",
            json.dumps(inputs),
        )
        hitsummary = os.path.join(outp["dir"], outp["outputs"][f"{task_name}.gsnap_hitsummary_tab"])
        deduped = os.path.join(outp["dir"], outp["outputs"][f"{task_name}.gsnap_deduped_m8"])
        with open(hitsummary) as f:
            taxids = set(row[2] for row in csv.reader(f, delimiter="\t"))
            assert "37124" not in taxids, "taxid should be filtered out"
            assert "1273712" not in taxids, "taxid should be filtered out"
        with open(hitsummary) as hf, open(deduped) as df:
            rows = zip(csv.reader(hf, delimiter="\t"), csv.reader(df, delimiter="\t"))
            assert all(
                hrow[0] == drow[0] for hrow, drow in rows
            ), "hitsummary and deduped output should be aligned"
def test_RunAlignmentDeuterostomeFilter(util, short_read_mngs_bench3_viral_outputs):
    """RunAlignment_gsnap_out filters taxids present in the deuterostome DB."""
    task_name = "RunAlignment_gsnap_out"
    # Recover the task inputs recorded by the end-to-end workflow run.
    inputs, _ = util.miniwdl_inputs_outputs(
        os.path.join(
            short_read_mngs_bench3_viral_outputs["dir"],
            "call-non_host_alignment",
            f"call-{task_name}",
        )
    )
    wdl_path = util.repo_dir() / "short-read-mngs/non_host_alignment.wdl"
    # Control run: deuterostome filter disabled.
    outp = util.miniwdl_run(wdl_path, "--task", task_name, "-i", json.dumps(inputs))
    with open(os.path.join(outp["dir"], outp["outputs"][f"{task_name}.gsnap_hitsummary_tab"])) as fh:
        seen = {line[2] for line in csv.reader(fh, delimiter="\t")}
        assert "37124" in seen, "taxid should be in hitsummary unless filtered out"
        assert "1273712" in seen, "taxid should be in hitsummary unless filtered out"
    with tempfile.NamedTemporaryFile(
        prefix=os.path.dirname(__file__), mode="w"
    ) as deuterostome_file:
        # Put the two control taxids into a throwaway deuterostome DB.
        deuterostome_file.writelines(["37124\n", "1273712\n"])
        deuterostome_file.seek(0)
        inputs["deuterostome_db"] = deuterostome_file.name
        inputs["use_deuterostome_filter"] = True
        outp = util.miniwdl_run(wdl_path, "--task", task_name, "-i", json.dumps(inputs))
        hits_path = os.path.join(outp["dir"], outp["outputs"][f"{task_name}.gsnap_hitsummary_tab"])
        dedup_path = os.path.join(outp["dir"], outp["outputs"][f"{task_name}.gsnap_deduped_m8"])
        with open(hits_path) as fh:
            seen = {line[2] for line in csv.reader(fh, delimiter="\t")}
            assert "37124" not in seen, "taxid should be filtered out"
            assert "1273712" not in seen, "taxid should be filtered out"
        with open(hits_path) as hf, open(dedup_path) as df:
            paired = zip(csv.reader(hf, delimiter="\t"), csv.reader(df, delimiter="\t"))
            assert all(
                h[0] == d[0] for h, d in paired
            ), "hitsummary and deduped output should be aligned"
| 37.598291
| 100
| 0.621278
| 574
| 4,399
| 4.58885
| 0.167247
| 0.042521
| 0.039484
| 0.057707
| 0.877752
| 0.877752
| 0.856492
| 0.856492
| 0.856492
| 0.856492
| 0
| 0.026691
| 0.250511
| 4,399
| 116
| 101
| 37.922414
| 0.772217
| 0.025688
| 0
| 0.717391
| 0
| 0
| 0.258931
| 0.104833
| 0
| 0
| 0
| 0
| 0.108696
| 1
| 0.021739
| false
| 0
| 0.043478
| 0
| 0.065217
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d8469748f473355713a9f873a481b710c52f9a0a
| 954
|
py
|
Python
|
COS120/EXAMPLES/turtle.py
|
thejayhaykid/Python
|
641c33b94762f0cace203dcf4cc121571625ab02
|
[
"MIT"
] | null | null | null |
COS120/EXAMPLES/turtle.py
|
thejayhaykid/Python
|
641c33b94762f0cace203dcf4cc121571625ab02
|
[
"MIT"
] | null | null | null |
COS120/EXAMPLES/turtle.py
|
thejayhaykid/Python
|
641c33b94762f0cace203dcf4cc121571625ab02
|
[
"MIT"
] | null | null | null |
import cTurtle
# Module-level turtle shared by drawOctogon() and the drawing loop below.
t=cTurtle.Turtle()
def drawOctogon(side=50):
    """Draw a regular octagon with the shared module-level turtle `t`.

    Each of the 8 sides is `side` units long, turning 45 degrees right after
    each side (8 * 45 = 360, so the heading returns to where it started).

    Args:
        side: side length in turtle units; defaults to 50, matching the
            original hard-coded value, so existing calls are unchanged.
    """
    # Replaces 8 copy-pasted forward/right pairs with a loop.
    for _ in range(8):
        t.forward(side)
        t.right(45)
# Draw 24 overlapping octagons, rotating 15 degrees between each
# (24 * 15 = 360 degrees, one full revolution).  Replaces 24 copy-pasted
# right(15)/drawOctogon() pairs with a loop; the drawing is identical.
for _ in range(24):
    t.right(15)
    drawOctogon()
| 13.068493
| 18
| 0.671908
| 151
| 954
| 4.245033
| 0.072848
| 0.299532
| 0.299532
| 0.711388
| 0.936037
| 0.936037
| 0.936037
| 0.936037
| 0.936037
| 0.936037
| 0
| 0.098039
| 0.144654
| 954
| 72
| 19
| 13.25
| 0.6875
| 0
| 0
| 0.955224
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014925
| false
| 0
| 0.014925
| 0
| 0.029851
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
d86b08e98218ef589f031758939a632f27e23a1b
| 126
|
py
|
Python
|
json_content_validator/__init__.py
|
daka83/json_content_validator
|
abfef913861842532abeb32f9cb76322bf9dfbe9
|
[
"MIT"
] | 1
|
2019-03-13T15:52:49.000Z
|
2019-03-13T15:52:49.000Z
|
json_content_validator/__init__.py
|
daka83/json_content_validator
|
abfef913861842532abeb32f9cb76322bf9dfbe9
|
[
"MIT"
] | null | null | null |
json_content_validator/__init__.py
|
daka83/json_content_validator
|
abfef913861842532abeb32f9cb76322bf9dfbe9
|
[
"MIT"
] | null | null | null |
from json_content_validator.validators import *
from json_content_validator.json_content_validator import JSONContentValidator
| 63
| 78
| 0.920635
| 15
| 126
| 7.333333
| 0.466667
| 0.3
| 0.545455
| 0.436364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 126
| 2
| 78
| 63
| 0.92437
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
2b2067d8f7d4a88437c2f76271ee976118f4cb3e
| 52,480
|
py
|
Python
|
Economia/economia.py
|
Williancc1557/bot-python
|
c2fe9718ad07b3bfa5f3bb218cbe509f95b98027
|
[
"MIT"
] | null | null | null |
Economia/economia.py
|
Williancc1557/bot-python
|
c2fe9718ad07b3bfa5f3bb218cbe509f95b98027
|
[
"MIT"
] | null | null | null |
Economia/economia.py
|
Williancc1557/bot-python
|
c2fe9718ad07b3bfa5f3bb218cbe509f95b98027
|
[
"MIT"
] | null | null | null |
import discord
import asyncio
import discord
import asyncio
from discord.ext import commands
import random
from discord.ext.commands.cooldowns import BucketType
import datetime
import psycopg2
from Principais.principais import bot, mydb, cursor
# Re-bind the shared bot, DB connection and cursor under module-local names
# (aliases of the objects imported from Principais.principais above).
bot = bot
mydb = mydb
cursor = cursor
async def Criar_conta(ctx):
    """Interactively create a bank account for the invoking user.

    Prompts in the channel for an account name and a description, refuses if
    a row for the author already exists in the `dinheiro` table, otherwise
    inserts a new row with 200 starting coins, colour '000000' and no weapon.
    """
    try:
        # Ask for the account name and wait for the author's reply.
        d = await ctx.channel.send(embed=discord.Embed(title=':floppy_disk: Escreva o nome da conta: '))

        def check(p):
            # Accept only messages from the same author in the same channel.
            return p.author == ctx.author and p.channel == ctx.channel
        try:
            msg1 = await bot.wait_for('message', timeout=1000, check=check)
        except asyncio.TimeoutError:
            return await ctx.channel.send('**Acabou O Tempo**')
        else:
            nome = str(msg1.content).strip()
            await d.delete()
            await msg1.delete()
            # Ask for the account description.
            a = await ctx.channel.send(embed=discord.Embed(title=':clipboard: Escreva a descrição da conta: '))

            def check(p):
                return p.author == ctx.author and p.channel == ctx.channel
            try:
                msg2 = await bot.wait_for('message', timeout=1000, check=check)
            except asyncio.TimeoutError:
                return await ctx.channel.send('**Acabou O Tempo**')
            else:
                descriçao = str(msg2.content)
                await a.delete()
                await msg2.delete()

                def check(p):
                    # NOTE(review): this third `check` is never used below.
                    return p.author == ctx.author and p.channel == ctx.channel
                pessoa = ctx.author.id
                sqlinsert = f'SELECT descrição FROM dinheiro WHERE id = {pessoa}'
                cursor.execute(sqlinsert)
                valores_lidos = cursor.fetchone()
                arma = -1  # new accounts start without a weapon
                try:
                    # If a row exists, indexing succeeds -> account already exists.
                    print(valores_lidos[0])
                    await ctx.channel.send('<:error:788824695184424980> **Ops, Parece que você já possui um registro!!**')
                    return
                except:
                    # fetchone() returned None -> no account yet; create one.
                    dindin = 200
                    cor = '000000'
                    inserir = 'INSERT INTO dinheiro (id, nome, descrição, dinheiro, cor, arma) VALUES (%s, %s, %s, %s, %s, %s)'
                    dados = (pessoa, nome, descriçao, dindin, cor, arma)
                    cursor.execute(inserir, dados)
                    mydb.commit()
                    msg1 = await ctx.channel.send(embed=discord.Embed(title='<a:loading:785523240944664646> Criando conta ',
                                                                      color=0xFF69B4))
                    await asyncio.sleep(4)
                    await msg1.edit(embed=discord.Embed(title=':white_check_mark: **Conta criada**',
                                                        color=0x00FF00))
    except:
        # Broad catch: any failure above falls through to this generic reply.
        await ctx.channel.send(f'<:error:788824695184424980> Ops, parece que utilizou caracteres diferente desses: **a, b, c** no seu nome, ou escreveu um nome com mais de 30 caractéres! {ctx.author.mention}')
async def Conta(ctx, member: discord.Member = None):
    """Show the bank-account embed for the author, or for `member` if given.

    Reads description/balance/name/colour from the `dinheiro` table and
    computes the user's position in the balance-ordered ranking.
    """
    pessoa = ctx.author.id
    if not member:
        try:
            sqlinsert = f'SELECT descrição FROM dinheiro WHERE id = {pessoa}'
            cursor.execute(sqlinsert)
            valores_lidos = cursor.fetchone()
            sqlinsert1 = f'SELECT dinheiro FROM dinheiro WHERE id = {pessoa}'
            cursor.execute(sqlinsert1)
            valores_lidos1 = cursor.fetchone()
            sqlinsert2 = f'SELECT nome FROM dinheiro WHERE id = {pessoa}'
            cursor.execute(sqlinsert2)
            valores_lidos2 = cursor.fetchone()
            sqlinsert3 = f'SELECT cor FROM dinheiro WHERE id = {pessoa}'
            cursor.execute(sqlinsert3)
            valores_lidos3 = cursor.fetchone()
            # NOTE(review): int() parses the stored colour as *decimal*, not
            # hex -- confirm the `cor` column only ever holds decimal digits.
            v = 0x0 + int(valores_lidos3[0])
            sqlinsert4 = f'select id from dinheiro ORDER BY dinheiro desc'
            cursor.execute(sqlinsert4)
            valores_lidos4 = cursor.fetchall()
            # Ranking position = 1-based index in the balance-sorted id list.
            for index, element in enumerate(valores_lidos4):
                if element[0] == pessoa:
                    num = index + 1
                    break
            embed = discord.Embed(
                title='Sua conta no fênix: ',
                description=f'ㅤ\n**Discord name:** {ctx.author.name}\n\n**Nome da conta:** {valores_lidos2[0]}\n**Descrição:** {valores_lidos[0]}\n**Dinheiro:** {valores_lidos1[0]} *fenicoins*',
                color=v)
            embed.set_footer(text=f'Você está em #{num} lugar no ranking')
            embed.set_thumbnail(
                url='https://media.discordapp.net/attachments/788064370722340885/811621833484140555/edificio-de-banco-retro-dos-desenhos-animados-ou-tribunal-com-ilustracao-de-colunas-isolada-no-branc.png')
            embed.set_author(name=ctx.author.name, icon_url=str(ctx.author.avatar_url))
            await ctx.reply(embed=embed)
        except:
            # Any failure (typically no row for this user) -> "no account" error.
            await ctx.channel.send(
                embed=discord.Embed(title='<:error:788824695184424980> ** Você não possui uma conta no fênix!!**',
                                    color=0xFF0000))
    else:
        # Same queries, but for the mentioned member instead of the author.
        membro = member.id
        sqlinsert = f'SELECT descrição FROM dinheiro WHERE id = {membro}'
        cursor.execute(sqlinsert)
        valores_lidos = cursor.fetchone()
        sqlinsert1 = f'SELECT dinheiro FROM dinheiro WHERE id = {membro}'
        cursor.execute(sqlinsert1)
        valores_lidos1 = cursor.fetchone()
        sqlinsert2 = f'SELECT nome FROM dinheiro WHERE id = {membro}'
        cursor.execute(sqlinsert2)
        valores_lidos2 = cursor.fetchone()
        sqlinsert3 = f'SELECT cor FROM dinheiro WHERE id = {member.id}'
        cursor.execute(sqlinsert3)
        valores_lidos3 = cursor.fetchone()
        sqlinsert4 = f'select id from dinheiro ORDER BY dinheiro desc'
        cursor.execute(sqlinsert4)
        valores_lidos4 = cursor.fetchall()
        for index, element in enumerate(valores_lidos4):
            if element[0] == member.id:
                num1 = index + 1
                break
        try:
            embed1 = discord.Embed(
                title=f'Conta do {member.name} no fênix: ',
                description=f'ㅤ\n**Discord name:** {member.name}\n\n**Nome da conta:** {valores_lidos2[0]}\n**Descrição:** {valores_lidos[0]}\n**Dinheiro:** {valores_lidos1[0]} *fenicoins*',
                color=0x0 + int(valores_lidos3[0]))
            embed1.set_footer(text=f'{member} está em #{num1} no ranking')
            embed1.set_thumbnail(
                url='https://media.discordapp.net/attachments/788064370722340885/811621833484140555/edificio-de-banco-retro-dos-desenhos-animados-ou-tribunal-com-ilustracao-de-colunas-isolada-no-branc.png')
            embed1.set_author(name=member.name, icon_url=str(member.avatar_url))
            await ctx.channel.send(embed=embed1)
        except:
            await ctx.channel.send(embed=discord.Embed(
                title='<:error:788824695184424980> ** Este usuário não possui uma conta no fênix!!**',
                color=0xFF0000))
async def Desc_edit(ctx):
    """Let the author buy a description change for 5000 fenicoins.

    Flow: verify the author has an account, confirm the purchase, prompt for
    the new description, then update the row and deduct the 5000-coin fee.

    Fixes vs. the previous version:
      * the account-existence probe now indexes the fetched row (as the other
        commands do) instead of printing the SQL string, which never raises,
        so the "no account" branch was unreachable;
      * the cancellation branch now awaits `a.delete()` (it is a coroutine);
      * the user-supplied description is passed as a query parameter instead
        of being interpolated into the SQL string (injection risk).
    """
    pessoa = ctx.author.id
    sqlinsert2 = f'SELECT dinheiro FROM dinheiro WHERE id = {ctx.author.id}'
    cursor.execute(sqlinsert2)
    valores_lidos2 = cursor.fetchone()
    try:
        # Raises (TypeError on None) when the author has no account row.
        print(valores_lidos2[0])
    except:
        await ctx.channel.send(
            embed=discord.Embed(title='<:error:788824695184424980> ** Você não possui uma conta no fênix!!**',
                                color=0xFF0000))
        return
    if valores_lidos2[0] >= 5000:
        a = await ctx.channel.send(ctx.author.mention, embed=discord.Embed(
            title=f'Tem certeza que deseja **mudar a sua descrição** por **5000 de *fenicoins* ** ? Sim ou Não',
            color=0xffff00))

        def check(m):
            # Only the invoking author, in the same channel, may answer.
            return m.author == ctx.author and m.channel == ctx.channel
        try:
            msg25 = await bot.wait_for('message', timeout=1000.0, check=check)
        except asyncio.TimeoutError:
            return await ctx.channel.send(
                '***Demorou De Mais Para Aceitar.***')
        else:
            resposta25 = str(msg25.content).lower()
            if resposta25 == 'sim':
                await a.delete()
                d = await ctx.channel.send('**Escreva a sua nova descrição:** ')

                def check(p):
                    return p.author == ctx.author and p.channel == ctx.channel
                try:
                    msg2 = await bot.wait_for('message', timeout=1000, check=check)
                except asyncio.TimeoutError:
                    return await ctx.channel.send('**Acabou O Tempo**')
                else:
                    adescriçao = str(msg2.content)
                    # Parameterized: the description is arbitrary user input.
                    cursor.execute('UPDATE dinheiro SET descrição = %s WHERE id = %s',
                                   (adescriçao, pessoa))
                    mydb.commit()
                    dinheiro = 5000
                    await d.delete()
                    await msg2.delete()
                    # Charge the 5000-coin fee.
                    cursor.execute('UPDATE dinheiro SET dinheiro = %s WHERE id = %s',
                                   (valores_lidos2[0] - dinheiro, pessoa))
                    mydb.commit()
                    editar1 = await ctx.channel.send(
                        f'<a:loading:785523240944664646> **Editando descrição**')
                    await asyncio.sleep(4)
                    await editar1.edit(content=':white_check_mark: **Descrição editada**')
            else:
                # Fixed: Message.delete() must be awaited to actually run.
                await a.delete()
                await ctx.channel.send('<:error:788824695184424980> Compra cancelada')
    else:
        await ctx.channel.send('<:error:788824695184424980> Você não possui **5000 *fenicoins* **')
async def Transferir(ctx, member: discord.Member = None, dinheiro: int = 10):
    """Transfer `dinheiro` fenicoins from the author to `member` after a
    yes/no confirmation.

    Rejects missing arguments, non-positive amounts, insufficient balance,
    and recipients without an account.
    """
    if not member or not dinheiro:
        await ctx.channel.send('<:error:788824695184424980> **Rescreva o comando no seguinte formato:** `f!transferir (@user) (valor)`')
        return
    pessoa = ctx.author.id
    membro = member.id
    if 0 < dinheiro:
        try:
            sqlinsert2 = f'SELECT dinheiro FROM dinheiro WHERE id = {ctx.author.id}'
            cursor.execute(sqlinsert2)
            valores_lidos2 = cursor.fetchone()
            # Raises when fetchone() returned None (author has no account).
            print(valores_lidos2[0])
        except:
            await ctx.channel.send(embed=discord.Embed(title='<:error:788824695184424980> ** Você não possui uma conta no fênix!!**',
                                                       color=0xFF0000))
            return
        if valores_lidos2[0] >= dinheiro:
            await ctx.channel.send(embed=discord.Embed(title=f'Tem certeza que **deseja transferir** {dinheiro} *fenicoins* para {member} ? Sim ou Não',
                                                       color=0xffff00))

            def check(m):
                # Only the author may confirm, in the same channel.
                return m.author == ctx.author and m.channel == ctx.channel
            try:
                msg25 = await bot.wait_for('message', timeout=1000.0, check=check)
            except asyncio.TimeoutError:
                return await ctx.channel.send(
                    '***Demorou De Mais Para Aceitar.***')
            else:
                resposta25 = str(msg25.content).lower()
                if resposta25 == 'sim':
                    try:
                        # Debit the sender first...
                        sqlinsert = f"UPDATE dinheiro SET dinheiro = '{valores_lidos2[0] - dinheiro}' WHERE id = {pessoa}"
                        cursor.execute(sqlinsert)
                        mydb.commit()
                        sqlinsert3 = f'SELECT dinheiro FROM dinheiro WHERE id = {membro}'
                        cursor.execute(sqlinsert3)
                        valores_lidos3 = cursor.fetchone()
                        # ...this raises if the recipient has no row.
                        # NOTE(review): the sender's debit was already
                        # committed above and is not rolled back in that case.
                        print(valores_lidos3[0])
                        sqlinsert1 = f"UPDATE dinheiro SET dinheiro = '{valores_lidos3[0] + dinheiro}' WHERE id = {membro}"
                        cursor.execute(sqlinsert1)
                        mydb.commit()
                        await ctx.channel.send(f':white_check_mark: {ctx.author.mention} transferiu **{dinheiro} *fenicoins* ** para {member.mention} com sucesso!!')
                    except:
                        await ctx.channel.send('<:error:788824695184424980> ** Esse usuário não possui uma conta no fênix!!**')
                else:
                    await ctx.channel.send('<:error:788824695184424980> Transferência cancelada!')
        else:
            await ctx.message.delete()
            await ctx.channel.send(f'<:error:788824695184424980> Olá {ctx.author.mention}, Você não possui **saldo suficiente** para a transferência!')
    else:
        await ctx.message.delete()
        await ctx.channel.send(f'<:error:788824695184424980> Ue {ctx.author.mention}, vai transferir **dinheiro negativo**? k k k')
async def Diaria(ctx):
    """Grant the command author a random daily reward of 500-7000 fenicoins."""
    user_id = ctx.author.id
    cursor.execute(f'SELECT dinheiro FROM dinheiro WHERE id = {ctx.author.id}')
    saldo_row = cursor.fetchone()
    try:
        print(saldo_row[0])
    except:
        # fetchone() gave None: no account row — reset the cooldown and bail.
        erro = discord.Embed(title='<:error:788824695184424980> ** Você não possui uma conta no fênix!!**',
                             color=0xFF0000)
        await ctx.channel.send(embed=erro)
        ctx.command.reset_cooldown(ctx)
        return
    premio = random.randint(500, 7000)
    novo_saldo = saldo_row[0] + premio
    cursor.execute(f"UPDATE dinheiro SET dinheiro = '{novo_saldo}' WHERE id = {user_id}")
    mydb.commit()
    recompensa = discord.Embed(title=f'<a:giveway:815050719803211786> Parabéns Você recebeu: {premio} *fenicoins*',
                               color=0xffae00)
    await ctx.channel.send(embed=recompensa)
async def Cor(ctx):
    """Let the author reset their profile colour for free or buy one for 10000.

    Menu: [1] reset to the default colour (free), [2] buy one of three
    colours for 10000 fenicoins, anything else cancels.

    BUGFIX: the old code charged the 10000 fenicoins TWICE for option 3
    (Azul escuro) and had the menu-to-colour mapping crossed — option 1
    (Vinho) stored navy '000080' and option 2 (Verde escuro) stored maroon
    '800000'.  Each purchase now charges exactly once and stores the colour
    matching its menu label.
    """
    sqlinsert2 = f'SELECT dinheiro FROM dinheiro WHERE id = {ctx.author.id}'
    cursor.execute(sqlinsert2)
    valores_lidos2 = cursor.fetchone()
    try:
        print(valores_lidos2[0])
    except:
        # fetchone() returned None: the author has no account row.
        await ctx.channel.send(
            embed=discord.Embed(title='<:error:788824695184424980> ** Você não possui uma conta no fênix!!**',
                                color=0xFF0000))
        return
    a = await ctx.channel.send('**Oque você deseja?\n'
                               '[ 1 ] Mudar a cor para a padrão *(gratis)*\n'
                               '[ 2 ] Mudar de cor por 10000 *fenicoins* \n'
                               '[ 3 ] Cancelar**')
    def check(m):
        # Only accept the reply from the command author in this channel.
        return m.author == ctx.author and m.channel == ctx.channel
    try:
        msg25 = await bot.wait_for('message', timeout=1000.0, check=check)
    except asyncio.TimeoutError:
        return await ctx.channel.send(
            '***Demorou De Mais Para Aceitar.***')
    resposta25 = str(msg25.content).lower()
    if resposta25 == '1':
        # Free reset to the default colour.
        cursor.execute(f"UPDATE dinheiro SET cor = '000000' WHERE id = {ctx.author.id}")
        mydb.commit()
        await ctx.channel.send(embed=discord.Embed(title=':white_check_mark: Cor alterada com sucesso!',
                                                   color=0x00FF00))
    elif resposta25 == '2':
        await msg25.delete()
        await a.delete()
        if valores_lidos2[0] >= 10000:
            a = await ctx.channel.send('**Qual cor você deseja?\n'
                                       '[ 1 ] Vinho\n'
                                       '[ 2 ] Verde escuro\n'
                                       '[ 3 ] Azul escuro **')
            try:
                msg26 = await bot.wait_for('message', timeout=1000.0, check=check)
            except asyncio.TimeoutError:
                return await ctx.channel.send(
                    '***Demorou De Mais Para Aceitar.***')
            # Menu choice -> stored hex colour.  '006400' (dark green) fills
            # the Verde escuro slot the old crossed mapping never provided —
            # TODO confirm the desired shade.
            cores = {'1': '800000', '2': '006400', '3': '000080'}
            resposta26 = str(msg26.content).lower()
            # Like the original, an unrecognised answer silently does nothing.
            if resposta26 in cores:
                cursor.execute(f"UPDATE dinheiro SET cor = '{cores[resposta26]}' WHERE id = {ctx.author.id}")
                mydb.commit()
                # Charge the 10000 fenicoins exactly once per purchase.
                cursor.execute(f"UPDATE dinheiro SET dinheiro = '{valores_lidos2[0] - 10000}' WHERE id = {ctx.author.id}")
                mydb.commit()
                await ctx.channel.send(embed=discord.Embed(title=':white_check_mark: Cor alterada com sucesso!',
                                                           color=0x00FF00))
        else:
            await ctx.channel.send(
                f'<:error:788824695184424980> Olá {ctx.author.mention}, Você não possui **saldo suficiente** para a compra!')
    else:
        await ctx.channel.send(
            f'**<:error:788824695184424980> Olá {ctx.author.mention}, a sua alteração de cor foi cancelada**')
async def Roubar(ctx, member: discord.Member = None):
    """Attempt to rob `member`: a random roll decides gain or a fine.

    Both parties must hold at least 7000 fenicoins and the author must own
    a weapon (arma between 1 and 7); each attempt consumes one point of
    weapon resistance, and a weapon at resistance 1 breaks afterwards.
    """
    pessoa = ctx.author.id
    if not member:
        await ctx.channel.send('<:error:788824695184424980> **Mencione quem você deseja roubar**')
        ctx.command.reset_cooldown(ctx)
        return
    membro = member.id
    # Balances of author and target, plus the author's weapon resistance.
    sqlinsert2 = f'SELECT dinheiro FROM dinheiro WHERE id = {ctx.author.id}'
    cursor.execute(sqlinsert2)
    valores_lidos2 = cursor.fetchone()
    sqlinsert3 = f'SELECT dinheiro FROM dinheiro WHERE id = {membro}'
    cursor.execute(sqlinsert3)
    valores_lidos3 = cursor.fetchone()
    sqlinsert4 = f'SELECT arma FROM dinheiro WHERE id = {ctx.author.id}'
    cursor.execute(sqlinsert4)
    valores_lidos4 = cursor.fetchone()
    print(valores_lidos4)
    if ctx.author.id == member.id:
        # Self-robbery is rejected.
        await ctx.channel.send(embed=discord.Embed(title='<:error:788824695184424980> Você não pode roubar si mesmo!',
                                                   color=0xFF0000))
        ctx.command.reset_cooldown(ctx)
        return
    else:
        a = await ctx.channel.send(
            embed=discord.Embed(title=f'<a:loading:785523240944664646> Você está tentando roubar {member}',
                                color=0xFF6347))
        await asyncio.sleep(4)
        # Positive roll => successful robbery; zero/negative => fine branch.
        num = random.randint(-5000, 6500)
        if num > 0:
            try:
                if valores_lidos3[0] >= 7000:
                    if valores_lidos2[0] >= 7000:
                        if 0 < valores_lidos4[0] <= 7:
                            # Using the weapon costs one resistance point.
                            sqlinsert = f"UPDATE dinheiro SET arma = '{valores_lidos4[0] - 1}' WHERE id = {pessoa}"
                            cursor.execute(sqlinsert)
                            mydb.commit()
                        else:
                            await a.edit(
                                embed=discord.Embed(title='<:error:788824695184424980> Você não possui uma arma',
                                                    color=0xFF0000))
                            ctx.command.reset_cooldown(ctx)
                            return
                        # Move `num` fenicoins from the target to the author.
                        sqlinsert = f"UPDATE dinheiro SET dinheiro = '{valores_lidos2[0] + num}' WHERE id = {pessoa}"
                        cursor.execute(sqlinsert)
                        mydb.commit()
                        sqlinsert = f"UPDATE dinheiro SET dinheiro = '{valores_lidos3[0] - num}' WHERE id = {member.id}"
                        cursor.execute(sqlinsert)
                        mydb.commit()
                        await a.edit(embed=discord.Embed(
                            title=f':white_check_mark: Você roubou {num} *fenicoins* do(a) {member}',
                            color=0x00FF00))
                        await member.send(
                            f'Ou, parecer que o `{ctx.author}` acabou de roubar você **`{num} fenicoins`**. **Eu não deixaria sair barato!!**')
                        await asyncio.sleep(2)
                        if valores_lidos4[0] == 1:
                            # Resistance hit zero: the weapon breaks.
                            await ctx.channel.send(
                                embed=discord.Embed(title=f'Após roubar {member.name}, a sua arma quebra! ***track***',
                                                    color=0xFF0000))
                    else:
                        await a.edit(embed=discord.Embed(
                            title=f'**<:error:788824695184424980> Para poder roubar, você {ctx.author.name} precisa de no mínimo 7000 *fenicoins* **',
                            color=0xFF0000))
                        ctx.command.reset_cooldown(ctx)
                        return
                else:
                    # Target below 7000: protected from robbery.
                    await a.edit(embed=discord.Embed(
                        title=f'**<:error:788824695184424980> Olá {ctx.author.name}, eu estou protegendo o(a) {member.name} até ele(a) conseguir 7000 *fenicoins* **',
                        color=0xFF0000))
                    ctx.command.reset_cooldown(ctx)
                    return
            except:
                await a.edit(embed=discord.Embed(
                    title=f'<:error:788824695184424980> ** Ocorreu um erro ao tentar roubar o(a) {member.name}**',
                    color=0xFF0000))
                ctx.command.reset_cooldown(ctx)
                return
        elif num <= 0:
            # Failed robbery: the (negative) roll is added to the author's
            # balance, i.e. the author pays a fine.
            try:
                if valores_lidos3[0] >= 7000:
                    if valores_lidos2[0] >= 7000:
                        if 0 < valores_lidos4[0] <= 7:
                            sqlinsert = f"UPDATE dinheiro SET arma = '{valores_lidos4[0] - 1}' WHERE id = {pessoa}"
                            cursor.execute(sqlinsert)
                            mydb.commit()
                        else:
                            await a.edit(
                                embed=discord.Embed(title='<:error:788824695184424980> Você não possui uma arma',
                                                    color=0xFF0000))
                            ctx.command.reset_cooldown(ctx)
                            return
                        sqlinsert = f"UPDATE dinheiro SET dinheiro = '{valores_lidos2[0] + num}' WHERE id = {ctx.author.id}"
                        cursor.execute(sqlinsert)
                        mydb.commit()
                        # NOTE(review): `num` is <= 0 here, so the fine is
                        # displayed as a negative amount — confirm whether
                        # abs(num) was intended in these messages.
                        await a.edit(embed=discord.Embed(
                            title=f'<:error:788824695184424980> Um policial te deu uma multa de {num} *fenicoins* por tentar roubar {member.name}',
                            color=0xFF0000
                        ))
                        await member.send(
                            f'Ou, parecer que o `{ctx.author}` acabou de tentar roubar você e perdeu **`{num} fenicoins`**. **Eu não deixaria sair barato!!**')
                        await asyncio.sleep(2)
                        if valores_lidos4[0] == 1:
                            await ctx.channel.send(embed=discord.Embed(
                                title=f'Após tentar roubar {member.name}, a sua arma quebra! ***track***',
                                color=0xFF0000))
                    else:
                        await a.edit(embed=discord.Embed(
                            title=f'**<:error:788824695184424980> Para poder roubar, você {ctx.author.name} precisa de no mínimo 7000 *fenicoins* **',
                            color=0xFF0000))
                        ctx.command.reset_cooldown(ctx)
                        return
                else:
                    await a.edit(embed=discord.Embed(
                        title=f'**<:error:788824695184424980> Olá {ctx.author.name}, eu estou protegendo o(a) {member.name} até ele(a) conseguir 7000 *fenicoins* **',
                        color=0xFF0000))
                    ctx.command.reset_cooldown(ctx)
            except:
                await a.edit(embed=discord.Embed(
                    title=f'<:error:788824695184424980> ** Ocorreu um erro ao tentar roubar o(a) {member.name}**',
                    color=0xFF0000))
                ctx.command.reset_cooldown(ctx)
                return
def _comprar_arma(user_id, saldo, preco, resistencia):
    """Debit `preco` from `saldo` and set the weapon resistance for `user_id`."""
    cursor.execute(f"UPDATE dinheiro SET dinheiro = '{saldo - preco}' WHERE id = {user_id}")
    mydb.commit()
    cursor.execute(f"UPDATE dinheiro SET arma = '{resistencia}' WHERE id = {user_id}")
    mydb.commit()

async def Arma(ctx):
    """Show the author's weapon or sell one of three weapons via DM.

    Menu: [1] show current weapon resistance, [2] buy a weapon
    (stats/prices are sent to the author's DM first), anything else cancels.

    BUGFIX: the old purchase test `x == 'arma 1' or x == '1' and saldo >= 5000`
    let the "arma N" spelling skip the balance check because `and` binds
    tighter than `or`; both spellings now require sufficient funds.  Also,
    the trailing `await b.delete()` cleanup used to run on the cancel path
    where b/c/d were never bound (NameError) — it now runs only on the
    purchase branch.
    """
    sqlinsert2 = f'SELECT dinheiro FROM dinheiro WHERE id = {ctx.author.id}'
    cursor.execute(sqlinsert2)
    valores_lidos2 = cursor.fetchone()
    sqlinsert4 = f'SELECT arma FROM dinheiro WHERE id = {ctx.author.id}'
    cursor.execute(sqlinsert4)
    valores_lidos4 = cursor.fetchone()
    try:
        print(valores_lidos2[0])
    except:
        # fetchone() returned None: no account row for the author.
        await ctx.channel.send(
            embed=discord.Embed(title='<:error:788824695184424980> **Você não possui uma conta no fênix!!**',
                                color=0xFF0000))
        return
    a = await ctx.channel.send('**Oque você deseja?\n'
                               '[ 1 ] Ver Arma \n'
                               '[ 2 ] Comprar Arma\n'
                               '[ 3 ] Cancelar**')
    def check(m):
        # Only accept the reply from the command author in this channel.
        return m.author == ctx.author and m.channel == ctx.channel
    try:
        msg25 = await bot.wait_for('message', timeout=1000.0, check=check)
    except asyncio.TimeoutError:
        return await ctx.channel.send(
            f'***Demorou De Mais Para Escolher {ctx.author.mention}.***')
    resposta25 = str(msg25.content)
    # startswith() instead of resposta25[0]: no IndexError on empty content.
    if resposta25.startswith('1'):
        if valores_lidos4[0] == -1 or valores_lidos4[0] == 0:
            await ctx.channel.send(embed=discord.Embed(title='<:error:788824695184424980> **Você não possui uma arma**',
                                                       color=0xFF0000))
            return
        embed12 = discord.Embed(title=f'Você possui uma arma com ***{valores_lidos4[0]}*** de resistência',
                                color=0x8A2BE2)
        await ctx.send(embed=embed12)
        return
    if resposta25.startswith('2'):
        embed = discord.Embed(title='Arma 1',
                              description='Estatística : 3 de resistência\n'
                                          'Valor : **5000 *fenicoins* **',
                              color=0xFF00FF)
        embed1 = discord.Embed(title='Arma 2',
                               description='Estatística : 5 de resistência\n'
                                           'Valor : **7000 *fenicoins* **',
                               color=0xFF00FF)
        embed2 = discord.Embed(title='Arma 3',
                               description='Estatística : 7 de resistência\n'
                                           'Valor : **9000 *fenicoins* **',
                               color=0xFF00FF)
        embed2.set_image(
            url='https://media.discordapp.net/attachments/788064370722340885/788170522134970429/kisspng-38-special-revolver-firearm-pistol-smith-wesson-5b0b15088c94c1.png')
        embed1.set_image(
            url='https://media.discordapp.net/attachments/788064370722340885/788170572033949696/5bc0eaf503b8e-a0a4375043583f46cdac42d2f6e2d1c7.png')
        embed.set_image(
            url='https://media.discordapp.net/attachments/788064370722340885/788170539964694528/1607959158347.png')
        try:
            b = await ctx.author.send(embed=embed)
            await asyncio.sleep(1)
            c = await ctx.author.send(embed=embed1)
            await asyncio.sleep(1)
            d = await ctx.author.send(embed=embed2)
            await asyncio.sleep(1)
            await ctx.channel.send(':calling: **Enviei as informações das armas que você pode comprar no seu DM!!**')
        except:
            # DMs closed: the catalogue cannot be delivered, abort the sale.
            await ctx.channel.send(
                '<:error:788824695184424980> Ops parece que seu DM está bloqueado. Para dar esse comando você precisa desbloquear.')
            return
        await asyncio.sleep(2)
        await ctx.send('**Qual das armas enviadas no seu DM você deseja?** ***ex:*** 1, 2, 3 ou cancelar')
        await a.delete()
        await msg25.delete()
        try:
            msg25 = await bot.wait_for('message', timeout=1000.0, check=check)
        except asyncio.TimeoutError:
            return await ctx.channel.send(
                f'***Demorou De Mais Para Escolher {ctx.author.mention}.***')
        resposta27 = str(msg25.content).lower()
        if resposta27 in ('arma 1', '1') and valores_lidos2[0] >= 5000:
            _comprar_arma(ctx.author.id, valores_lidos2[0], 5000, 3)
            await ctx.channel.send(':white_check_mark: **Arma comprada com sucesso**')
        elif resposta27 in ('arma 2', '2') and valores_lidos2[0] >= 7000:
            _comprar_arma(ctx.author.id, valores_lidos2[0], 7000, 5)
            await ctx.channel.send(':white_check_mark: **Arma comprada com sucesso**')
        elif resposta27 in ('arma 3', '3') and valores_lidos2[0] >= 9000:
            _comprar_arma(ctx.author.id, valores_lidos2[0], 9000, 7)
            await ctx.channel.send(':white_check_mark: **Arma comprada com sucesso**')
        elif resposta27 in ('cancelar', 'cancel'):
            await ctx.channel.send(embed=discord.Embed(title='<:error:788824695184424980> Compra Cancelada',
                                                       color=0xFF0000))
        else:
            # Like the original, any other answer (including a valid weapon
            # with too little money) reports insufficient balance.
            await ctx.channel.send(embed=discord.Embed(
                title=f'<:error:788824695184424980> Olá {ctx.author.name}, Você não possui **saldo suficiente** para a compra!',
                color=0xFF0000))
        # Clean up the three DM catalogue messages.
        await b.delete()
        await c.delete()
        await d.delete()
    else:
        await ctx.channel.send(f'**<:error:788824695184424980> Olá {ctx.author.mention}, a sua escolha foi cancelada**')
async def Fenicoins(ctx, member: discord.Member = None):
    """Show the balance and ranking position of the author (or of `member`)."""
    sqlinsert2 = f'SELECT dinheiro FROM dinheiro WHERE id = {ctx.author.id}'
    cursor.execute(sqlinsert2)
    valores_lidos2 = cursor.fetchone()
    # Every account id ordered by balance; list position == ranking place.
    sqlinsert4 = f'select id from dinheiro ORDER BY dinheiro desc'
    cursor.execute(sqlinsert4)
    valores_lidos4 = cursor.fetchall()
    pessoa = ctx.author.id
    await ctx.message.delete()
    if not member:
        try:
            print(valores_lidos2[0])
        except:
            # fetchone() returned None: the author has no account row.
            await ctx.channel.send(embed=discord.Embed(
                title='<:error:788824695184424980> ** Você não possui uma conta no fênix!!**',
                color=0xFF0000))
            return
        # Find the author's 1-based position in the ranking.
        for index, element in enumerate(valores_lidos4):
            if element[0] == pessoa:
                num1 = index + 1
                break
        coin = (
            f'<:fenix:787131059669303358>| Olá {ctx.author.mention} parece que você possui **{valores_lidos2[0]} fenicoins** e está em **#{num1} lugar** no ranking!')
        await ctx.channel.send(coin)
    else:
        membro = member.id
        # NOTE(review): if `membro` is absent from the ranking, num2 stays
        # unbound; in practice the account check below returns first.
        for index, element in enumerate(valores_lidos4):
            if element[0] == membro:
                num2 = index + 1
                break
        sqlinsert3 = f'SELECT dinheiro FROM dinheiro WHERE id = {member.id}'
        cursor.execute(sqlinsert3)
        valores_lidos3 = cursor.fetchone()
        try:
            print(valores_lidos3[0])
        except:
            await ctx.channel.send(embed=discord.Embed(
                title='<:error:788824695184424980> ** Esse usuário não possui uma conta no fênix!!**',
                color=0xFF0000))
            return
        coin = (
            f'<:fenix:787131059669303358>| Olá {ctx.author.mention} parece que o(a) {member.mention} possui **{valores_lidos3[0]} fenicoins** e está em **#{num2} lugar**!')
        await ctx.channel.send(coin)
async def Top_global(ctx):
    """Reply with an embed showing the three richest accounts.

    BUGFIX: the old index loops raised NameError when the `dinheiro` table
    held fewer than three rows; missing slots now fall back to a '---' name
    and a balance of 0.
    """
    cursor.execute('select nome from dinheiro ORDER BY dinheiro desc')
    nomes = [row[0] for row in cursor.fetchall()[:3]]
    cursor.execute('select dinheiro from dinheiro ORDER BY dinheiro desc')
    saldos = [row[0] for row in cursor.fetchall()[:3]]
    # Pad to three entries so the embed always has all its fields.
    while len(nomes) < 3:
        nomes.append('---')
    while len(saldos) < 3:
        saldos.append(0)
    num1, num2, num3 = nomes
    num10, num20, num30 = saldos
    embed = discord.Embed(
        title='<a:Top:815388123039006741> Os Tops 3 no Ranking <a:Top:815388123039006741>',
        description='',
        color=0x0000FF)
    embed.set_thumbnail(
        url='https://media.discordapp.net/attachments/788064370722340885/811621833484140555/edificio-de-banco-retro-dos-desenhos-animados-ou-tribunal-com-ilustracao-de-colunas-isolada-no-branc.png')
    embed.add_field(name='<a:emoji_42:815378184219918336> no Ranking', value=f'{num1}\n**{num10}**', inline=True)
    embed.add_field(name='<a:emoji_44:815378316617580575> no Ranking', value=f'{num2}\n**{num20}**', inline=True)
    embed.add_field(name='<a:emoji_43:815378237563207680> no Ranking', value=f'{num3}\n**{num30}**', inline=True)
    await ctx.reply(ctx.author.mention, embed=embed)
async def Roubar_banco(ctx, member1: discord.Member = None):
    """Rob the bank, alone or together with `member1`.

    Requires a weapon (arma 1-7); each heist costs one resistance point.
    A random roll picks the outcome: solo wins 2500-15000 fenicoins on 1
    of 4 outcomes (fine otherwise); a duo splits 4500-16000 on 2 of 4
    outcomes after `member1` confirms via DM.
    """
    # DATA
    # author's balance
    sqlinsert2 = f'SELECT dinheiro FROM dinheiro WHERE id = {ctx.author.id}'
    cursor.execute(sqlinsert2)
    valores_lidos2 = cursor.fetchone()
    # member1's balance (member1 may be None, hence the try)
    try:
        sqlinsert2 = f'SELECT dinheiro FROM dinheiro WHERE id = {member1.id}'
        cursor.execute(sqlinsert2)
        valores_lidos3 = cursor.fetchone()
    except:
        pass
    # author's weapon resistance
    sqlinsert4 = f'SELECT arma FROM dinheiro WHERE id = {ctx.author.id}'
    cursor.execute(sqlinsert4)
    valores_lidos4 = cursor.fetchone()
    # member1's weapon resistance (member1 may be None, hence the try)
    try:
        sqlinsert4 = f'SELECT arma FROM dinheiro WHERE id = {member1.id}'
        cursor.execute(sqlinsert4)
        valores_lidos5 = cursor.fetchone()
    except:
        pass
    # author has no account row at all
    try:
        print(valores_lidos2[0])
    except:
        await ctx.channel.send(
            embed=discord.Embed(title='<:error:788824695184424980> **Você não possui uma conta no fênix!!**',
                                color=0xFF0000))
        ctx.command.reset_cooldown(ctx)
        return
    try:
        if ctx.author == member1:
            return await ctx.channel.send('<:error:788824695184424980> **Você não pode roubar com si mesmo**')
            # NOTE(review): unreachable — sits after the return above.
            ctx.command.reset_cooldown(ctx)
    except:
        pass
    # solo heist: needs a working weapon
    if 7 >= valores_lidos4[0] > 0:
        print('teste')
        if not member1:
            embed = discord.Embed(
                description='**Tem certeza que deseja roubar o banco sozinho? **\n\nVocê pode roubar com até 2 pessoas, e ter mais chance de sucesso!')
            embed.set_footer(text='escreva Sim ou Não')
            await ctx.channel.send(embed=embed)
            def check(m):
                # Only accept the reply from the command author in this channel.
                return m.author == ctx.author and m.channel == ctx.channel
            try:
                msg25 = await bot.wait_for('message', timeout=100, check=check)
            except asyncio.TimeoutError:
                return await ctx.channel.send(
                    f'***Demorou De Mais Para Aceitar {ctx.author.mention}.***')
                # NOTE(review): unreachable — sits after the return above.
                ctx.command.reset_cooldown(ctx)
            else:
                resposta25 = str(msg25.content).lower()
                # Outcome roll: 1 wins, 2-4 are fines.
                num = random.randint(1, 4)
                if resposta25 == 'sim':
                    b = await ctx.channel.send(
                        embed=discord.Embed(title='<a:loading:785523240944664646> Roubando o banco'
                                            ))
                    await asyncio.sleep(10)
                    # outcomes
                    if num == 1:
                        money1 = random.randint(2500, 15000)
                        sqlinsert = f"UPDATE dinheiro SET dinheiro = '{valores_lidos2[0] + money1}' WHERE id = {ctx.author.id}"
                        cursor.execute(sqlinsert)
                        mydb.commit()
                        # heist costs one point of weapon resistance
                        sqlinsert = f"UPDATE dinheiro SET arma = '{valores_lidos4[0] - 1}' WHERE id = {ctx.author.id}"
                        cursor.execute(sqlinsert)
                        mydb.commit()
                        embed1 = discord.Embed(
                            description=f':white_check_mark: Você conseguiu sair com a grana roubada! **Parabéns {ctx.author.name}!** <a:Top:815388123039006741>',
                            color=0x00FF00)
                        embed1.set_footer(text=f'Você recebeu {money1} fenicoins')
                        await ctx.channel.send(embed=embed1)
                        return await b.delete()
                    if num == 2:
                        money2 = random.randint(2500, 13000)
                        sqlinsert = f"UPDATE dinheiro SET dinheiro = '{valores_lidos2[0] - money2}' WHERE id = {ctx.author.id}"
                        cursor.execute(sqlinsert)
                        mydb.commit()
                        sqlinsert = f"UPDATE dinheiro SET arma = '{valores_lidos4[0] - 1}' WHERE id = {ctx.author.id}"
                        cursor.execute(sqlinsert)
                        mydb.commit()
                        embed1 = discord.Embed(
                            description=f'<:error:788824695184424980> Você foi preso e levou uma multa de **{money2} fenicoins**',
                            color=0xFF0000)
                        embed1.set_footer(text=f'Você perdeu {money2} fenicoins')
                        await ctx.channel.send(embed=embed1)
                        return await b.delete()
                    if num == 3:
                        money3 = random.randint(2500, 13000)
                        sqlinsert = f"UPDATE dinheiro SET dinheiro = '{valores_lidos2[0] - money3}' WHERE id = {ctx.author.id}"
                        cursor.execute(sqlinsert)
                        mydb.commit()
                        sqlinsert = f"UPDATE dinheiro SET arma = '{valores_lidos4[0] - 1}' WHERE id = {ctx.author.id}"
                        cursor.execute(sqlinsert)
                        mydb.commit()
                        embed1 = discord.Embed(
                            description=f'<:error:788824695184424980> Você foi preso e levou uma multa de **{money3} fenicoins**',
                            color=0xFF0000)
                        embed1.set_footer(text=f'Você perdeu {money3} fenicoins')
                        await ctx.channel.send(embed=embed1)
                        return await b.delete()
                    if num == 4:
                        money4 = random.randint(2500, 13000)
                        sqlinsert = f"UPDATE dinheiro SET dinheiro = '{valores_lidos2[0] - money4}' WHERE id = {ctx.author.id}"
                        cursor.execute(sqlinsert)
                        mydb.commit()
                        sqlinsert = f"UPDATE dinheiro SET arma = '{valores_lidos4[0] - 1}' WHERE id = {ctx.author.id}"
                        cursor.execute(sqlinsert)
                        mydb.commit()
                        embed1 = discord.Embed(
                            description=f'<:error:788824695184424980> Você foi preso e levou uma multa de **{money4} fenicoins**',
                            color=0xFF0000)
                        embed1.set_footer(text=f'Você perdeu {money4} fenicoins')
                        await ctx.channel.send(embed=embed1)
                        return await b.delete()
                else:
                    await ctx.channel.send(':white_check_mark: **Roubo cancelado com sucesso!**')
                    ctx.command.reset_cooldown(ctx)
                    return
    else:
        # author has no usable weapon
        await ctx.channel.send('<:error:788824695184424980> **Você não possui uma arma!**')
        ctx.command.reset_cooldown(ctx)
        return
    # From here on: weapon OK and member1 was given (every solo branch
    # above returns).  Re-read member1's weapon resistance.
    sqlinsert4 = f'SELECT arma FROM dinheiro WHERE id = {member1.id}'
    cursor.execute(sqlinsert4)
    valores_lidos5 = cursor.fetchone()
    # duo heist: member1 must also have an account
    try:
        print(valores_lidos3[0])
    except:
        await ctx.channel.send(
            embed=discord.Embed(
                title=f'<:error:788824695184424980> **O(a) {member1.name} não possui uma conta no fênix!!**',
                color=0xFF0000))
        ctx.command.reset_cooldown(ctx)
        return
    # at least one of the pair needs a usable weapon
    if 7 >= valores_lidos4[0] > 0 or 7 > valores_lidos5[0] > 0:
        if member1:
            embed = discord.Embed(
                description=f'Tem certeza que deseja roubar o banco com o(a) ***{member1.name}*** \n\nVocê pode roubar com até 2 pessoas, e ter mais chance de sucesso!',
                color=0x8A2BE2)
            embed.set_footer(text='escreva Sim ou Não')
            await ctx.channel.send(embed=embed)
            def check(m):
                return m.author == ctx.author and m.channel == ctx.channel
            try:
                msg25 = await bot.wait_for('message', timeout=100.0, check=check)
            except asyncio.TimeoutError:
                return await ctx.channel.send(
                    f'***Demorou De Mais Para aceitar {ctx.author.mention}.***')
                # NOTE(review): unreachable — sits after the return above.
                ctx.command.reset_cooldown(ctx)
            else:
                resposta25 = str(msg25.content).lower()
                print(resposta25)
                if resposta25 == 'sim':
                    # wait for member1's confirmation via DM
                    a = await ctx.channel.send(f'**<a:loading:785523240944664646> aguardando o(a) {member1.name} aceitar**')
                    embed = discord.Embed(
                        description=f'O(a) **{ctx.author}** quer assaltar um banco com você no servidor **{ctx.author.guild.name}**! \n\nAceita sim ou não? ',
                        color=0x8A2BE2)
                    embed.set_footer(text='escreva Sim ou Não')
                    try:
                        await member1.send(embed=embed)
                    except:
                        await ctx.channel.send(
                            f'<:error:788824695184424980> **Parece que o privado do {member1.name} está privado!! não consigo continuar o roubo assim.')
                        return
                    def check(m):
                        # NOTE(review): this matches a DM from ctx.author, not
                        # from member1 — the invited player's own answer is
                        # never read; confirm whether m.author should be
                        # member1 here.
                        return m.author == ctx.author and m.guild == None
                    try:
                        msg25 = await bot.wait_for('message', timeout=1000.0, check=check)
                    except asyncio.TimeoutError:
                        return await member1.send(
                            f'***<:error:788824695184424980> Demorou De Mais Para Aceitar {ctx.author.mention}.***')
                        # NOTE(review): unreachable — sits after the return above.
                        ctx.command.reset_cooldown(ctx)
                    else:
                        resposta26 = str(msg25.content).lower()
                        print(resposta26)
                        # Outcome roll: 1 and the final else win, 2-3 are fines.
                        num1 = random.randint(1, 4)
                        if resposta26 == 'não':
                            await a.edit(content=f'<:error:788824695184424980> o(a) **{member1.name}** não aceitou!!')
                            ctx.command.reset_cooldown(ctx)
                            return
                        if resposta26 == 'sim':
                            # consume one weapon-resistance point from whoever
                            # has a usable weapon (both, if both do)
                            if 0 < valores_lidos4[0] <= 7 and 0 >= valores_lidos5[0]:
                                sqlinsert = f"UPDATE dinheiro SET arma = '{valores_lidos4[0] - 1}' WHERE id = {ctx.author.id}"
                                cursor.execute(sqlinsert)
                                mydb.commit()
                            elif 0 < valores_lidos5[0] <= 7 and 0 >= valores_lidos4[0]:
                                sqlinsert = f"UPDATE dinheiro SET arma = '{valores_lidos5[0] - 1}' WHERE id = {member1.id}"
                                cursor.execute(sqlinsert)
                                mydb.commit()
                            else:
                                sqlinsert = f"UPDATE dinheiro SET arma = '{valores_lidos5[0] - 1}' WHERE id = {member1.id}"
                                cursor.execute(sqlinsert)
                                mydb.commit()
                                sqlinsert = f"UPDATE dinheiro SET arma = '{valores_lidos4[0] - 1}' WHERE id = {ctx.author.id}"
                                cursor.execute(sqlinsert)
                                mydb.commit()
                            await member1.send(embed=discord.Embed(title=':white_check_mark: Roubo aceito com sucesso!',
                                                                   color=0x00FF00))
                            # outcomes
                            if num1 == 1:
                                await asyncio.sleep(3.5)
                                await a.edit(content=f':white_check_mark: o(a) **{member1.name}** aceitou!!')
                                await asyncio.sleep(3.5)
                                b = await ctx.channel.send(
                                    embed=discord.Embed(title='<a:loading:785523240944664646> Roubando o banco'
                                                        ))
                                await asyncio.sleep(10)
                                money1 = random.randint(4500, 16000)
                                # update the author's balance
                                # NOTE(review): money1 / 2 is a float, written
                                # to the DB unquoted — confirm the column type
                                # tolerates this.
                                sqlinsert = f"UPDATE dinheiro SET dinheiro = {valores_lidos2[0] + (money1 / 2)} WHERE id = {ctx.author.id}"
                                cursor.execute(sqlinsert)
                                mydb.commit()
                                # update member1's balance
                                sqlinsert = f"UPDATE dinheiro SET dinheiro = {valores_lidos3[0] + (money1 / 2)} WHERE id = {member1.id}"
                                cursor.execute(sqlinsert)
                                mydb.commit()
                                embed2 = discord.Embed(
                                    description=f':white_check_mark: Vocês conseguiram sair com **{money1} fenicoins** roubados! Cada um vai ficar com **{money1 / 2} fenicoins**! **Parabéns {ctx.author.name} e {member1.name}!** <a:Top:815388123039006741>',
                                    color=0x00FF00)
                                await b.edit(embed=embed2)
                            elif num1 == 2:
                                await asyncio.sleep(3.5)
                                await a.edit(content=f':white_check_mark: o(a) **{member1.name}** aceitou!!')
                                await asyncio.sleep(3.5)
                                b = await ctx.channel.send(
                                    embed=discord.Embed(title='<a:loading:785523240944664646> Roubando o banco'
                                                        ))
                                await asyncio.sleep(10)
                                money1 = random.randint(4500, 16000)
                                # fine: debit the author
                                sqlinsert = f"UPDATE dinheiro SET dinheiro = {valores_lidos2[0] - (money1 / 2)} WHERE id = {ctx.author.id}"
                                cursor.execute(sqlinsert)
                                mydb.commit()
                                # fine: debit member1
                                sqlinsert = f"UPDATE dinheiro SET dinheiro = {valores_lidos3[0] - (money1 / 2)} WHERE id = {member1.id}"
                                cursor.execute(sqlinsert)
                                mydb.commit()
                                embed2 = discord.Embed(
                                    description=f'<:error:788824695184424980> Vocês perderam **{money1} fenicoins** após pagar uma multa por tentar roubar o banco! Cada um vai perder **{money1 / 2} fenicoins**! **valeu a tentativa {ctx.author.name} e {member1.name}!** <a:Top:815388123039006741>',
                                    color=0xFF0000)
                                await b.edit(embed=embed2)
                            elif num1 == 3:
                                await asyncio.sleep(3.5)
                                await a.edit(content=f':white_check_mark: o(a) **{member1.name}** aceitou!!')
                                await asyncio.sleep(3.5)
                                b = await ctx.channel.send(
                                    embed=discord.Embed(title='<a:loading:785523240944664646> Roubando o banco'))
                                await asyncio.sleep(10)
                                money1 = random.randint(4500, 16000)
                                # fine: debit the author
                                sqlinsert = f"UPDATE dinheiro SET dinheiro = {valores_lidos2[0] - (money1 / 2)} WHERE id = {ctx.author.id}"
                                cursor.execute(sqlinsert)
                                mydb.commit()
                                # fine: debit member1
                                sqlinsert = f"UPDATE dinheiro SET dinheiro = {valores_lidos3[0] - (money1 / 2)} WHERE id = {member1.id}"
                                cursor.execute(sqlinsert)
                                mydb.commit()
                                embed2 = discord.Embed(
                                    description=f'<:error:788824695184424980> Vocês perderam **{money1} fenicoins** após pagar uma multa por tentar roubar o banco! Cada um vai perder **{money1 / 2} fenicoins**! **valeu a tentativa {ctx.author.name} e {member1.name}!** <a:Top:815388123039006741>',
                                    color=0xFF0000)
                                await b.edit(embed=embed2)
                            else:
                                await asyncio.sleep(3.5)
                                await a.edit(content=f':white_check_mark: o(a) **{member1.name}** aceitou!!')
                                await asyncio.sleep(3.5)
                                b = await ctx.channel.send(
                                    embed=discord.Embed(title='<a:loading:785523240944664646> Roubando o banco'
                                                        ))
                                await asyncio.sleep(10)
                                money1 = random.randint(4500, 16000)
                                # update the author's balance
                                # NOTE(review): this branch credits the author's
                                # row from valores_lidos3 (member1's balance)
                                # and vice versa — the baselines look swapped
                                # compared to the num1 == 1 branch; confirm.
                                sqlinsert = f"UPDATE dinheiro SET dinheiro = {valores_lidos3[0] + (money1 / 2)} WHERE id = {ctx.author.id}"
                                cursor.execute(sqlinsert)
                                mydb.commit()
                                # update member1's balance
                                sqlinsert = f"UPDATE dinheiro SET dinheiro = {valores_lidos2[0] + (money1 / 2)} WHERE id = {member1.id}"
                                cursor.execute(sqlinsert)
                                mydb.commit()
                                embed2 = discord.Embed(
                                    description=f':white_check_mark: Vocês conseguiram sair com **{money1} fenicoins** roubados! Cada um vai ficar com **{money1 / 2} fenicoins**! **Parabéns {ctx.author.name} e {member1.name}!** <a:Top:815388123039006741>',
                                    color=0x00FF00)
                                await b.edit(embed=embed2)
                        else:
                            # any other DM answer counts as a refusal
                            await member1.send(embed=discord.Embed(title=':white_check_mark: Roubo recusado com sucesso!',
                                                                   color=0x00FF00))
                            await a.delete()
                            await ctx.channel.send(embed=discord.Embed(
                                title=f'<:error:788824695184424980> O **{member1.name}** não aceitou o roubo!',
                                color=0xFF0000))
                            ctx.command.reset_cooldown(ctx)
                else:
                    await ctx.channel.send('**<:error:788824695184424980> roubo cancelado**')
                    ctx.command.reset_cooldown(ctx)
    else:
        await ctx.channel.send('**<:error:788824695184424980> Parece que você não possui uma arma!**')
        ctx.command.reset_cooldown(ctx)
| 38.475073
| 291
| 0.538338
| 5,514
| 52,480
| 5.082517
| 0.078346
| 0.031472
| 0.045495
| 0.057627
| 0.806922
| 0.773452
| 0.755468
| 0.738591
| 0.709866
| 0.680785
| 0
| 0.082669
| 0.355069
| 52,480
| 1,363
| 292
| 38.503302
| 0.745347
| 0.008556
| 0
| 0.71928
| 0
| 0.073093
| 0.309266
| 0.048743
| 0
| 0
| 0.008576
| 0
| 0
| 1
| 0.013771
| false
| 0.003178
| 0.010593
| 0.013771
| 0.084746
| 0.01589
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9924427d0fba393f8d369d7c2c9afa1bd85a741d
| 21,471
|
py
|
Python
|
projects/cats/tests/07.py
|
jjllzhang/CS61A
|
57b68c7c06999210d96499f6d84e4ec99085d396
|
[
"MIT"
] | 1
|
2022-01-22T11:45:01.000Z
|
2022-01-22T11:45:01.000Z
|
projects/cats/tests/07.py
|
jjllzhang/CS61A
|
57b68c7c06999210d96499f6d84e4ec99085d396
|
[
"MIT"
] | null | null | null |
projects/cats/tests/07.py
|
jjllzhang/CS61A
|
57b68c7c06999210d96499f6d84e4ec99085d396
|
[
"MIT"
] | null | null | null |
test = {
'name': 'Problem 7',
'points': 3,
'suites': [
{
'cases': [
{
'code': r"""
>>> big_limit = 10
>>> meowstake_matches("wird", "wiry", big_limit)
1
>>> meowstake_matches("wird", "bird", big_limit)
1
>>> meowstake_matches("wird", "wir", big_limit)
1
>>> meowstake_matches("wird", "bwird", big_limit)
1
>>> meowstake_matches("speling", "spelling", big_limit)
1
>>> meowstake_matches("used", "use", big_limit)
1
>>> meowstake_matches("hash", "ash", big_limit)
1
>>> meowstake_matches("ash", "hash", big_limit)
1
>>> meowstake_matches("roses", "arose", big_limit) # roses -> aroses -> arose
2
>>> meowstake_matches("tesng", "testing", big_limit) # tesng -> testng -> testing
2
>>> meowstake_matches("rlogcul", "logical", big_limit) # rlogcul -> logcul -> logicul -> logical
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> small_words_list = ["spell", "nest", "test", "pest", "best", "bird", "wired",
... "abstraction", "abstract", "wire", "peeling", "gestate",
... "west", "spelling", "bastion"]
>>> autocorrect("speling", small_words_list, meowstake_matches, 10)
'spelling'
>>> autocorrect("abstrction", small_words_list, meowstake_matches, 10)
'abstraction'
>>> autocorrect("wird", small_words_list, meowstake_matches, 10)
'bird'
>>> autocorrect("gest", small_words_list, meowstake_matches, 10)
'nest'
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> # ***Check that the recursion stops when the limit is reached***
>>> import trace, io
>>> from contextlib import redirect_stdout
>>> with io.StringIO() as buf, redirect_stdout(buf):
... trace.Trace(trace=True).runfunc(meowstake_matches, "someawe", "awesome", 3)
... output = buf.getvalue()
>>> len([line for line in output.split('\n') if 'funcname' in line]) < 1000
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('thong', 'thong', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('place', 'wreat', 100)
5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('pray', 'okee', 100)
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('cloit', 'cloit', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('yond', 'snd', 100)
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('tb', 'tb', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('gobi', 'gobi', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('watap', 'woitap', 100)
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('baffy', 'btfi', k) > k for k in range(5)])
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('else', 'konak', k) > k for k in range(5)])
5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('zygon', 'jzon', k) > k for k in range(5)])
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('lar', 'lar', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('shop', 'wopd', 100)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('pc', 'pc', k) > k for k in range(2)])
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('sail', 'sail', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('fiber', 'fbk', 100)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('doff', 'def', 100)
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('meile', 'mqeile', 100)
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('donor', 'doinor', k) > k for k in range(6)])
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('meet', 'meeu', k) > k for k in range(4)])
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('tic', 'tih', k) > k for k in range(3)])
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('taft', 'hewer', k) > k for k in range(5)])
5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('moorn', 'toxa', k) > k for k in range(5)])
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('hamal', 'hamal', k) > k for k in range(5)])
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('pridy', 'dance', 100)
5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('dekko', 'zbk', 100)
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('julio', 'juio', k) > k for k in range(5)])
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('boist', 'spume', k) > k for k in range(5)])
5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('jail', 'jaila', 100)
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('cumin', 'goes', 100)
5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('civil', 'whose', k) > k for k in range(5)])
5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('stead', 'ny', k) > k for k in range(5)])
5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('mikie', 'mdiye', 100)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('utils', 'utils', k) > k for k in range(5)])
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('nuque', 'nuq', k) > k for k in range(5)])
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('chine', 'ziinx', k) > k for k in range(5)])
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('tour', 'erase', k) > k for k in range(5)])
5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('ak', 'rose', 100)
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('sawah', 'shape', k) > k for k in range(5)])
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('elb', 'logia', 100)
5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('noily', 'oibs', 100)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('fluid', 'grad', 100)
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('titer', 'tskhteur', 100)
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('shood', 'shood', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('sher', 'xdhe', 100)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('dayal', 'qualm', 100)
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('tenai', 'whata', 100)
5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('bow', 'how', 100)
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('tony', 'togqq', k) > k for k in range(5)])
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('baby', 'ton', k) > k for k in range(4)])
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('seron', 'seron', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('tame', 'tfme', k) > k for k in range(4)])
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('kissy', 'kisdsxk', 100)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('str', 'st', k) > k for k in range(3)])
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('enema', 'nemr', 100)
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('beden', 'beden', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('coral', 'coral', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('hack', 'rhack', 100)
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('alan', 'alan', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('aru', 'aru', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('tail', 'taiil', 100)
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('corps', 'ckcp', 100)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('kazi', 'kazi', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('bone', 'bone', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('dee', 'derv', k) > k for k in range(4)])
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('fuder', 'fuder', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('harl', 'hhtar', 100)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('def', 'df', 100)
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('moio', 'yomo', 100)
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('amnia', 'wna', k) > k for k in range(5)])
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('pair', 'pair', k) > k for k in range(4)])
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('peai', 'eabi', k) > k for k in range(4)])
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('pryse', 'prysvf', k) > k for k in range(6)])
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('amelu', 'samp', 100)
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('weak', 'wk', 100)
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('atelo', 'atelo', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('uc', 'kc', 100)
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('strew', 'jaup', k) > k for k in range(5)])
5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('dome', 'dume', k) > k for k in range(4)])
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('braze', 'sxaze', 100)
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('zaman', 'zadpamn', 100)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('twank', 'renne', 100)
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('pinky', 'opiky', k) > k for k in range(5)])
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('spoke', 'spoke', k) > k for k in range(5)])
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('recto', 'recto', k) > k for k in range(5)])
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('ula', 'ula', 100)
0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('dame', 'froth', 100)
5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('grane', 'griae', 100)
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('cycad', 'cqcad', 100)
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('creem', 'ashreem', 100)
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('alky', 'alfy', k) > k for k in range(4)])
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('finds', 'fid', k) > k for k in range(5)])
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('argot', 'arxgot', k) > k for k in range(6)])
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('lc', 'roost', 100)
5
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('mi', 'iran', 100)
4
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('faded', 'fabehc', k) > k for k in range(6)])
3
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('slee', 'ble', k) > k for k in range(4)])
2
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> meowstake_matches('macro', 'macr', 100)
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('bbs', 'bbj', k) > k for k in range(3)])
1
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> sum([meowstake_matches('roud', 'roud', k) > k for k in range(4)])
0
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
>>> from cats import meowstake_matches, autocorrect
""",
'teardown': '',
'type': 'doctest'
}
]
}
| 24.482326
| 106
| 0.348703
| 1,716
| 21,471
| 4.280886
| 0.164336
| 0.254833
| 0.238361
| 0.308467
| 0.772665
| 0.755649
| 0.720937
| 0.709502
| 0.70324
| 0.676559
| 0
| 0.030813
| 0.473988
| 21,471
| 876
| 107
| 24.510274
| 0.619621
| 0
| 0
| 0.471461
| 0
| 0.049087
| 0.585907
| 0.145871
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.003425
| 0
| 0.003425
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9937dca1a52f7735c72b5888b0b9fa53cf98c139
| 87
|
py
|
Python
|
point_gcn/__init__.py
|
gyshgx868/pc-classification
|
1667f08785e89bbe475fe7b4dbf8141a29d8c371
|
[
"MIT"
] | 7
|
2020-10-23T10:23:14.000Z
|
2021-10-06T02:04:02.000Z
|
point_gcn/__init__.py
|
gyshgx868/pc-classification
|
1667f08785e89bbe475fe7b4dbf8141a29d8c371
|
[
"MIT"
] | null | null | null |
point_gcn/__init__.py
|
gyshgx868/pc-classification
|
1667f08785e89bbe475fe7b4dbf8141a29d8c371
|
[
"MIT"
] | null | null | null |
from point_gcn import dataset
from point_gcn import models
from point_gcn import tools
| 21.75
| 29
| 0.862069
| 15
| 87
| 4.8
| 0.466667
| 0.375
| 0.5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137931
| 87
| 3
| 30
| 29
| 0.96
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
9996b3aaeac54e992f7a30031977cade9cb8257e
| 31,057
|
py
|
Python
|
OttBands1min/vars.py
|
ysdede/jesse_strategies
|
ade9f4ba42cec11207c766d267b9d8feb8bce648
|
[
"CC0-1.0"
] | 38
|
2021-09-18T15:33:28.000Z
|
2022-02-21T17:29:08.000Z
|
OttBands1min/vars.py
|
ysdede/jesse_strategies
|
ade9f4ba42cec11207c766d267b9d8feb8bce648
|
[
"CC0-1.0"
] | 4
|
2022-01-02T14:46:12.000Z
|
2022-02-16T18:39:41.000Z
|
OttBands1min/vars.py
|
ysdede/jesse_strategies
|
ade9f4ba42cec11207c766d267b9d8feb8bce648
|
[
"CC0-1.0"
] | 11
|
2021-10-19T06:21:43.000Z
|
2022-02-21T17:29:10.000Z
|
# len = 1001
tp_qtys = (
(0.0, 0.0, 0.0, 0.0, 1.0),
(0.0, 0.0, 0.0, 0.1, 0.9),
(0.0, 0.0, 0.0, 0.2, 0.8),
(0.0, 0.0, 0.0, 0.3, 0.7),
(0.0, 0.0, 0.0, 0.4, 0.6),
(0.0, 0.0, 0.0, 0.5, 0.5),
(0.0, 0.0, 0.0, 0.6, 0.4),
(0.0, 0.0, 0.0, 0.7, 0.3),
(0.0, 0.0, 0.0, 0.8, 0.2),
(0.0, 0.0, 0.0, 0.9, 0.1),
(0.0, 0.0, 0.0, 1.0, 0.0),
(0.0, 0.0, 0.1, 0.0, 0.9),
(0.0, 0.0, 0.1, 0.1, 0.8),
(0.0, 0.0, 0.1, 0.2, 0.7),
(0.0, 0.0, 0.1, 0.3, 0.6),
(0.0, 0.0, 0.1, 0.4, 0.5),
(0.0, 0.0, 0.1, 0.5, 0.4),
(0.0, 0.0, 0.1, 0.6, 0.3),
(0.0, 0.0, 0.1, 0.7, 0.2),
(0.0, 0.0, 0.1, 0.8, 0.1),
(0.0, 0.0, 0.1, 0.9, 0.0),
(0.0, 0.0, 0.2, 0.0, 0.8),
(0.0, 0.0, 0.2, 0.1, 0.7),
(0.0, 0.0, 0.2, 0.2, 0.6),
(0.0, 0.0, 0.2, 0.3, 0.5),
(0.0, 0.0, 0.2, 0.4, 0.4),
(0.0, 0.0, 0.2, 0.5, 0.3),
(0.0, 0.0, 0.2, 0.6, 0.2),
(0.0, 0.0, 0.2, 0.7, 0.1),
(0.0, 0.0, 0.2, 0.8, 0.0),
(0.0, 0.0, 0.3, 0.0, 0.7),
(0.0, 0.0, 0.3, 0.1, 0.6),
(0.0, 0.0, 0.3, 0.2, 0.5),
(0.0, 0.0, 0.3, 0.3, 0.4),
(0.0, 0.0, 0.3, 0.4, 0.3),
(0.0, 0.0, 0.3, 0.5, 0.2),
(0.0, 0.0, 0.3, 0.6, 0.1),
(0.0, 0.0, 0.3, 0.7, 0.0),
(0.0, 0.0, 0.4, 0.0, 0.6),
(0.0, 0.0, 0.4, 0.1, 0.5),
(0.0, 0.0, 0.4, 0.2, 0.4),
(0.0, 0.0, 0.4, 0.3, 0.3),
(0.0, 0.0, 0.4, 0.4, 0.2),
(0.0, 0.0, 0.4, 0.5, 0.1),
(0.0, 0.0, 0.4, 0.6, 0.0),
(0.0, 0.0, 0.5, 0.0, 0.5),
(0.0, 0.0, 0.5, 0.1, 0.4),
(0.0, 0.0, 0.5, 0.2, 0.3),
(0.0, 0.0, 0.5, 0.3, 0.2),
(0.0, 0.0, 0.5, 0.4, 0.1),
(0.0, 0.0, 0.5, 0.5, 0.0),
(0.0, 0.0, 0.6, 0.0, 0.4),
(0.0, 0.0, 0.6, 0.1, 0.3),
(0.0, 0.0, 0.6, 0.2, 0.2),
(0.0, 0.0, 0.6, 0.3, 0.1),
(0.0, 0.0, 0.6, 0.4, 0.0),
(0.0, 0.0, 0.7, 0.0, 0.3),
(0.0, 0.0, 0.7, 0.1, 0.2),
(0.0, 0.0, 0.7, 0.2, 0.1),
(0.0, 0.0, 0.7, 0.3, 0.0),
(0.0, 0.0, 0.8, 0.0, 0.2),
(0.0, 0.0, 0.8, 0.1, 0.1),
(0.0, 0.0, 0.8, 0.2, 0.0),
(0.0, 0.0, 0.9, 0.0, 0.1),
(0.0, 0.0, 0.9, 0.1, 0.0),
(0.0, 0.0, 1.0, 0.0, 0.0),
(0.0, 0.1, 0.0, 0.0, 0.9),
(0.0, 0.1, 0.0, 0.1, 0.8),
(0.0, 0.1, 0.0, 0.2, 0.7),
(0.0, 0.1, 0.0, 0.3, 0.6),
(0.0, 0.1, 0.0, 0.4, 0.5),
(0.0, 0.1, 0.0, 0.5, 0.4),
(0.0, 0.1, 0.0, 0.6, 0.3),
(0.0, 0.1, 0.0, 0.7, 0.2),
(0.0, 0.1, 0.0, 0.8, 0.1),
(0.0, 0.1, 0.0, 0.9, 0.0),
(0.0, 0.1, 0.1, 0.0, 0.8),
(0.0, 0.1, 0.1, 0.1, 0.7),
(0.0, 0.1, 0.1, 0.2, 0.6),
(0.0, 0.1, 0.1, 0.3, 0.5),
(0.0, 0.1, 0.1, 0.4, 0.4),
(0.0, 0.1, 0.1, 0.5, 0.3),
(0.0, 0.1, 0.1, 0.6, 0.2),
(0.0, 0.1, 0.1, 0.7, 0.1),
(0.0, 0.1, 0.1, 0.8, 0.0),
(0.0, 0.1, 0.2, 0.0, 0.7),
(0.0, 0.1, 0.2, 0.1, 0.6),
(0.0, 0.1, 0.2, 0.2, 0.5),
(0.0, 0.1, 0.2, 0.3, 0.4),
(0.0, 0.1, 0.2, 0.4, 0.3),
(0.0, 0.1, 0.2, 0.5, 0.2),
(0.0, 0.1, 0.2, 0.6, 0.1),
(0.0, 0.1, 0.2, 0.7, 0.0),
(0.0, 0.1, 0.3, 0.0, 0.6),
(0.0, 0.1, 0.3, 0.1, 0.5),
(0.0, 0.1, 0.3, 0.2, 0.4),
(0.0, 0.1, 0.3, 0.3, 0.3),
(0.0, 0.1, 0.3, 0.4, 0.2),
(0.0, 0.1, 0.3, 0.5, 0.1),
(0.0, 0.1, 0.3, 0.6, 0.0),
(0.0, 0.1, 0.4, 0.0, 0.5),
(0.0, 0.1, 0.4, 0.1, 0.4),
(0.0, 0.1, 0.4, 0.2, 0.3),
(0.0, 0.1, 0.4, 0.3, 0.2),
(0.0, 0.1, 0.4, 0.4, 0.1),
(0.0, 0.1, 0.4, 0.5, 0.0),
(0.0, 0.1, 0.5, 0.0, 0.4),
(0.0, 0.1, 0.5, 0.1, 0.3),
(0.0, 0.1, 0.5, 0.2, 0.2),
(0.0, 0.1, 0.5, 0.3, 0.1),
(0.0, 0.1, 0.5, 0.4, 0.0),
(0.0, 0.1, 0.6, 0.0, 0.3),
(0.0, 0.1, 0.6, 0.1, 0.2),
(0.0, 0.1, 0.6, 0.2, 0.1),
(0.0, 0.1, 0.6, 0.3, 0.0),
(0.0, 0.1, 0.7, 0.0, 0.2),
(0.0, 0.1, 0.7, 0.1, 0.1),
(0.0, 0.1, 0.7, 0.2, 0.0),
(0.0, 0.1, 0.8, 0.0, 0.1),
(0.0, 0.1, 0.8, 0.1, 0.0),
(0.0, 0.1, 0.9, 0.0, 0.0),
(0.0, 0.2, 0.0, 0.0, 0.8),
(0.0, 0.2, 0.0, 0.1, 0.7),
(0.0, 0.2, 0.0, 0.2, 0.6),
(0.0, 0.2, 0.0, 0.3, 0.5),
(0.0, 0.2, 0.0, 0.4, 0.4),
(0.0, 0.2, 0.0, 0.5, 0.3),
(0.0, 0.2, 0.0, 0.6, 0.2),
(0.0, 0.2, 0.0, 0.7, 0.1),
(0.0, 0.2, 0.0, 0.8, 0.0),
(0.0, 0.2, 0.1, 0.0, 0.7),
(0.0, 0.2, 0.1, 0.1, 0.6),
(0.0, 0.2, 0.1, 0.2, 0.5),
(0.0, 0.2, 0.1, 0.3, 0.4),
(0.0, 0.2, 0.1, 0.4, 0.3),
(0.0, 0.2, 0.1, 0.5, 0.2),
(0.0, 0.2, 0.1, 0.6, 0.1),
(0.0, 0.2, 0.1, 0.7, 0.0),
(0.0, 0.2, 0.2, 0.0, 0.6),
(0.0, 0.2, 0.2, 0.1, 0.5),
(0.0, 0.2, 0.2, 0.2, 0.4),
(0.0, 0.2, 0.2, 0.3, 0.3),
(0.0, 0.2, 0.2, 0.4, 0.2),
(0.0, 0.2, 0.2, 0.5, 0.1),
(0.0, 0.2, 0.2, 0.6, 0.0),
(0.0, 0.2, 0.3, 0.0, 0.5),
(0.0, 0.2, 0.3, 0.1, 0.4),
(0.0, 0.2, 0.3, 0.2, 0.3),
(0.0, 0.2, 0.3, 0.3, 0.2),
(0.0, 0.2, 0.3, 0.4, 0.1),
(0.0, 0.2, 0.3, 0.5, 0.0),
(0.0, 0.2, 0.4, 0.0, 0.4),
(0.0, 0.2, 0.4, 0.1, 0.3),
(0.0, 0.2, 0.4, 0.2, 0.2),
(0.0, 0.2, 0.4, 0.3, 0.1),
(0.0, 0.2, 0.4, 0.4, 0.0),
(0.0, 0.2, 0.5, 0.0, 0.3),
(0.0, 0.2, 0.5, 0.1, 0.2),
(0.0, 0.2, 0.5, 0.2, 0.1),
(0.0, 0.2, 0.5, 0.3, 0.0),
(0.0, 0.2, 0.6, 0.0, 0.2),
(0.0, 0.2, 0.6, 0.1, 0.1),
(0.0, 0.2, 0.6, 0.2, 0.0),
(0.0, 0.2, 0.7, 0.0, 0.1),
(0.0, 0.2, 0.7, 0.1, 0.0),
(0.0, 0.2, 0.8, 0.0, 0.0),
(0.0, 0.3, 0.0, 0.0, 0.7),
(0.0, 0.3, 0.0, 0.1, 0.6),
(0.0, 0.3, 0.0, 0.2, 0.5),
(0.0, 0.3, 0.0, 0.3, 0.4),
(0.0, 0.3, 0.0, 0.4, 0.3),
(0.0, 0.3, 0.0, 0.5, 0.2),
(0.0, 0.3, 0.0, 0.6, 0.1),
(0.0, 0.3, 0.0, 0.7, 0.0),
(0.0, 0.3, 0.1, 0.0, 0.6),
(0.0, 0.3, 0.1, 0.1, 0.5),
(0.0, 0.3, 0.1, 0.2, 0.4),
(0.0, 0.3, 0.1, 0.3, 0.3),
(0.0, 0.3, 0.1, 0.4, 0.2),
(0.0, 0.3, 0.1, 0.5, 0.1),
(0.0, 0.3, 0.1, 0.6, 0.0),
(0.0, 0.3, 0.2, 0.0, 0.5),
(0.0, 0.3, 0.2, 0.1, 0.4),
(0.0, 0.3, 0.2, 0.2, 0.3),
(0.0, 0.3, 0.2, 0.3, 0.2),
(0.0, 0.3, 0.2, 0.4, 0.1),
(0.0, 0.3, 0.2, 0.5, 0.0),
(0.0, 0.3, 0.3, 0.0, 0.4),
(0.0, 0.3, 0.3, 0.1, 0.3),
(0.0, 0.3, 0.3, 0.2, 0.2),
(0.0, 0.3, 0.3, 0.3, 0.1),
(0.0, 0.3, 0.3, 0.4, 0.0),
(0.0, 0.3, 0.4, 0.0, 0.3),
(0.0, 0.3, 0.4, 0.1, 0.2),
(0.0, 0.3, 0.4, 0.2, 0.1),
(0.0, 0.3, 0.4, 0.3, 0.0),
(0.0, 0.3, 0.5, 0.0, 0.2),
(0.0, 0.3, 0.5, 0.1, 0.1),
(0.0, 0.3, 0.5, 0.2, 0.0),
(0.0, 0.3, 0.6, 0.0, 0.1),
(0.0, 0.3, 0.6, 0.1, 0.0),
(0.0, 0.3, 0.7, 0.0, 0.0),
(0.0, 0.4, 0.0, 0.0, 0.6),
(0.0, 0.4, 0.0, 0.1, 0.5),
(0.0, 0.4, 0.0, 0.2, 0.4),
(0.0, 0.4, 0.0, 0.3, 0.3),
(0.0, 0.4, 0.0, 0.4, 0.2),
(0.0, 0.4, 0.0, 0.5, 0.1),
(0.0, 0.4, 0.0, 0.6, 0.0),
(0.0, 0.4, 0.1, 0.0, 0.5),
(0.0, 0.4, 0.1, 0.1, 0.4),
(0.0, 0.4, 0.1, 0.2, 0.3),
(0.0, 0.4, 0.1, 0.3, 0.2),
(0.0, 0.4, 0.1, 0.4, 0.1),
(0.0, 0.4, 0.1, 0.5, 0.0),
(0.0, 0.4, 0.2, 0.0, 0.4),
(0.0, 0.4, 0.2, 0.1, 0.3),
(0.0, 0.4, 0.2, 0.2, 0.2),
(0.0, 0.4, 0.2, 0.3, 0.1),
(0.0, 0.4, 0.2, 0.4, 0.0),
(0.0, 0.4, 0.3, 0.0, 0.3),
(0.0, 0.4, 0.3, 0.1, 0.2),
(0.0, 0.4, 0.3, 0.2, 0.1),
(0.0, 0.4, 0.3, 0.3, 0.0),
(0.0, 0.4, 0.4, 0.0, 0.2),
(0.0, 0.4, 0.4, 0.1, 0.1),
(0.0, 0.4, 0.4, 0.2, 0.0),
(0.0, 0.4, 0.5, 0.0, 0.1),
(0.0, 0.4, 0.5, 0.1, 0.0),
(0.0, 0.4, 0.6, 0.0, 0.0),
(0.0, 0.5, 0.0, 0.0, 0.5),
(0.0, 0.5, 0.0, 0.1, 0.4),
(0.0, 0.5, 0.0, 0.2, 0.3),
(0.0, 0.5, 0.0, 0.3, 0.2),
(0.0, 0.5, 0.0, 0.4, 0.1),
(0.0, 0.5, 0.0, 0.5, 0.0),
(0.0, 0.5, 0.1, 0.0, 0.4),
(0.0, 0.5, 0.1, 0.1, 0.3),
(0.0, 0.5, 0.1, 0.2, 0.2),
(0.0, 0.5, 0.1, 0.3, 0.1),
(0.0, 0.5, 0.1, 0.4, 0.0),
(0.0, 0.5, 0.2, 0.0, 0.3),
(0.0, 0.5, 0.2, 0.1, 0.2),
(0.0, 0.5, 0.2, 0.2, 0.1),
(0.0, 0.5, 0.2, 0.3, 0.0),
(0.0, 0.5, 0.3, 0.0, 0.2),
(0.0, 0.5, 0.3, 0.1, 0.1),
(0.0, 0.5, 0.3, 0.2, 0.0),
(0.0, 0.5, 0.4, 0.0, 0.1),
(0.0, 0.5, 0.4, 0.1, 0.0),
(0.0, 0.5, 0.5, 0.0, 0.0),
(0.0, 0.6, 0.0, 0.0, 0.4),
(0.0, 0.6, 0.0, 0.1, 0.3),
(0.0, 0.6, 0.0, 0.2, 0.2),
(0.0, 0.6, 0.0, 0.3, 0.1),
(0.0, 0.6, 0.0, 0.4, 0.0),
(0.0, 0.6, 0.1, 0.0, 0.3),
(0.0, 0.6, 0.1, 0.1, 0.2),
(0.0, 0.6, 0.1, 0.2, 0.1),
(0.0, 0.6, 0.1, 0.3, 0.0),
(0.0, 0.6, 0.2, 0.0, 0.2),
(0.0, 0.6, 0.2, 0.1, 0.1),
(0.0, 0.6, 0.2, 0.2, 0.0),
(0.0, 0.6, 0.3, 0.0, 0.1),
(0.0, 0.6, 0.3, 0.1, 0.0),
(0.0, 0.6, 0.4, 0.0, 0.0),
(0.0, 0.7, 0.0, 0.0, 0.3),
(0.0, 0.7, 0.0, 0.1, 0.2),
(0.0, 0.7, 0.0, 0.2, 0.1),
(0.0, 0.7, 0.0, 0.3, 0.0),
(0.0, 0.7, 0.1, 0.0, 0.2),
(0.0, 0.7, 0.1, 0.1, 0.1),
(0.0, 0.7, 0.1, 0.2, 0.0),
(0.0, 0.7, 0.2, 0.0, 0.1),
(0.0, 0.7, 0.2, 0.1, 0.0),
(0.0, 0.7, 0.3, 0.0, 0.0),
(0.0, 0.8, 0.0, 0.0, 0.2),
(0.0, 0.8, 0.0, 0.1, 0.1),
(0.0, 0.8, 0.0, 0.2, 0.0),
(0.0, 0.8, 0.1, 0.0, 0.1),
(0.0, 0.8, 0.1, 0.1, 0.0),
(0.0, 0.8, 0.2, 0.0, 0.0),
(0.0, 0.9, 0.0, 0.0, 0.1),
(0.0, 0.9, 0.0, 0.1, 0.0),
(0.0, 0.9, 0.1, 0.0, 0.0),
(0.0, 1.0, 0.0, 0.0, 0.0),
(0.1, 0.0, 0.0, 0.0, 0.9),
(0.1, 0.0, 0.0, 0.1, 0.8),
(0.1, 0.0, 0.0, 0.2, 0.7),
(0.1, 0.0, 0.0, 0.3, 0.6),
(0.1, 0.0, 0.0, 0.4, 0.5),
(0.1, 0.0, 0.0, 0.5, 0.4),
(0.1, 0.0, 0.0, 0.6, 0.3),
(0.1, 0.0, 0.0, 0.7, 0.2),
(0.1, 0.0, 0.0, 0.8, 0.1),
(0.1, 0.0, 0.0, 0.9, 0.0),
(0.1, 0.0, 0.1, 0.0, 0.8),
(0.1, 0.0, 0.1, 0.1, 0.7),
(0.1, 0.0, 0.1, 0.2, 0.6),
(0.1, 0.0, 0.1, 0.3, 0.5),
(0.1, 0.0, 0.1, 0.4, 0.4),
(0.1, 0.0, 0.1, 0.5, 0.3),
(0.1, 0.0, 0.1, 0.6, 0.2),
(0.1, 0.0, 0.1, 0.7, 0.1),
(0.1, 0.0, 0.1, 0.8, 0.0),
(0.1, 0.0, 0.2, 0.0, 0.7),
(0.1, 0.0, 0.2, 0.1, 0.6),
(0.1, 0.0, 0.2, 0.2, 0.5),
(0.1, 0.0, 0.2, 0.3, 0.4),
(0.1, 0.0, 0.2, 0.4, 0.3),
(0.1, 0.0, 0.2, 0.5, 0.2),
(0.1, 0.0, 0.2, 0.6, 0.1),
(0.1, 0.0, 0.2, 0.7, 0.0),
(0.1, 0.0, 0.3, 0.0, 0.6),
(0.1, 0.0, 0.3, 0.1, 0.5),
(0.1, 0.0, 0.3, 0.2, 0.4),
(0.1, 0.0, 0.3, 0.3, 0.3),
(0.1, 0.0, 0.3, 0.4, 0.2),
(0.1, 0.0, 0.3, 0.5, 0.1),
(0.1, 0.0, 0.3, 0.6, 0.0),
(0.1, 0.0, 0.4, 0.0, 0.5),
(0.1, 0.0, 0.4, 0.1, 0.4),
(0.1, 0.0, 0.4, 0.2, 0.3),
(0.1, 0.0, 0.4, 0.3, 0.2),
(0.1, 0.0, 0.4, 0.4, 0.1),
(0.1, 0.0, 0.4, 0.5, 0.0),
(0.1, 0.0, 0.5, 0.0, 0.4),
(0.1, 0.0, 0.5, 0.1, 0.3),
(0.1, 0.0, 0.5, 0.2, 0.2),
(0.1, 0.0, 0.5, 0.3, 0.1),
(0.1, 0.0, 0.5, 0.4, 0.0),
(0.1, 0.0, 0.6, 0.0, 0.3),
(0.1, 0.0, 0.6, 0.1, 0.2),
(0.1, 0.0, 0.6, 0.2, 0.1),
(0.1, 0.0, 0.6, 0.3, 0.0),
(0.1, 0.0, 0.7, 0.0, 0.2),
(0.1, 0.0, 0.7, 0.1, 0.1),
(0.1, 0.0, 0.7, 0.2, 0.0),
(0.1, 0.0, 0.8, 0.0, 0.1),
(0.1, 0.0, 0.8, 0.1, 0.0),
(0.1, 0.0, 0.9, 0.0, 0.0),
(0.1, 0.1, 0.0, 0.0, 0.8),
(0.1, 0.1, 0.0, 0.1, 0.7),
(0.1, 0.1, 0.0, 0.2, 0.6),
(0.1, 0.1, 0.0, 0.3, 0.5),
(0.1, 0.1, 0.0, 0.4, 0.4),
(0.1, 0.1, 0.0, 0.5, 0.3),
(0.1, 0.1, 0.0, 0.6, 0.2),
(0.1, 0.1, 0.0, 0.7, 0.1),
(0.1, 0.1, 0.0, 0.8, 0.0),
(0.1, 0.1, 0.1, 0.0, 0.7),
(0.1, 0.1, 0.1, 0.1, 0.6),
(0.1, 0.1, 0.1, 0.2, 0.5),
(0.1, 0.1, 0.1, 0.3, 0.4),
(0.1, 0.1, 0.1, 0.4, 0.3),
(0.1, 0.1, 0.1, 0.5, 0.2),
(0.1, 0.1, 0.1, 0.6, 0.1),
(0.1, 0.1, 0.1, 0.7, 0.0),
(0.1, 0.1, 0.2, 0.0, 0.6),
(0.1, 0.1, 0.2, 0.1, 0.5),
(0.1, 0.1, 0.2, 0.2, 0.4),
(0.1, 0.1, 0.2, 0.3, 0.3),
(0.1, 0.1, 0.2, 0.4, 0.2),
(0.1, 0.1, 0.2, 0.5, 0.1),
(0.1, 0.1, 0.2, 0.6, 0.0),
(0.1, 0.1, 0.3, 0.0, 0.5),
(0.1, 0.1, 0.3, 0.1, 0.4),
(0.1, 0.1, 0.3, 0.2, 0.3),
(0.1, 0.1, 0.3, 0.3, 0.2),
(0.1, 0.1, 0.3, 0.4, 0.1),
(0.1, 0.1, 0.3, 0.5, 0.0),
(0.1, 0.1, 0.4, 0.0, 0.4),
(0.1, 0.1, 0.4, 0.1, 0.3),
(0.1, 0.1, 0.4, 0.2, 0.2),
(0.1, 0.1, 0.4, 0.3, 0.1),
(0.1, 0.1, 0.4, 0.4, 0.0),
(0.1, 0.1, 0.5, 0.0, 0.3),
(0.1, 0.1, 0.5, 0.1, 0.2),
(0.1, 0.1, 0.5, 0.2, 0.1),
(0.1, 0.1, 0.5, 0.3, 0.0),
(0.1, 0.1, 0.6, 0.0, 0.2),
(0.1, 0.1, 0.6, 0.1, 0.1),
(0.1, 0.1, 0.6, 0.2, 0.0),
(0.1, 0.1, 0.7, 0.0, 0.1),
(0.1, 0.1, 0.7, 0.1, 0.0),
(0.1, 0.1, 0.8, 0.0, 0.0),
(0.1, 0.2, 0.0, 0.0, 0.7),
(0.1, 0.2, 0.0, 0.1, 0.6),
(0.1, 0.2, 0.0, 0.2, 0.5),
(0.1, 0.2, 0.0, 0.3, 0.4),
(0.1, 0.2, 0.0, 0.4, 0.3),
(0.1, 0.2, 0.0, 0.5, 0.2),
(0.1, 0.2, 0.0, 0.6, 0.1),
(0.1, 0.2, 0.0, 0.7, 0.0),
(0.1, 0.2, 0.1, 0.0, 0.6),
(0.1, 0.2, 0.1, 0.1, 0.5),
(0.1, 0.2, 0.1, 0.2, 0.4),
(0.1, 0.2, 0.1, 0.3, 0.3),
(0.1, 0.2, 0.1, 0.4, 0.2),
(0.1, 0.2, 0.1, 0.5, 0.1),
(0.1, 0.2, 0.1, 0.6, 0.0),
(0.1, 0.2, 0.2, 0.0, 0.5),
(0.1, 0.2, 0.2, 0.1, 0.4),
(0.1, 0.2, 0.2, 0.2, 0.3),
(0.1, 0.2, 0.2, 0.3, 0.2),
(0.1, 0.2, 0.2, 0.4, 0.1),
(0.1, 0.2, 0.2, 0.5, 0.0),
(0.1, 0.2, 0.3, 0.0, 0.4),
(0.1, 0.2, 0.3, 0.1, 0.3),
(0.1, 0.2, 0.3, 0.2, 0.2),
(0.1, 0.2, 0.3, 0.3, 0.1),
(0.1, 0.2, 0.3, 0.4, 0.0),
(0.1, 0.2, 0.4, 0.0, 0.3),
(0.1, 0.2, 0.4, 0.1, 0.2),
(0.1, 0.2, 0.4, 0.2, 0.1),
(0.1, 0.2, 0.4, 0.3, 0.0),
(0.1, 0.2, 0.5, 0.0, 0.2),
(0.1, 0.2, 0.5, 0.1, 0.1),
(0.1, 0.2, 0.5, 0.2, 0.0),
(0.1, 0.2, 0.6, 0.0, 0.1),
(0.1, 0.2, 0.6, 0.1, 0.0),
(0.1, 0.2, 0.7, 0.0, 0.0),
(0.1, 0.3, 0.0, 0.0, 0.6),
(0.1, 0.3, 0.0, 0.1, 0.5),
(0.1, 0.3, 0.0, 0.2, 0.4),
(0.1, 0.3, 0.0, 0.3, 0.3),
(0.1, 0.3, 0.0, 0.4, 0.2),
(0.1, 0.3, 0.0, 0.5, 0.1),
(0.1, 0.3, 0.0, 0.6, 0.0),
(0.1, 0.3, 0.1, 0.0, 0.5),
(0.1, 0.3, 0.1, 0.1, 0.4),
(0.1, 0.3, 0.1, 0.2, 0.3),
(0.1, 0.3, 0.1, 0.3, 0.2),
(0.1, 0.3, 0.1, 0.4, 0.1),
(0.1, 0.3, 0.1, 0.5, 0.0),
(0.1, 0.3, 0.2, 0.0, 0.4),
(0.1, 0.3, 0.2, 0.1, 0.3),
(0.1, 0.3, 0.2, 0.2, 0.2),
(0.1, 0.3, 0.2, 0.3, 0.1),
(0.1, 0.3, 0.2, 0.4, 0.0),
(0.1, 0.3, 0.3, 0.0, 0.3),
(0.1, 0.3, 0.3, 0.1, 0.2),
(0.1, 0.3, 0.3, 0.2, 0.1),
(0.1, 0.3, 0.3, 0.3, 0.0),
(0.1, 0.3, 0.4, 0.0, 0.2),
(0.1, 0.3, 0.4, 0.1, 0.1),
(0.1, 0.3, 0.4, 0.2, 0.0),
(0.1, 0.3, 0.5, 0.0, 0.1),
(0.1, 0.3, 0.5, 0.1, 0.0),
(0.1, 0.3, 0.6, 0.0, 0.0),
(0.1, 0.4, 0.0, 0.0, 0.5),
(0.1, 0.4, 0.0, 0.1, 0.4),
(0.1, 0.4, 0.0, 0.2, 0.3),
(0.1, 0.4, 0.0, 0.3, 0.2),
(0.1, 0.4, 0.0, 0.4, 0.1),
(0.1, 0.4, 0.0, 0.5, 0.0),
(0.1, 0.4, 0.1, 0.0, 0.4),
(0.1, 0.4, 0.1, 0.1, 0.3),
(0.1, 0.4, 0.1, 0.2, 0.2),
(0.1, 0.4, 0.1, 0.3, 0.1),
(0.1, 0.4, 0.1, 0.4, 0.0),
(0.1, 0.4, 0.2, 0.0, 0.3),
(0.1, 0.4, 0.2, 0.1, 0.2),
(0.1, 0.4, 0.2, 0.2, 0.1),
(0.1, 0.4, 0.2, 0.3, 0.0),
(0.1, 0.4, 0.3, 0.0, 0.2),
(0.1, 0.4, 0.3, 0.1, 0.1),
(0.1, 0.4, 0.3, 0.2, 0.0),
(0.1, 0.4, 0.4, 0.0, 0.1),
(0.1, 0.4, 0.4, 0.1, 0.0),
(0.1, 0.4, 0.5, 0.0, 0.0),
(0.1, 0.5, 0.0, 0.0, 0.4),
(0.1, 0.5, 0.0, 0.1, 0.3),
(0.1, 0.5, 0.0, 0.2, 0.2),
(0.1, 0.5, 0.0, 0.3, 0.1),
(0.1, 0.5, 0.0, 0.4, 0.0),
(0.1, 0.5, 0.1, 0.0, 0.3),
(0.1, 0.5, 0.1, 0.1, 0.2),
(0.1, 0.5, 0.1, 0.2, 0.1),
(0.1, 0.5, 0.1, 0.3, 0.0),
(0.1, 0.5, 0.2, 0.0, 0.2),
(0.1, 0.5, 0.2, 0.1, 0.1),
(0.1, 0.5, 0.2, 0.2, 0.0),
(0.1, 0.5, 0.3, 0.0, 0.1),
(0.1, 0.5, 0.3, 0.1, 0.0),
(0.1, 0.5, 0.4, 0.0, 0.0),
(0.1, 0.6, 0.0, 0.0, 0.3),
(0.1, 0.6, 0.0, 0.1, 0.2),
(0.1, 0.6, 0.0, 0.2, 0.1),
(0.1, 0.6, 0.0, 0.3, 0.0),
(0.1, 0.6, 0.1, 0.0, 0.2),
(0.1, 0.6, 0.1, 0.1, 0.1),
(0.1, 0.6, 0.1, 0.2, 0.0),
(0.1, 0.6, 0.2, 0.0, 0.1),
(0.1, 0.6, 0.2, 0.1, 0.0),
(0.1, 0.6, 0.3, 0.0, 0.0),
(0.1, 0.7, 0.0, 0.0, 0.2),
(0.1, 0.7, 0.0, 0.1, 0.1),
(0.1, 0.7, 0.0, 0.2, 0.0),
(0.1, 0.7, 0.1, 0.0, 0.1),
(0.1, 0.7, 0.1, 0.1, 0.0),
(0.1, 0.7, 0.2, 0.0, 0.0),
(0.1, 0.8, 0.0, 0.0, 0.1),
(0.1, 0.8, 0.0, 0.1, 0.0),
(0.1, 0.8, 0.1, 0.0, 0.0),
(0.1, 0.9, 0.0, 0.0, 0.0),
(0.2, 0.0, 0.0, 0.0, 0.8),
(0.2, 0.0, 0.0, 0.1, 0.7),
(0.2, 0.0, 0.0, 0.2, 0.6),
(0.2, 0.0, 0.0, 0.3, 0.5),
(0.2, 0.0, 0.0, 0.4, 0.4),
(0.2, 0.0, 0.0, 0.5, 0.3),
(0.2, 0.0, 0.0, 0.6, 0.2),
(0.2, 0.0, 0.0, 0.7, 0.1),
(0.2, 0.0, 0.0, 0.8, 0.0),
(0.2, 0.0, 0.1, 0.0, 0.7),
(0.2, 0.0, 0.1, 0.1, 0.6),
(0.2, 0.0, 0.1, 0.2, 0.5),
(0.2, 0.0, 0.1, 0.3, 0.4),
(0.2, 0.0, 0.1, 0.4, 0.3),
(0.2, 0.0, 0.1, 0.5, 0.2),
(0.2, 0.0, 0.1, 0.6, 0.1),
(0.2, 0.0, 0.1, 0.7, 0.0),
(0.2, 0.0, 0.2, 0.0, 0.6),
(0.2, 0.0, 0.2, 0.1, 0.5),
(0.2, 0.0, 0.2, 0.2, 0.4),
(0.2, 0.0, 0.2, 0.3, 0.3),
(0.2, 0.0, 0.2, 0.4, 0.2),
(0.2, 0.0, 0.2, 0.5, 0.1),
(0.2, 0.0, 0.2, 0.6, 0.0),
(0.2, 0.0, 0.3, 0.0, 0.5),
(0.2, 0.0, 0.3, 0.1, 0.4),
(0.2, 0.0, 0.3, 0.2, 0.3),
(0.2, 0.0, 0.3, 0.3, 0.2),
(0.2, 0.0, 0.3, 0.4, 0.1),
(0.2, 0.0, 0.3, 0.5, 0.0),
(0.2, 0.0, 0.4, 0.0, 0.4),
(0.2, 0.0, 0.4, 0.1, 0.3),
(0.2, 0.0, 0.4, 0.2, 0.2),
(0.2, 0.0, 0.4, 0.3, 0.1),
(0.2, 0.0, 0.4, 0.4, 0.0),
(0.2, 0.0, 0.5, 0.0, 0.3),
(0.2, 0.0, 0.5, 0.1, 0.2),
(0.2, 0.0, 0.5, 0.2, 0.1),
(0.2, 0.0, 0.5, 0.3, 0.0),
(0.2, 0.0, 0.6, 0.0, 0.2),
(0.2, 0.0, 0.6, 0.1, 0.1),
(0.2, 0.0, 0.6, 0.2, 0.0),
(0.2, 0.0, 0.7, 0.0, 0.1),
(0.2, 0.0, 0.7, 0.1, 0.0),
(0.2, 0.0, 0.8, 0.0, 0.0),
(0.2, 0.1, 0.0, 0.0, 0.7),
(0.2, 0.1, 0.0, 0.1, 0.6),
(0.2, 0.1, 0.0, 0.2, 0.5),
(0.2, 0.1, 0.0, 0.3, 0.4),
(0.2, 0.1, 0.0, 0.4, 0.3),
(0.2, 0.1, 0.0, 0.5, 0.2),
(0.2, 0.1, 0.0, 0.6, 0.1),
(0.2, 0.1, 0.0, 0.7, 0.0),
(0.2, 0.1, 0.1, 0.0, 0.6),
(0.2, 0.1, 0.1, 0.1, 0.5),
(0.2, 0.1, 0.1, 0.2, 0.4),
(0.2, 0.1, 0.1, 0.3, 0.3),
(0.2, 0.1, 0.1, 0.4, 0.2),
(0.2, 0.1, 0.1, 0.5, 0.1),
(0.2, 0.1, 0.1, 0.6, 0.0),
(0.2, 0.1, 0.2, 0.0, 0.5),
(0.2, 0.1, 0.2, 0.1, 0.4),
(0.2, 0.1, 0.2, 0.2, 0.3),
(0.2, 0.1, 0.2, 0.3, 0.2),
(0.2, 0.1, 0.2, 0.4, 0.1),
(0.2, 0.1, 0.2, 0.5, 0.0),
(0.2, 0.1, 0.3, 0.0, 0.4),
(0.2, 0.1, 0.3, 0.1, 0.3),
(0.2, 0.1, 0.3, 0.2, 0.2),
(0.2, 0.1, 0.3, 0.3, 0.1),
(0.2, 0.1, 0.3, 0.4, 0.0),
(0.2, 0.1, 0.4, 0.0, 0.3),
(0.2, 0.1, 0.4, 0.1, 0.2),
(0.2, 0.1, 0.4, 0.2, 0.1),
(0.2, 0.1, 0.4, 0.3, 0.0),
(0.2, 0.1, 0.5, 0.0, 0.2),
(0.2, 0.1, 0.5, 0.1, 0.1),
(0.2, 0.1, 0.5, 0.2, 0.0),
(0.2, 0.1, 0.6, 0.0, 0.1),
(0.2, 0.1, 0.6, 0.1, 0.0),
(0.2, 0.1, 0.7, 0.0, 0.0),
(0.2, 0.2, 0.0, 0.0, 0.6),
(0.2, 0.2, 0.0, 0.1, 0.5),
(0.2, 0.2, 0.0, 0.2, 0.4),
(0.2, 0.2, 0.0, 0.3, 0.3),
(0.2, 0.2, 0.0, 0.4, 0.2),
(0.2, 0.2, 0.0, 0.5, 0.1),
(0.2, 0.2, 0.0, 0.6, 0.0),
(0.2, 0.2, 0.1, 0.0, 0.5),
(0.2, 0.2, 0.1, 0.1, 0.4),
(0.2, 0.2, 0.1, 0.2, 0.3),
(0.2, 0.2, 0.1, 0.3, 0.2),
(0.2, 0.2, 0.1, 0.4, 0.1),
(0.2, 0.2, 0.1, 0.5, 0.0),
(0.2, 0.2, 0.2, 0.0, 0.4),
(0.2, 0.2, 0.2, 0.1, 0.3),
(0.2, 0.2, 0.2, 0.2, 0.2),
(0.2, 0.2, 0.2, 0.3, 0.1),
(0.2, 0.2, 0.2, 0.4, 0.0),
(0.2, 0.2, 0.3, 0.0, 0.3),
(0.2, 0.2, 0.3, 0.1, 0.2),
(0.2, 0.2, 0.3, 0.2, 0.1),
(0.2, 0.2, 0.3, 0.3, 0.0),
(0.2, 0.2, 0.4, 0.0, 0.2),
(0.2, 0.2, 0.4, 0.1, 0.1),
(0.2, 0.2, 0.4, 0.2, 0.0),
(0.2, 0.2, 0.5, 0.0, 0.1),
(0.2, 0.2, 0.5, 0.1, 0.0),
(0.2, 0.2, 0.6, 0.0, 0.0),
(0.2, 0.3, 0.0, 0.0, 0.5),
(0.2, 0.3, 0.0, 0.1, 0.4),
(0.2, 0.3, 0.0, 0.2, 0.3),
(0.2, 0.3, 0.0, 0.3, 0.2),
(0.2, 0.3, 0.0, 0.4, 0.1),
(0.2, 0.3, 0.0, 0.5, 0.0),
(0.2, 0.3, 0.1, 0.0, 0.4),
(0.2, 0.3, 0.1, 0.1, 0.3),
(0.2, 0.3, 0.1, 0.2, 0.2),
(0.2, 0.3, 0.1, 0.3, 0.1),
(0.2, 0.3, 0.1, 0.4, 0.0),
(0.2, 0.3, 0.2, 0.0, 0.3),
(0.2, 0.3, 0.2, 0.1, 0.2),
(0.2, 0.3, 0.2, 0.2, 0.1),
(0.2, 0.3, 0.2, 0.3, 0.0),
(0.2, 0.3, 0.3, 0.0, 0.2),
(0.2, 0.3, 0.3, 0.1, 0.1),
(0.2, 0.3, 0.3, 0.2, 0.0),
(0.2, 0.3, 0.4, 0.0, 0.1),
(0.2, 0.3, 0.4, 0.1, 0.0),
(0.2, 0.3, 0.5, 0.0, 0.0),
(0.2, 0.4, 0.0, 0.0, 0.4),
(0.2, 0.4, 0.0, 0.1, 0.3),
(0.2, 0.4, 0.0, 0.2, 0.2),
(0.2, 0.4, 0.0, 0.3, 0.1),
(0.2, 0.4, 0.0, 0.4, 0.0),
(0.2, 0.4, 0.1, 0.0, 0.3),
(0.2, 0.4, 0.1, 0.1, 0.2),
(0.2, 0.4, 0.1, 0.2, 0.1),
(0.2, 0.4, 0.1, 0.3, 0.0),
(0.2, 0.4, 0.2, 0.0, 0.2),
(0.2, 0.4, 0.2, 0.1, 0.1),
(0.2, 0.4, 0.2, 0.2, 0.0),
(0.2, 0.4, 0.3, 0.0, 0.1),
(0.2, 0.4, 0.3, 0.1, 0.0),
(0.2, 0.4, 0.4, 0.0, 0.0),
(0.2, 0.5, 0.0, 0.0, 0.3),
(0.2, 0.5, 0.0, 0.1, 0.2),
(0.2, 0.5, 0.0, 0.2, 0.1),
(0.2, 0.5, 0.0, 0.3, 0.0),
(0.2, 0.5, 0.1, 0.0, 0.2),
(0.2, 0.5, 0.1, 0.1, 0.1),
(0.2, 0.5, 0.1, 0.2, 0.0),
(0.2, 0.5, 0.2, 0.0, 0.1),
(0.2, 0.5, 0.2, 0.1, 0.0),
(0.2, 0.5, 0.3, 0.0, 0.0),
(0.2, 0.6, 0.0, 0.0, 0.2),
(0.2, 0.6, 0.0, 0.1, 0.1),
(0.2, 0.6, 0.0, 0.2, 0.0),
(0.2, 0.6, 0.1, 0.0, 0.1),
(0.2, 0.6, 0.1, 0.1, 0.0),
(0.2, 0.6, 0.2, 0.0, 0.0),
(0.2, 0.7, 0.0, 0.0, 0.1),
(0.2, 0.7, 0.0, 0.1, 0.0),
(0.2, 0.7, 0.1, 0.0, 0.0),
(0.2, 0.8, 0.0, 0.0, 0.0),
(0.3, 0.0, 0.0, 0.0, 0.7),
(0.3, 0.0, 0.0, 0.1, 0.6),
(0.3, 0.0, 0.0, 0.2, 0.5),
(0.3, 0.0, 0.0, 0.3, 0.4),
(0.3, 0.0, 0.0, 0.4, 0.3),
(0.3, 0.0, 0.0, 0.5, 0.2),
(0.3, 0.0, 0.0, 0.6, 0.1),
(0.3, 0.0, 0.0, 0.7, 0.0),
(0.3, 0.0, 0.1, 0.0, 0.6),
(0.3, 0.0, 0.1, 0.1, 0.5),
(0.3, 0.0, 0.1, 0.2, 0.4),
(0.3, 0.0, 0.1, 0.3, 0.3),
(0.3, 0.0, 0.1, 0.4, 0.2),
(0.3, 0.0, 0.1, 0.5, 0.1),
(0.3, 0.0, 0.1, 0.6, 0.0),
(0.3, 0.0, 0.2, 0.0, 0.5),
(0.3, 0.0, 0.2, 0.1, 0.4),
(0.3, 0.0, 0.2, 0.2, 0.3),
(0.3, 0.0, 0.2, 0.3, 0.2),
(0.3, 0.0, 0.2, 0.4, 0.1),
(0.3, 0.0, 0.2, 0.5, 0.0),
(0.3, 0.0, 0.3, 0.0, 0.4),
(0.3, 0.0, 0.3, 0.1, 0.3),
(0.3, 0.0, 0.3, 0.2, 0.2),
(0.3, 0.0, 0.3, 0.3, 0.1),
(0.3, 0.0, 0.3, 0.4, 0.0),
(0.3, 0.0, 0.4, 0.0, 0.3),
(0.3, 0.0, 0.4, 0.1, 0.2),
(0.3, 0.0, 0.4, 0.2, 0.1),
(0.3, 0.0, 0.4, 0.3, 0.0),
(0.3, 0.0, 0.5, 0.0, 0.2),
(0.3, 0.0, 0.5, 0.1, 0.1),
(0.3, 0.0, 0.5, 0.2, 0.0),
(0.3, 0.0, 0.6, 0.0, 0.1),
(0.3, 0.0, 0.6, 0.1, 0.0),
(0.3, 0.0, 0.7, 0.0, 0.0),
(0.3, 0.1, 0.0, 0.0, 0.6),
(0.3, 0.1, 0.0, 0.1, 0.5),
(0.3, 0.1, 0.0, 0.2, 0.4),
(0.3, 0.1, 0.0, 0.3, 0.3),
(0.3, 0.1, 0.0, 0.4, 0.2),
(0.3, 0.1, 0.0, 0.5, 0.1),
(0.3, 0.1, 0.0, 0.6, 0.0),
(0.3, 0.1, 0.1, 0.0, 0.5),
(0.3, 0.1, 0.1, 0.1, 0.4),
(0.3, 0.1, 0.1, 0.2, 0.3),
(0.3, 0.1, 0.1, 0.3, 0.2),
(0.3, 0.1, 0.1, 0.4, 0.1),
(0.3, 0.1, 0.1, 0.5, 0.0),
(0.3, 0.1, 0.2, 0.0, 0.4),
(0.3, 0.1, 0.2, 0.1, 0.3),
(0.3, 0.1, 0.2, 0.2, 0.2),
(0.3, 0.1, 0.2, 0.3, 0.1),
(0.3, 0.1, 0.2, 0.4, 0.0),
(0.3, 0.1, 0.3, 0.0, 0.3),
(0.3, 0.1, 0.3, 0.1, 0.2),
(0.3, 0.1, 0.3, 0.2, 0.1),
(0.3, 0.1, 0.3, 0.3, 0.0),
(0.3, 0.1, 0.4, 0.0, 0.2),
(0.3, 0.1, 0.4, 0.1, 0.1),
(0.3, 0.1, 0.4, 0.2, 0.0),
(0.3, 0.1, 0.5, 0.0, 0.1),
(0.3, 0.1, 0.5, 0.1, 0.0),
(0.3, 0.1, 0.6, 0.0, 0.0),
(0.3, 0.2, 0.0, 0.0, 0.5),
(0.3, 0.2, 0.0, 0.1, 0.4),
(0.3, 0.2, 0.0, 0.2, 0.3),
(0.3, 0.2, 0.0, 0.3, 0.2),
(0.3, 0.2, 0.0, 0.4, 0.1),
(0.3, 0.2, 0.0, 0.5, 0.0),
(0.3, 0.2, 0.1, 0.0, 0.4),
(0.3, 0.2, 0.1, 0.1, 0.3),
(0.3, 0.2, 0.1, 0.2, 0.2),
(0.3, 0.2, 0.1, 0.3, 0.1),
(0.3, 0.2, 0.1, 0.4, 0.0),
(0.3, 0.2, 0.2, 0.0, 0.3),
(0.3, 0.2, 0.2, 0.1, 0.2),
(0.3, 0.2, 0.2, 0.2, 0.1),
(0.3, 0.2, 0.2, 0.3, 0.0),
(0.3, 0.2, 0.3, 0.0, 0.2),
(0.3, 0.2, 0.3, 0.1, 0.1),
(0.3, 0.2, 0.3, 0.2, 0.0),
(0.3, 0.2, 0.4, 0.0, 0.1),
(0.3, 0.2, 0.4, 0.1, 0.0),
(0.3, 0.2, 0.5, 0.0, 0.0),
(0.3, 0.3, 0.0, 0.0, 0.4),
(0.3, 0.3, 0.0, 0.1, 0.3),
(0.3, 0.3, 0.0, 0.2, 0.2),
(0.3, 0.3, 0.0, 0.3, 0.1),
(0.3, 0.3, 0.0, 0.4, 0.0),
(0.3, 0.3, 0.1, 0.0, 0.3),
(0.3, 0.3, 0.1, 0.1, 0.2),
(0.3, 0.3, 0.1, 0.2, 0.1),
(0.3, 0.3, 0.1, 0.3, 0.0),
(0.3, 0.3, 0.2, 0.0, 0.2),
(0.3, 0.3, 0.2, 0.1, 0.1),
(0.3, 0.3, 0.2, 0.2, 0.0),
(0.3, 0.3, 0.3, 0.0, 0.1),
(0.3, 0.3, 0.3, 0.1, 0.0),
(0.3, 0.3, 0.4, 0.0, 0.0),
(0.3, 0.4, 0.0, 0.0, 0.3),
(0.3, 0.4, 0.0, 0.1, 0.2),
(0.3, 0.4, 0.0, 0.2, 0.1),
(0.3, 0.4, 0.0, 0.3, 0.0),
(0.3, 0.4, 0.1, 0.0, 0.2),
(0.3, 0.4, 0.1, 0.1, 0.1),
(0.3, 0.4, 0.1, 0.2, 0.0),
(0.3, 0.4, 0.2, 0.0, 0.1),
(0.3, 0.4, 0.2, 0.1, 0.0),
(0.3, 0.4, 0.3, 0.0, 0.0),
(0.3, 0.5, 0.0, 0.0, 0.2),
(0.3, 0.5, 0.0, 0.1, 0.1),
(0.3, 0.5, 0.0, 0.2, 0.0),
(0.3, 0.5, 0.1, 0.0, 0.1),
(0.3, 0.5, 0.1, 0.1, 0.0),
(0.3, 0.5, 0.2, 0.0, 0.0),
(0.3, 0.6, 0.0, 0.0, 0.1),
(0.3, 0.6, 0.0, 0.1, 0.0),
(0.3, 0.6, 0.1, 0.0, 0.0),
(0.3, 0.7, 0.0, 0.0, 0.0),
(0.4, 0.0, 0.0, 0.0, 0.6),
(0.4, 0.0, 0.0, 0.1, 0.5),
(0.4, 0.0, 0.0, 0.2, 0.4),
(0.4, 0.0, 0.0, 0.3, 0.3),
(0.4, 0.0, 0.0, 0.4, 0.2),
(0.4, 0.0, 0.0, 0.5, 0.1),
(0.4, 0.0, 0.0, 0.6, 0.0),
(0.4, 0.0, 0.1, 0.0, 0.5),
(0.4, 0.0, 0.1, 0.1, 0.4),
(0.4, 0.0, 0.1, 0.2, 0.3),
(0.4, 0.0, 0.1, 0.3, 0.2),
(0.4, 0.0, 0.1, 0.4, 0.1),
(0.4, 0.0, 0.1, 0.5, 0.0),
(0.4, 0.0, 0.2, 0.0, 0.4),
(0.4, 0.0, 0.2, 0.1, 0.3),
(0.4, 0.0, 0.2, 0.2, 0.2),
(0.4, 0.0, 0.2, 0.3, 0.1),
(0.4, 0.0, 0.2, 0.4, 0.0),
(0.4, 0.0, 0.3, 0.0, 0.3),
(0.4, 0.0, 0.3, 0.1, 0.2),
(0.4, 0.0, 0.3, 0.2, 0.1),
(0.4, 0.0, 0.3, 0.3, 0.0),
(0.4, 0.0, 0.4, 0.0, 0.2),
(0.4, 0.0, 0.4, 0.1, 0.1),
(0.4, 0.0, 0.4, 0.2, 0.0),
(0.4, 0.0, 0.5, 0.0, 0.1),
(0.4, 0.0, 0.5, 0.1, 0.0),
(0.4, 0.0, 0.6, 0.0, 0.0),
(0.4, 0.1, 0.0, 0.0, 0.5),
(0.4, 0.1, 0.0, 0.1, 0.4),
(0.4, 0.1, 0.0, 0.2, 0.3),
(0.4, 0.1, 0.0, 0.3, 0.2),
(0.4, 0.1, 0.0, 0.4, 0.1),
(0.4, 0.1, 0.0, 0.5, 0.0),
(0.4, 0.1, 0.1, 0.0, 0.4),
(0.4, 0.1, 0.1, 0.1, 0.3),
(0.4, 0.1, 0.1, 0.2, 0.2),
(0.4, 0.1, 0.1, 0.3, 0.1),
(0.4, 0.1, 0.1, 0.4, 0.0),
(0.4, 0.1, 0.2, 0.0, 0.3),
(0.4, 0.1, 0.2, 0.1, 0.2),
(0.4, 0.1, 0.2, 0.2, 0.1),
(0.4, 0.1, 0.2, 0.3, 0.0),
(0.4, 0.1, 0.3, 0.0, 0.2),
(0.4, 0.1, 0.3, 0.1, 0.1),
(0.4, 0.1, 0.3, 0.2, 0.0),
(0.4, 0.1, 0.4, 0.0, 0.1),
(0.4, 0.1, 0.4, 0.1, 0.0),
(0.4, 0.1, 0.5, 0.0, 0.0),
(0.4, 0.2, 0.0, 0.0, 0.4),
(0.4, 0.2, 0.0, 0.1, 0.3),
(0.4, 0.2, 0.0, 0.2, 0.2),
(0.4, 0.2, 0.0, 0.3, 0.1),
(0.4, 0.2, 0.0, 0.4, 0.0),
(0.4, 0.2, 0.1, 0.0, 0.3),
(0.4, 0.2, 0.1, 0.1, 0.2),
(0.4, 0.2, 0.1, 0.2, 0.1),
(0.4, 0.2, 0.1, 0.3, 0.0),
(0.4, 0.2, 0.2, 0.0, 0.2),
(0.4, 0.2, 0.2, 0.1, 0.1),
(0.4, 0.2, 0.2, 0.2, 0.0),
(0.4, 0.2, 0.3, 0.0, 0.1),
(0.4, 0.2, 0.3, 0.1, 0.0),
(0.4, 0.2, 0.4, 0.0, 0.0),
(0.4, 0.3, 0.0, 0.0, 0.3),
(0.4, 0.3, 0.0, 0.1, 0.2),
(0.4, 0.3, 0.0, 0.2, 0.1),
(0.4, 0.3, 0.0, 0.3, 0.0),
(0.4, 0.3, 0.1, 0.0, 0.2),
(0.4, 0.3, 0.1, 0.1, 0.1),
(0.4, 0.3, 0.1, 0.2, 0.0),
(0.4, 0.3, 0.2, 0.0, 0.1),
(0.4, 0.3, 0.2, 0.1, 0.0),
(0.4, 0.3, 0.3, 0.0, 0.0),
(0.4, 0.4, 0.0, 0.0, 0.2),
(0.4, 0.4, 0.0, 0.1, 0.1),
(0.4, 0.4, 0.0, 0.2, 0.0),
(0.4, 0.4, 0.1, 0.0, 0.1),
(0.4, 0.4, 0.1, 0.1, 0.0),
(0.4, 0.4, 0.2, 0.0, 0.0),
(0.4, 0.5, 0.0, 0.0, 0.1),
(0.4, 0.5, 0.0, 0.1, 0.0),
(0.4, 0.5, 0.1, 0.0, 0.0),
(0.4, 0.6, 0.0, 0.0, 0.0),
(0.5, 0.0, 0.0, 0.0, 0.5),
(0.5, 0.0, 0.0, 0.1, 0.4),
(0.5, 0.0, 0.0, 0.2, 0.3),
(0.5, 0.0, 0.0, 0.3, 0.2),
(0.5, 0.0, 0.0, 0.4, 0.1),
(0.5, 0.0, 0.0, 0.5, 0.0),
(0.5, 0.0, 0.1, 0.0, 0.4),
(0.5, 0.0, 0.1, 0.1, 0.3),
(0.5, 0.0, 0.1, 0.2, 0.2),
(0.5, 0.0, 0.1, 0.3, 0.1),
(0.5, 0.0, 0.1, 0.4, 0.0),
(0.5, 0.0, 0.2, 0.0, 0.3),
(0.5, 0.0, 0.2, 0.1, 0.2),
(0.5, 0.0, 0.2, 0.2, 0.1),
(0.5, 0.0, 0.2, 0.3, 0.0),
(0.5, 0.0, 0.3, 0.0, 0.2),
(0.5, 0.0, 0.3, 0.1, 0.1),
(0.5, 0.0, 0.3, 0.2, 0.0),
(0.5, 0.0, 0.4, 0.0, 0.1),
(0.5, 0.0, 0.4, 0.1, 0.0),
(0.5, 0.0, 0.5, 0.0, 0.0),
(0.5, 0.1, 0.0, 0.0, 0.4),
(0.5, 0.1, 0.0, 0.1, 0.3),
(0.5, 0.1, 0.0, 0.2, 0.2),
(0.5, 0.1, 0.0, 0.3, 0.1),
(0.5, 0.1, 0.0, 0.4, 0.0),
(0.5, 0.1, 0.1, 0.0, 0.3),
(0.5, 0.1, 0.1, 0.1, 0.2),
(0.5, 0.1, 0.1, 0.2, 0.1),
(0.5, 0.1, 0.1, 0.3, 0.0),
(0.5, 0.1, 0.2, 0.0, 0.2),
(0.5, 0.1, 0.2, 0.1, 0.1),
(0.5, 0.1, 0.2, 0.2, 0.0),
(0.5, 0.1, 0.3, 0.0, 0.1),
(0.5, 0.1, 0.3, 0.1, 0.0),
(0.5, 0.1, 0.4, 0.0, 0.0),
(0.5, 0.2, 0.0, 0.0, 0.3),
(0.5, 0.2, 0.0, 0.1, 0.2),
(0.5, 0.2, 0.0, 0.2, 0.1),
(0.5, 0.2, 0.0, 0.3, 0.0),
(0.5, 0.2, 0.1, 0.0, 0.2),
(0.5, 0.2, 0.1, 0.1, 0.1),
(0.5, 0.2, 0.1, 0.2, 0.0),
(0.5, 0.2, 0.2, 0.0, 0.1),
(0.5, 0.2, 0.2, 0.1, 0.0),
(0.5, 0.2, 0.3, 0.0, 0.0),
(0.5, 0.3, 0.0, 0.0, 0.2),
(0.5, 0.3, 0.0, 0.1, 0.1),
(0.5, 0.3, 0.0, 0.2, 0.0),
(0.5, 0.3, 0.1, 0.0, 0.1),
(0.5, 0.3, 0.1, 0.1, 0.0),
(0.5, 0.3, 0.2, 0.0, 0.0),
(0.5, 0.4, 0.0, 0.0, 0.1),
(0.5, 0.4, 0.0, 0.1, 0.0),
(0.5, 0.4, 0.1, 0.0, 0.0),
(0.5, 0.5, 0.0, 0.0, 0.0),
(0.6, 0.0, 0.0, 0.0, 0.4),
(0.6, 0.0, 0.0, 0.1, 0.3),
(0.6, 0.0, 0.0, 0.2, 0.2),
(0.6, 0.0, 0.0, 0.3, 0.1),
(0.6, 0.0, 0.0, 0.4, 0.0),
(0.6, 0.0, 0.1, 0.0, 0.3),
(0.6, 0.0, 0.1, 0.1, 0.2),
(0.6, 0.0, 0.1, 0.2, 0.1),
(0.6, 0.0, 0.1, 0.3, 0.0),
(0.6, 0.0, 0.2, 0.0, 0.2),
(0.6, 0.0, 0.2, 0.1, 0.1),
(0.6, 0.0, 0.2, 0.2, 0.0),
(0.6, 0.0, 0.3, 0.0, 0.1),
(0.6, 0.0, 0.3, 0.1, 0.0),
(0.6, 0.0, 0.4, 0.0, 0.0),
(0.6, 0.1, 0.0, 0.0, 0.3),
(0.6, 0.1, 0.0, 0.1, 0.2),
(0.6, 0.1, 0.0, 0.2, 0.1),
(0.6, 0.1, 0.0, 0.3, 0.0),
(0.6, 0.1, 0.1, 0.0, 0.2),
(0.6, 0.1, 0.1, 0.1, 0.1),
(0.6, 0.1, 0.1, 0.2, 0.0),
(0.6, 0.1, 0.2, 0.0, 0.1),
(0.6, 0.1, 0.2, 0.1, 0.0),
(0.6, 0.1, 0.3, 0.0, 0.0),
(0.6, 0.2, 0.0, 0.0, 0.2),
(0.6, 0.2, 0.0, 0.1, 0.1),
(0.6, 0.2, 0.0, 0.2, 0.0),
(0.6, 0.2, 0.1, 0.0, 0.1),
(0.6, 0.2, 0.1, 0.1, 0.0),
(0.6, 0.2, 0.2, 0.0, 0.0),
(0.6, 0.3, 0.0, 0.0, 0.1),
(0.6, 0.3, 0.0, 0.1, 0.0),
(0.6, 0.3, 0.1, 0.0, 0.0),
(0.6, 0.4, 0.0, 0.0, 0.0),
(0.7, 0.0, 0.0, 0.0, 0.3),
(0.7, 0.0, 0.0, 0.1, 0.2),
(0.7, 0.0, 0.0, 0.2, 0.1),
(0.7, 0.0, 0.0, 0.3, 0.0),
(0.7, 0.0, 0.1, 0.0, 0.2),
(0.7, 0.0, 0.1, 0.1, 0.1),
(0.7, 0.0, 0.1, 0.2, 0.0),
(0.7, 0.0, 0.2, 0.0, 0.1),
(0.7, 0.0, 0.2, 0.1, 0.0),
(0.7, 0.0, 0.3, 0.0, 0.0),
(0.7, 0.1, 0.0, 0.0, 0.2),
(0.7, 0.1, 0.0, 0.1, 0.1),
(0.7, 0.1, 0.0, 0.2, 0.0),
(0.7, 0.1, 0.1, 0.0, 0.1),
(0.7, 0.1, 0.1, 0.1, 0.0),
(0.7, 0.1, 0.2, 0.0, 0.0),
(0.7, 0.2, 0.0, 0.0, 0.1),
(0.7, 0.2, 0.0, 0.1, 0.0),
(0.7, 0.2, 0.1, 0.0, 0.0),
(0.7, 0.3, 0.0, 0.0, 0.0),
(0.8, 0.0, 0.0, 0.0, 0.2),
(0.8, 0.0, 0.0, 0.1, 0.1),
(0.8, 0.0, 0.0, 0.2, 0.0),
(0.8, 0.0, 0.1, 0.0, 0.1),
(0.8, 0.0, 0.1, 0.1, 0.0),
(0.8, 0.0, 0.2, 0.0, 0.0),
(0.8, 0.1, 0.0, 0.0, 0.1),
(0.8, 0.1, 0.0, 0.1, 0.0),
(0.8, 0.1, 0.1, 0.0, 0.0),
(0.8, 0.2, 0.0, 0.0, 0.0),
(0.9, 0.0, 0.0, 0.0, 0.1),
(0.9, 0.0, 0.0, 0.1, 0.0),
(0.9, 0.0, 0.1, 0.0, 0.0),
(0.9, 0.1, 0.0, 0.0, 0.0),
(1.0, 0.0, 0.0, 0.0, 0.0)
)
| 30.902488
| 30
| 0.322761
| 10,014
| 31,057
| 1.000899
| 0.001398
| 0.570488
| 0.52619
| 0.262197
| 0.998703
| 0.998703
| 0.998703
| 0.998703
| 0.998603
| 0.998603
| 0
| 0.454356
| 0.290337
| 31,057
| 1,004
| 31
| 30.933267
| 0.000408
| 0.000322
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
5afa42f9ecb6f39d431db39a6f61afac94047040
| 34,781
|
py
|
Python
|
sdk/machinelearning/azure-mgmt-machinelearningcompute/azure/mgmt/machinelearningcompute/operations/operationalization_clusters_operations.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 8
|
2021-01-13T23:44:08.000Z
|
2021-03-17T10:13:36.000Z
|
sdk/machinelearning/azure-mgmt-machinelearningcompute/azure/mgmt/machinelearningcompute/operations/operationalization_clusters_operations.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 226
|
2019-07-24T07:57:21.000Z
|
2019-10-15T01:07:24.000Z
|
sdk/machinelearning/azure-mgmt-machinelearningcompute/azure/mgmt/machinelearningcompute/operations/operationalization_clusters_operations.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 3
|
2016-05-03T20:49:46.000Z
|
2017-10-05T21:05:27.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrestazure.azure_operation import AzureOperationPoller
from .. import models
class OperationalizationClustersOperations(object):
"""OperationalizationClustersOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: The version of the Microsoft.MachineLearningCompute resource provider API to use. Constant value: "2017-08-01-preview".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2017-08-01-preview"
self.config = config
    def create_or_update(
            self, resource_group_name, cluster_name, parameters, custom_headers=None, raw=False, **operation_config):
        """Create or update an operationalization cluster.

        Issues a PUT and, unless ``raw=True``, returns a poller that tracks
        the resulting long-running operation to completion.

        :param resource_group_name: Name of the resource group in which the
         cluster is located.
        :type resource_group_name: str
        :param cluster_name: The name of the cluster.
        :type cluster_name: str
        :param parameters: Parameters supplied to create or update an
         Operationalization cluster.
        :type parameters:
         ~azure.mgmt.machinelearningcompute.models.OperationalizationCluster
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :return: An instance of AzureOperationPoller that returns
         OperationalizationCluster or ClientRawResponse if raw=true
        :rtype:
         ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.machinelearningcompute.models.OperationalizationCluster]
         or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`ErrorResponseWrapperException<azure.mgmt.machinelearningcompute.models.ErrorResponseWrapperException>`
        """
        # Construct URL: fill the template with validated path parameters.
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningCompute/operationalizationClusters/{clusterName}'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=90, min_length=1, pattern=r'^[a-zA-Z][-\w\._\(\)]+[a-zA-Z0-9]$')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters (query string carries only the API version).
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Per-request client id so the call can be traced server-side.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct body
        body_content = self._serialize.body(parameters, 'OperationalizationCluster')
        # Construct and send request
        def long_running_send():
            # Initial PUT that starts the long-running operation.
            request = self._client.put(url, query_parameters)
            return self._client.send(
                request, header_parameters, body_content, **operation_config)
        def get_long_running_status(status_link, headers=None):
            # Polls the status URL returned by the service.
            request = self._client.get(status_link)
            if headers:
                request.headers.update(headers)
            return self._client.send(
                request, header_parameters, **operation_config)
        def get_long_running_output(response):
            # Maps the terminal response to the deserialized model
            # (or wraps it when raw=True was requested).
            if response.status_code not in [200, 201]:
                raise models.ErrorResponseWrapperException(self._deserialize, response)
            deserialized = None
            if response.status_code == 200:
                deserialized = self._deserialize('OperationalizationCluster', response)
            if response.status_code == 201:
                deserialized = self._deserialize('OperationalizationCluster', response)
            if raw:
                client_raw_response = ClientRawResponse(deserialized, response)
                return client_raw_response
            return deserialized
        if raw:
            # raw=True bypasses polling: send once and convert immediately.
            response = long_running_send()
            return get_long_running_output(response)
        long_running_operation_timeout = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        return AzureOperationPoller(
            long_running_send, get_long_running_output,
            get_long_running_status, long_running_operation_timeout)
def get(
self, resource_group_name, cluster_name, custom_headers=None, raw=False, **operation_config):
"""Gets the operationalization cluster resource view. Note that the
credentials are not returned by this call. Call ListKeys to get them.
:param resource_group_name: Name of the resource group in which the
cluster is located.
:type resource_group_name: str
:param cluster_name: The name of the cluster.
:type cluster_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: OperationalizationCluster or ClientRawResponse if raw=true
:rtype:
~azure.mgmt.machinelearningcompute.models.OperationalizationCluster or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseWrapperException<azure.mgmt.machinelearningcompute.models.ErrorResponseWrapperException>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningCompute/operationalizationClusters/{clusterName}'
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=90, min_length=1, pattern=r'^[a-zA-Z][-\w\._\(\)]+[a-zA-Z0-9]$')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
raise models.ErrorResponseWrapperException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationalizationCluster', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def update(
self, resource_group_name, cluster_name, tags=None, custom_headers=None, raw=False, **operation_config):
"""The PATCH operation can be used to update only the tags for a cluster.
Use PUT operation to update other properties.
:param resource_group_name: Name of the resource group in which the
cluster is located.
:type resource_group_name: str
:param cluster_name: The name of the cluster.
:type cluster_name: str
:param tags: Gets or sets a list of key value pairs that describe the
resource. These tags can be used in viewing and grouping this resource
(across resource groups). A maximum of 15 tags can be provided for a
resource. Each tag must have a key no greater in length than 128
characters and a value no greater in length than 256 characters.
:type tags: dict[str, str]
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: OperationalizationCluster or ClientRawResponse if raw=true
:rtype:
~azure.mgmt.machinelearningcompute.models.OperationalizationCluster or
~msrest.pipeline.ClientRawResponse
:raises:
:class:`ErrorResponseWrapperException<azure.mgmt.machinelearningcompute.models.ErrorResponseWrapperException>`
"""
parameters = models.OperationalizationClusterUpdateParameters(tags=tags)
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningCompute/operationalizationClusters/{clusterName}'
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=90, min_length=1, pattern=r'^[a-zA-Z][-\w\._\(\)]+[a-zA-Z0-9]$')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'OperationalizationClusterUpdateParameters')
# Construct and send request
request = self._client.patch(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, **operation_config)
if response.status_code not in [200]:
raise models.ErrorResponseWrapperException(self._deserialize, response)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationalizationCluster', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
    def delete(
            self, resource_group_name, cluster_name, delete_all=None, custom_headers=None, raw=False, **operation_config):
        """Deletes the specified cluster.

        Issues a DELETE and, unless ``raw=True``, returns a poller that
        tracks the resulting long-running operation to completion.

        :param resource_group_name: Name of the resource group in which the
         cluster is located.
        :type resource_group_name: str
        :param cluster_name: The name of the cluster.
        :type cluster_name: str
        :param delete_all: If true, deletes all resources associated with this
         cluster.
        :type delete_all: bool
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :return: An instance of AzureOperationPoller that returns None or
         ClientRawResponse if raw=true
        :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
         ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`ErrorResponseWrapperException<azure.mgmt.machinelearningcompute.models.ErrorResponseWrapperException>`
        """
        # Construct URL: fill the template with validated path parameters.
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningCompute/operationalizationClusters/{clusterName}'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=90, min_length=1, pattern=r'^[a-zA-Z][-\w\._\(\)]+[a-zA-Z0-9]$')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        if delete_all is not None:
            # Optional switch: also delete every resource owned by the cluster.
            query_parameters['deleteAll'] = self._serialize.query("delete_all", delete_all, 'bool')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Per-request client id so the call can be traced server-side.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        def long_running_send():
            # Initial DELETE that starts the long-running operation.
            request = self._client.delete(url, query_parameters)
            return self._client.send(request, header_parameters, **operation_config)
        def get_long_running_status(status_link, headers=None):
            # Polls the status URL returned by the service.
            request = self._client.get(status_link)
            if headers:
                request.headers.update(headers)
            return self._client.send(
                request, header_parameters, **operation_config)
        def get_long_running_output(response):
            # Delete returns no body; only the status code is checked.
            # Returns None on success unless raw output was requested.
            if response.status_code not in [202, 204]:
                raise models.ErrorResponseWrapperException(self._deserialize, response)
            if raw:
                client_raw_response = ClientRawResponse(None, response)
                client_raw_response.add_headers({
                    'Location': 'str',
                })
                return client_raw_response
        if raw:
            # raw=True bypasses polling: send once and convert immediately.
            response = long_running_send()
            return get_long_running_output(response)
        long_running_operation_timeout = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        return AzureOperationPoller(
            long_running_send, get_long_running_output,
            get_long_running_status, long_running_operation_timeout)
def list_keys(
self, resource_group_name, cluster_name, custom_headers=None, raw=False, **operation_config):
"""Gets the credentials for the specified cluster such as Storage, ACR and
ACS credentials. This is a long running operation because it fetches
keys from dependencies.
:param resource_group_name: Name of the resource group in which the
cluster is located.
:type resource_group_name: str
:param cluster_name: The name of the cluster.
:type cluster_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: OperationalizationClusterCredentials or ClientRawResponse if
raw=true
:rtype:
~azure.mgmt.machinelearningcompute.models.OperationalizationClusterCredentials
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningCompute/operationalizationClusters/{clusterName}/listKeys'
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=90, min_length=1, pattern=r'^[a-zA-Z][-\w\._\(\)]+[a-zA-Z0-9]$')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('OperationalizationClusterCredentials', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def check_system_services_updates_available(
self, resource_group_name, cluster_name, custom_headers=None, raw=False, **operation_config):
"""Checks if updates are available for system services in the cluster.
:param resource_group_name: Name of the resource group in which the
cluster is located.
:type resource_group_name: str
:param cluster_name: The name of the cluster.
:type cluster_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: CheckSystemServicesUpdatesAvailableResponse or
ClientRawResponse if raw=true
:rtype:
~azure.mgmt.machinelearningcompute.models.CheckSystemServicesUpdatesAvailableResponse
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningCompute/operationalizationClusters/{clusterName}/checkSystemServicesUpdatesAvailable'
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=90, min_length=1, pattern=r'^[a-zA-Z][-\w\._\(\)]+[a-zA-Z0-9]$')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('CheckSystemServicesUpdatesAvailableResponse', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
    def update_system_services(
            self, resource_group_name, cluster_name, custom_headers=None, raw=False, **operation_config):
        """Updates system services in a cluster.

        Issues a POST and, unless ``raw=True``, returns a poller that
        tracks the resulting long-running operation to completion.

        :param resource_group_name: Name of the resource group in which the
         cluster is located.
        :type resource_group_name: str
        :param cluster_name: The name of the cluster.
        :type cluster_name: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :return: An instance of AzureOperationPoller that returns
         UpdateSystemServicesResponse or ClientRawResponse if raw=true
        :rtype:
         ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.machinelearningcompute.models.UpdateSystemServicesResponse]
         or ~msrest.pipeline.ClientRawResponse
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # Construct URL: fill the template with validated path parameters.
        url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningCompute/operationalizationClusters/{clusterName}/updateSystemServices'
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
            'clusterName': self._serialize.url("cluster_name", cluster_name, 'str', max_length=90, min_length=1, pattern=r'^[a-zA-Z][-\w\._\(\)]+[a-zA-Z0-9]$')
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Per-request client id so the call can be traced server-side.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
        # Construct and send request
        def long_running_send():
            # Initial POST that starts the long-running operation.
            request = self._client.post(url, query_parameters)
            return self._client.send(request, header_parameters, **operation_config)
        def get_long_running_status(status_link, headers=None):
            # Polls the status URL returned by the service.
            request = self._client.get(status_link)
            if headers:
                request.headers.update(headers)
            return self._client.send(
                request, header_parameters, **operation_config)
        def get_long_running_output(response):
            # Maps the terminal response to the deserialized model
            # (or wraps it, with headers, when raw=True was requested).
            if response.status_code not in [200, 202]:
                exp = CloudError(response)
                exp.request_id = response.headers.get('x-ms-request-id')
                raise exp
            deserialized = None
            header_dict = {}
            if response.status_code == 200:
                deserialized = self._deserialize('UpdateSystemServicesResponse', response)
                header_dict = {
                    'Location': 'str',
                }
            if raw:
                client_raw_response = ClientRawResponse(deserialized, response)
                client_raw_response.add_headers(header_dict)
                return client_raw_response
            return deserialized
        if raw:
            # raw=True bypasses polling: send once and convert immediately.
            response = long_running_send()
            return get_long_running_output(response)
        long_running_operation_timeout = operation_config.get(
            'long_running_operation_timeout',
            self.config.long_running_operation_timeout)
        return AzureOperationPoller(
            long_running_send, get_long_running_output,
            get_long_running_status, long_running_operation_timeout)
def list_by_resource_group(
        self, resource_group_name, skiptoken=None, custom_headers=None, raw=False, **operation_config):
    """Gets the clusters in the specified resource group.

    Returns a lazily-paged collection; each page is fetched on demand by
    the nested ``internal_paging`` callable.

    :param resource_group_name: Name of the resource group in which the
     cluster is located.
    :type resource_group_name: str
    :param skiptoken: Continuation token for pagination.
    :type skiptoken: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: An iterator like instance of OperationalizationCluster
    :rtype:
     ~azure.mgmt.machinelearningcompute.models.OperationalizationClusterPaged[~azure.mgmt.machinelearningcompute.models.OperationalizationCluster]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    def internal_paging(next_link=None, raw=False):
        # First page: build the URL from the route template. Subsequent
        # pages arrive as a fully-formed next_link from the service, so
        # query parameters (api-version, $skiptoken) must NOT be re-applied.
        if not next_link:
            # Construct URL
            url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningCompute/operationalizationClusters'
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            if skiptoken is not None:
                query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')

        else:
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Correlation id so individual requests can be traced server-side.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, **operation_config)

        # Anything other than 200 is surfaced as a CloudError.
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response: the Paged class drives internal_paging lazily.
    deserialized = models.OperationalizationClusterPaged(internal_paging, self._deserialize.dependencies)

    if raw:
        header_dict = {}
        client_raw_response = models.OperationalizationClusterPaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
def list_by_subscription_id(
        self, skiptoken=None, custom_headers=None, raw=False, **operation_config):
    """Gets the operationalization clusters in the specified subscription.

    Returns a lazily-paged collection; each page is fetched on demand by
    the nested ``internal_paging`` callable.

    :param skiptoken: Continuation token for pagination.
    :type skiptoken: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: An iterator like instance of OperationalizationCluster
    :rtype:
     ~azure.mgmt.machinelearningcompute.models.OperationalizationClusterPaged[~azure.mgmt.machinelearningcompute.models.OperationalizationCluster]
    :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
    """
    def internal_paging(next_link=None, raw=False):
        # First page: build the URL from the route template. Subsequent
        # pages arrive as a fully-formed next_link from the service, so
        # query parameters must NOT be re-applied.
        if not next_link:
            # Construct URL
            url = '/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningCompute/operationalizationClusters'
            path_format_arguments = {
                'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
            }
            url = self._client.format_url(url, **path_format_arguments)

            # Construct parameters
            query_parameters = {}
            query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
            if skiptoken is not None:
                query_parameters['$skiptoken'] = self._serialize.query("skiptoken", skiptoken, 'str')

        else:
            url = next_link
            query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Correlation id so individual requests can be traced server-side.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request
        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, **operation_config)

        # Anything other than 200 is surfaced as a CloudError.
        if response.status_code not in [200]:
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        return response

    # Deserialize response: the Paged class drives internal_paging lazily.
    deserialized = models.OperationalizationClusterPaged(internal_paging, self._deserialize.dependencies)

    if raw:
        header_dict = {}
        client_raw_response = models.OperationalizationClusterPaged(internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response

    return deserialized
| 48.576816
| 202
| 0.671056
| 3,647
| 34,781
| 6.187003
| 0.072937
| 0.025705
| 0.030137
| 0.028718
| 0.884329
| 0.877859
| 0.867931
| 0.857782
| 0.848608
| 0.848608
| 0
| 0.005969
| 0.234151
| 34,781
| 715
| 203
| 48.644755
| 0.841123
| 0.269975
| 0
| 0.827027
| 0
| 0
| 0.183637
| 0.109949
| 0
| 0
| 0
| 0
| 0
| 1
| 0.056757
| false
| 0
| 0.013514
| 0
| 0.159459
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5aff9747fd0be0b2af66fb74e8f9516adfb401ea
| 5,208
|
py
|
Python
|
test/test_sequence_alignment.py
|
DavidHribek/pero-ocr
|
8d274282813878b3e31dd560563a36b3f02e5c33
|
[
"BSD-3-Clause"
] | 27
|
2020-03-20T08:25:39.000Z
|
2022-03-08T11:30:50.000Z
|
test/test_sequence_alignment.py
|
DavidHribek/pero-ocr
|
8d274282813878b3e31dd560563a36b3f02e5c33
|
[
"BSD-3-Clause"
] | 28
|
2020-02-11T17:27:35.000Z
|
2022-02-09T23:36:24.000Z
|
test/test_sequence_alignment.py
|
DavidHribek/pero-ocr
|
8d274282813878b3e31dd560563a36b3f02e5c33
|
[
"BSD-3-Clause"
] | 9
|
2020-03-16T12:22:03.000Z
|
2022-03-16T12:49:06.000Z
|
import unittest
from pero_ocr.sequence_alignment import levenshtein_distance
from pero_ocr.sequence_alignment import levenshtein_alignment
from pero_ocr.sequence_alignment import levenshtein_alignment_path
class TestLevenshteinDistance(unittest.TestCase):
    """Edit-distance sanity checks for ``levenshtein_distance``."""

    def test_trivial_match(self):
        src, tgt = ['a'], ['a']
        self.assertEqual(levenshtein_distance(src, tgt), 0)

    def test_trivial_substitution(self):
        src, tgt = ['a'], ['b']
        self.assertEqual(levenshtein_distance(src, tgt), 1)

    def test_trivial_insertion(self):
        src, tgt = ['a'], ['b', 'a']
        self.assertEqual(levenshtein_distance(src, tgt), 1)

    def test_trivial_deletion(self):
        src, tgt = ['a', 'b'], ['a']
        self.assertEqual(levenshtein_distance(src, tgt), 1)

    def test_inner_replacement(self):
        src, tgt = ['a', 'b', 'c'], ['a', 'x', 'y', 'c']
        self.assertEqual(levenshtein_distance(src, tgt), 2)

    def test_inner_replacement_rev(self):
        # Distance must be symmetric: swap the argument order.
        src, tgt = ['a', 'b', 'c'], ['a', 'x', 'y', 'c']
        self.assertEqual(levenshtein_distance(tgt, src), 2)

    def test_deletion_only(self):
        src, tgt = ['a', 'b', 'c'], []
        self.assertEqual(levenshtein_distance(src, tgt), 3)

    def test_insertion_only(self):
        src, tgt = [], ['a', 'b', 'c']
        self.assertEqual(levenshtein_distance(src, tgt), 3)
class TestLevenshteinAlignment(unittest.TestCase):
    """Checks for ``levenshtein_alignment``: pairs of (src, tgt) symbols,
    with ``None`` marking an insertion/deletion gap."""

    def test_trivial_match(self):
        src, tgt = ['a'], ['a']
        self.assertEqual(levenshtein_alignment(src, tgt), [('a', 'a')])

    def test_trivial_substitution(self):
        src, tgt = ['a'], ['b']
        self.assertEqual(levenshtein_alignment(src, tgt), [('a', 'b')])

    def test_trivial_insertion(self):
        src, tgt = ['a'], ['b', 'a']
        self.assertEqual(levenshtein_alignment(src, tgt), [(None, 'b'), ('a', 'a')])

    def test_trivial_deletion(self):
        src, tgt = ['a', 'b'], ['a']
        self.assertEqual(levenshtein_alignment(src, tgt), [('a', 'a'), ('b', None)])

    def test_inner_replacement(self):
        # Two optimal alignments exist; accept either.
        src, tgt = ['a', 'b', 'c'], ['a', 'x', 'y', 'c']
        self.assertTrue(
            levenshtein_alignment(src, tgt) in [
                [('a', 'a'), ('b', 'x'), (None, 'y'), ('c', 'c')],
                [('a', 'a'), (None, 'x'), ('b', 'y'), ('c', 'c')],
            ]
        )

    def test_inner_replacement_rev(self):
        # Mirror case: either placement of the gap is optimal.
        src, tgt = ['a', 'x', 'y', 'c'], ['a', 'b', 'c']
        self.assertTrue(
            levenshtein_alignment(src, tgt) in [
                [('a', 'a'), ('x', None), ('y', 'b'), ('c', 'c')],
                [('a', 'a'), ('x', 'b'), ('y', None), ('c', 'c')],
            ]
        )

    def test_deletion_only(self):
        src, tgt = ['a', 'b', 'c'], []
        self.assertEqual(levenshtein_alignment(src, tgt), [('a', None), ('b', None), ('c', None)])

    def test_insertion_only(self):
        src, tgt = [], ['a', 'b', 'c']
        self.assertEqual(levenshtein_alignment(src, tgt), [(None, 'a'), (None, 'b'), (None, 'c')])

    def test_alignment_to_eps(self):
        # A literal None in the input sequence aligns like any symbol.
        src, tgt = ['a', None, 'c'], ['a', 'b', 'c']
        self.assertEqual(levenshtein_alignment(src, tgt), [('a', 'a'), (None, 'b'), ('c', 'c')])

    def test_alignment_to_eps_rev(self):
        src, tgt = ['a', 'b', 'c'], ['a', None, 'c']
        self.assertEqual(levenshtein_alignment(src, tgt), [('a', 'a'), ('b', None), ('c', 'c')])
class TestLevenshteinAlignmentPath(unittest.TestCase):
    """Checks for ``levenshtein_alignment_path``: one code per aligned
    position (0 match/substitution, 1 deletion, -1 insertion)."""

    def test_trivial_match(self):
        src, tgt = ['a'], ['a']
        self.assertEqual(levenshtein_alignment_path(src, tgt), [0])

    def test_trivial_substitution(self):
        src, tgt = ['a'], ['b']
        self.assertEqual(levenshtein_alignment_path(src, tgt), [0])

    def test_trivial_insertion(self):
        src, tgt = ['a'], ['b', 'a']
        self.assertEqual(levenshtein_alignment_path(src, tgt), [-1, 0])

    def test_trivial_deletion(self):
        src, tgt = ['a', 'b'], ['a']
        self.assertEqual(levenshtein_alignment_path(src, tgt), [0, 1])

    def test_inner_replacement(self):
        # Two optimal paths exist; accept either.
        src, tgt = ['a', 'b', 'c'], ['a', 'x', 'y', 'c']
        self.assertTrue(
            levenshtein_alignment_path(src, tgt) in [
                [0, 0, -1, 0],
                [0, -1, 0, 0],
            ]
        )

    def test_inner_replacement_rev(self):
        src, tgt = ['a', 'x', 'y', 'c'], ['a', 'b', 'c']
        self.assertTrue(
            levenshtein_alignment_path(src, tgt) in [
                [0, 1, 0, 0],
                [0, 0, 1, 0],
            ]
        )

    def test_deletion_only(self):
        src, tgt = ['a', 'b', 'c'], []
        self.assertEqual(levenshtein_alignment_path(src, tgt), [1, 1, 1])

    def test_insertion_only(self):
        src, tgt = [], ['a', 'b', 'c']
        self.assertEqual(levenshtein_alignment_path(src, tgt), [-1, -1, -1])

    def test_alignment_to_eps(self):
        src, tgt = ['a', None, 'c'], ['a', 'b', 'c']
        self.assertEqual(levenshtein_alignment_path(src, tgt), [0, 0, 0])

    def test_alignment_to_eps_rev(self):
        src, tgt = ['a', 'b', 'c'], ['a', None, 'c']
        self.assertEqual(levenshtein_alignment_path(src, tgt), [0, 0, 0])
| 29.76
| 94
| 0.493856
| 650
| 5,208
| 3.783077
| 0.064615
| 0.050427
| 0.061
| 0.05978
| 0.926393
| 0.915413
| 0.913379
| 0.882066
| 0.821472
| 0.820659
| 0
| 0.011542
| 0.301267
| 5,208
| 174
| 95
| 29.931034
| 0.664193
| 0
| 0
| 0.726619
| 0
| 0
| 0.03149
| 0
| 0
| 0
| 0
| 0
| 0.201439
| 1
| 0.201439
| false
| 0
| 0.028777
| 0
| 0.251799
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
51d4e3f5b27effd9bda7762ef8f370ce4967d20e
| 5,956
|
py
|
Python
|
yt/utilities/answer_testing/boolean_region_tests.py
|
danielgrassinger/yt_new_frontend
|
5f91d2fb8721c4c5da0af543a6256ed979cd9fc9
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
yt/utilities/answer_testing/boolean_region_tests.py
|
danielgrassinger/yt_new_frontend
|
5f91d2fb8721c4c5da0af543a6256ed979cd9fc9
|
[
"BSD-3-Clause-Clear"
] | 1
|
2016-04-05T22:30:14.000Z
|
2016-04-05T22:30:14.000Z
|
yt/utilities/answer_testing/boolean_region_tests.py
|
danielgrassinger/yt_new_frontend
|
5f91d2fb8721c4c5da0af543a6256ed979cd9fc9
|
[
"BSD-3-Clause-Clear"
] | 1
|
2020-12-05T05:51:09.000Z
|
2020-12-05T05:51:09.000Z
|
from __future__ import absolute_import
from yt.mods import *
import matplotlib
import pylab
from .output_tests import SingleOutputTest, YTDatasetTest, create_test
import hashlib
import numpy as np
# Tests to make sure that grid quantities are identical that should
# be identical for the AND operator.
class TestBooleanANDGridQuantity(YTDatasetTest):
    """AND of a region with a sub-region should equal the sub-region:
    compares the sorted grid 'x' values of (re1 AND re2) against re2."""

    def run(self):
        # Fractional positions 0.4/0.5/0.6 across the domain extent.
        domain = self.ds.domain_right_edge - self.ds.domain_left_edge
        four = 0.4 * domain + self.ds.domain_left_edge
        five = 0.5 * domain + self.ds.domain_left_edge
        six = 0.6 * domain + self.ds.domain_left_edge
        re1 = self.ds.region(five, four, six)
        re2 = self.ds.region(five, five, six)
        re = self.ds.boolean([re1, "AND", re2])
        # re should look like re2.
        x2 = re2['x']
        x = re['x']
        # Sort both so the comparison is order-independent.
        x2 = x2[x2.argsort()]
        x = x[x.argsort()]
        self.result = (x2, x)

    def compare(self, old_result):
        self.compare_array_delta(self.result[0], self.result[1], 1e-10)

    def plot(self):
        return []
# OR
class TestBooleanORGridQuantity(YTDatasetTest):
    """OR of a region with a sub-region should equal the larger region:
    compares the sorted grid 'x' values of (re1 OR re2) against re1."""

    def run(self):
        # Fractional positions 0.4/0.5/0.6 across the domain extent.
        domain = self.ds.domain_right_edge - self.ds.domain_left_edge
        four = 0.4 * domain + self.ds.domain_left_edge
        five = 0.5 * domain + self.ds.domain_left_edge
        six = 0.6 * domain + self.ds.domain_left_edge
        re1 = self.ds.region(five, four, six)
        re2 = self.ds.region(five, five, six)
        re = self.ds.boolean([re1, "OR", re2])
        # re should look like re1
        x1 = re1['x']
        x = re['x']
        # Sort both so the comparison is order-independent.
        x1 = x1[x1.argsort()]
        x = x[x.argsort()]
        self.result = (x1, x)

    def compare(self, old_result):
        self.compare_array_delta(self.result[0], self.result[1], 1e-10)

    def plot(self):
        return []
# NOT
class TestBooleanNOTGridQuantity(YTDatasetTest):
    """NOT test on grid data: (re1 NOT re2) should match the explicit OR
    of three pieces (re3, re4, re5) that tile the difference volume."""

    def run(self):
        # Fractional positions 0.4/0.5/0.6 across the domain extent.
        domain = self.ds.domain_right_edge - self.ds.domain_left_edge
        four = 0.4 * domain + self.ds.domain_left_edge
        five = 0.5 * domain + self.ds.domain_left_edge
        six = 0.6 * domain + self.ds.domain_left_edge
        re1 = self.ds.region(five, four, six)
        re2 = self.ds.region(five, five, six)
        # Bottom base
        re3 = self.ds.region(five, four, [six[0], six[1], five[2]])
        # Side
        re4 = self.ds.region(five, [four[0], four[1], five[2]],
                             [five[0], six[1], six[2]])
        # Last small cube
        # NOTE(review): the corner mixes indices (four[0], four[2],
        # five[1]) across axes — verify against the intended tiling.
        re5 = self.ds.region(five, [five[0], four[0], four[2]],
                             [six[0], five[1], six[2]])
        # re1 NOT re2 should look like re3 OR re4 OR re5
        re = self.ds.boolean([re1, "NOT", re2])
        reo = self.ds.boolean([re3, "OR", re4, "OR", re5])
        x = re['x']
        xo = reo['x']
        # Sort both so the comparison is order-independent.
        x = x[x.argsort()]
        xo = xo[xo.argsort()]
        self.result = (x, xo)

    def compare(self, old_result):
        self.compare_array_delta(self.result[0], self.result[1], 1e-10)

    def plot(self):
        return []
# Tests to make sure that particle quantities are identical that should
# be identical for the AND operator.
class TestBooleanANDParticleQuantity(YTDatasetTest):
    """Particle-field analogue of the grid AND test: compares sorted
    'particle_position_x' of (re1 AND re2) against re2."""

    def run(self):
        # Fractional positions 0.4/0.5/0.6 across the domain extent.
        domain = self.ds.domain_right_edge - self.ds.domain_left_edge
        four = 0.4 * domain + self.ds.domain_left_edge
        five = 0.5 * domain + self.ds.domain_left_edge
        six = 0.6 * domain + self.ds.domain_left_edge
        re1 = self.ds.region(five, four, six)
        re2 = self.ds.region(five, five, six)
        re = self.ds.boolean([re1, "AND", re2])
        # re should look like re2.
        x2 = re2['particle_position_x']
        x = re['particle_position_x']
        # Sort both so the comparison is order-independent.
        x2 = x2[x2.argsort()]
        x = x[x.argsort()]
        self.result = (x2, x)

    def compare(self, old_result):
        self.compare_array_delta(self.result[0], self.result[1], 1e-10)

    def plot(self):
        return []
# OR
class TestBooleanORParticleQuantity(YTDatasetTest):
    """Particle-field analogue of the grid OR test: compares sorted
    'particle_position_x' of (re1 OR re2) against re1."""

    def run(self):
        # Fractional positions 0.4/0.5/0.6 across the domain extent.
        domain = self.ds.domain_right_edge - self.ds.domain_left_edge
        four = 0.4 * domain + self.ds.domain_left_edge
        five = 0.5 * domain + self.ds.domain_left_edge
        six = 0.6 * domain + self.ds.domain_left_edge
        re1 = self.ds.region(five, four, six)
        re2 = self.ds.region(five, five, six)
        re = self.ds.boolean([re1, "OR", re2])
        # re should look like re1
        x1 = re1['particle_position_x']
        x = re['particle_position_x']
        # Sort both so the comparison is order-independent.
        x1 = x1[x1.argsort()]
        x = x[x.argsort()]
        self.result = (x1, x)

    def compare(self, old_result):
        self.compare_array_delta(self.result[0], self.result[1], 1e-10)

    def plot(self):
        return []
# NOT
class TestBooleanNOTParticleQuantity(YTDatasetTest):
    """Particle-field analogue of the grid NOT test: (re1 NOT re2)
    should match the explicit OR of three tiling pieces."""

    def run(self):
        # Fractional positions 0.4/0.5/0.6 across the domain extent.
        domain = self.ds.domain_right_edge - self.ds.domain_left_edge
        four = 0.4 * domain + self.ds.domain_left_edge
        five = 0.5 * domain + self.ds.domain_left_edge
        six = 0.6 * domain + self.ds.domain_left_edge
        re1 = self.ds.region(five, four, six)
        re2 = self.ds.region(five, five, six)
        # Bottom base
        re3 = self.ds.region(five, four, [six[0], six[1], five[2]])
        # Side
        re4 = self.ds.region(five, [four[0], four[1], five[2]],
                             [five[0], six[1], six[2]])
        # Last small cube
        # NOTE(review): the corner mixes indices (four[0], four[2],
        # five[1]) across axes — verify against the intended tiling.
        re5 = self.ds.region(five, [five[0], four[0], four[2]],
                             [six[0], five[1], six[2]])
        # re1 NOT re2 should look like re3 OR re4 OR re5
        re = self.ds.boolean([re1, "NOT", re2])
        reo = self.ds.boolean([re3, "OR", re4, "OR", re5])
        x = re['particle_position_x']
        xo = reo['particle_position_x']
        # Sort both so the comparison is order-independent.
        x = x[x.argsort()]
        xo = xo[xo.argsort()]
        self.result = (x, xo)

    def compare(self, old_result):
        self.compare_array_delta(self.result[0], self.result[1], 1e-10)

    def plot(self):
        return []
| 35.664671
| 71
| 0.5863
| 865
| 5,956
| 3.924855
| 0.105202
| 0.098969
| 0.106038
| 0.127246
| 0.889838
| 0.873638
| 0.873638
| 0.873638
| 0.85243
| 0.85243
| 0
| 0.039434
| 0.276192
| 5,956
| 166
| 72
| 35.879518
| 0.748086
| 0.080087
| 0
| 0.850394
| 0
| 0
| 0.026388
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.141732
| false
| 0
| 0.055118
| 0.047244
| 0.291339
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5c6b1e6dc4615fe67f072bc9dd409992ed042723
| 83
|
py
|
Python
|
hotair/template/utils.py
|
serviper/hota
|
b132d94af7217ce90636bf1af4f207dc01d00116
|
[
"MIT"
] | null | null | null |
hotair/template/utils.py
|
serviper/hota
|
b132d94af7217ce90636bf1af4f207dc01d00116
|
[
"MIT"
] | null | null | null |
hotair/template/utils.py
|
serviper/hota
|
b132d94af7217ce90636bf1af4f207dc01d00116
|
[
"MIT"
] | null | null | null |
from secrets import token_urlsafe
def make_nonce(nbytes=32):
    """Return a cryptographically random, URL-safe nonce string.

    Generalized from the original hard-coded 32 bytes: callers may tune
    the entropy while the default stays backward-compatible.

    :param nbytes: number of random bytes of entropy (default 32). The
        returned string is the base64url encoding, roughly 1.3x longer
        (43 characters for the default).
    :return: a URL-safe ``str`` token from ``secrets.token_urlsafe``.
    """
    return token_urlsafe(nbytes)
| 13.833333
| 33
| 0.771084
| 12
| 83
| 5.083333
| 0.833333
| 0.393443
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028986
| 0.168675
| 83
| 5
| 34
| 16.6
| 0.855072
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
5c72749fe2683032396d221d0e2fb2ea3d8783fb
| 41
|
py
|
Python
|
pyqt_listwidget_and_stackedwidget/__init__.py
|
yjg30737/pyqt-listwidget-and-stackedwidget
|
6675da178a8e73b2f9abecdee001595c43550ac5
|
[
"MIT"
] | null | null | null |
pyqt_listwidget_and_stackedwidget/__init__.py
|
yjg30737/pyqt-listwidget-and-stackedwidget
|
6675da178a8e73b2f9abecdee001595c43550ac5
|
[
"MIT"
] | null | null | null |
pyqt_listwidget_and_stackedwidget/__init__.py
|
yjg30737/pyqt-listwidget-and-stackedwidget
|
6675da178a8e73b2f9abecdee001595c43550ac5
|
[
"MIT"
] | null | null | null |
from .listWidgetAndStackedWidget import *
| 41
| 41
| 0.878049
| 3
| 41
| 12
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 41
| 1
| 41
| 41
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5c72f1fe9c9a21fec709642745edccb868cd57fd
| 8,469
|
py
|
Python
|
tests/unit/test_fp16.py
|
mbeacom/DeepSpeed
|
012d91df67a9ddd66df847c7608481af027cace9
|
[
"MIT"
] | null | null | null |
tests/unit/test_fp16.py
|
mbeacom/DeepSpeed
|
012d91df67a9ddd66df847c7608481af027cace9
|
[
"MIT"
] | null | null | null |
tests/unit/test_fp16.py
|
mbeacom/DeepSpeed
|
012d91df67a9ddd66df847c7608481af027cace9
|
[
"MIT"
] | null | null | null |
import torch
import deepspeed
import argparse
import pytest
import json
import os
from common import distributed_test
from simple_model import SimpleModel, random_dataloader, args_from_dict
def test_lamb_fp16_basic(tmpdir):
    """Smoke-test FP16 training with the Lamb optimizer on 1 and 2 ranks."""
    config_dict = {
        "train_batch_size": 2,
        "steps_per_print": 1,
        "optimizer": {
            "type": "Lamb",
            "params": {
                "lr": 0.00015,
                "max_grad_norm": 1.0
            }
        },
        "fp16": {
            "enabled": True
        }
    }
    args = args_from_dict(tmpdir, config_dict)
    hidden_dim = 10

    model = SimpleModel(hidden_dim, empty_grad=False)

    # distributed_test launches the body under the listed world sizes.
    @distributed_test(world_size=[1, 2])
    def _test_lamb_fp16_basic(args, model, hidden_dim):
        # deepspeed.initialize returns (engine, optimizer, dataloader, scheduler).
        model, _, _, _ = deepspeed.initialize(args=args,
                                              model=model,
                                              model_parameters=model.parameters())
        data_loader = random_dataloader(model=model,
                                        total_samples=50,
                                        hidden_dim=hidden_dim,
                                        device=model.device)
        for n, batch in enumerate(data_loader):
            loss = model(batch[0], batch[1])
            model.backward(loss)
            model.step()

    _test_lamb_fp16_basic(args=args, model=model, hidden_dim=hidden_dim)
def test_lamb_fp16_empty_grad(tmpdir):
    """Lamb + FP16 with a model containing a parameter that gets no gradient
    (empty_grad=True) — must still train without error on a single rank."""
    config_dict = {
        "train_batch_size": 1,
        "steps_per_print": 1,
        "optimizer": {
            "type": "Lamb",
            "params": {
                "lr": 0.00015,
                "max_grad_norm": 1.0
            }
        },
        "fp16": {
            "enabled": True
        }
    }
    args = args_from_dict(tmpdir, config_dict)
    hidden_dim = 10

    model = SimpleModel(hidden_dim, empty_grad=True)

    @distributed_test(world_size=[1])
    def _test_lamb_fp16_empty_grad(args, model, hidden_dim):
        # deepspeed.initialize returns (engine, optimizer, dataloader, scheduler).
        model, _, _, _ = deepspeed.initialize(args=args,
                                              model=model,
                                              model_parameters=model.parameters())
        data_loader = random_dataloader(model=model,
                                        total_samples=50,
                                        hidden_dim=hidden_dim,
                                        device=model.device)
        for n, batch in enumerate(data_loader):
            loss = model(batch[0], batch[1])
            model.backward(loss)
            model.step()

    _test_lamb_fp16_empty_grad(args=args, model=model, hidden_dim=hidden_dim)
def test_adamw_fp16_basic(tmpdir):
    """FP16 training with a client-constructed torch.optim.AdamW passed
    directly to deepspeed.initialize (no optimizer in the JSON config)."""
    config_dict = {
        "train_batch_size": 1,
        "steps_per_print": 1,
        "fp16": {
            "enabled": True
        }
    }
    args = args_from_dict(tmpdir, config_dict)
    hidden_dim = 10

    model = SimpleModel(hidden_dim, empty_grad=False)

    @distributed_test(world_size=[1])
    def _test_adamw_fp16_basic(args, model, hidden_dim):
        # Optimizer is built by the caller and handed to the engine.
        optimizer = torch.optim.AdamW(params=model.parameters())
        model, _, _, _ = deepspeed.initialize(args=args,
                                              model=model,
                                              optimizer=optimizer)
        data_loader = random_dataloader(model=model,
                                        total_samples=50,
                                        hidden_dim=hidden_dim,
                                        device=model.device)
        for n, batch in enumerate(data_loader):
            loss = model(batch[0], batch[1])
            model.backward(loss)
            model.step()

    _test_adamw_fp16_basic(args=args, model=model, hidden_dim=hidden_dim)
def test_adamw_fp16_empty_grad(tmpdir):
    """Client-side AdamW + FP16 with a parameter that receives no gradient
    (empty_grad=True) — must still train without error."""
    config_dict = {
        "train_batch_size": 1,
        "steps_per_print": 1,
        "fp16": {
            "enabled": True
        }
    }
    args = args_from_dict(tmpdir, config_dict)
    hidden_dim = 10

    model = SimpleModel(hidden_dim, empty_grad=True)

    @distributed_test(world_size=[1])
    def _test_adamw_fp16_empty_grad(args, model, hidden_dim):
        # Optimizer is built by the caller and handed to the engine.
        optimizer = torch.optim.AdamW(params=model.parameters())
        model, _, _, _ = deepspeed.initialize(args=args,
                                              model=model,
                                              optimizer=optimizer)
        data_loader = random_dataloader(model=model,
                                        total_samples=50,
                                        hidden_dim=hidden_dim,
                                        device=model.device)
        for n, batch in enumerate(data_loader):
            loss = model(batch[0], batch[1])
            model.backward(loss)
            model.step()

    _test_adamw_fp16_empty_grad(args=args, model=model, hidden_dim=hidden_dim)
def test_adam_fp16_onecycle_compatibility(tmpdir):
    """Adam + FP16 + OneCycle LR/momentum scheduler with ZeRO disabled."""
    config_dict = {
        "train_batch_size": 1,
        "steps_per_print": 1,
        "optimizer": {
            "type": "Adam",
            "params": {
                "lr": 0.00015
            }
        },
        "scheduler": {
            "type": "OneCycle",
            "params": {
                "cycle_first_step_size": 16000,
                "cycle_first_stair_count": 8000,
                "decay_step_size": 16000,
                "cycle_min_lr": 1e-06,
                "cycle_max_lr": 3e-05,
                "decay_lr_rate": 1e-07,
                "cycle_min_mom": 0.85,
                "cycle_max_mom": 0.99,
                "decay_mom_rate": 0.0
            }
        },
        "fp16": {
            "enabled": True
        },
        "zero_optimization": False
    }
    args = args_from_dict(tmpdir, config_dict)
    hidden_dim = 10

    model = SimpleModel(hidden_dim, empty_grad=True)

    @distributed_test(world_size=[1])
    def _test_adam_fp16_onecycle_compatibility(args, model, hidden_dim):
        # deepspeed.initialize returns (engine, optimizer, dataloader, scheduler).
        model, _, _, _ = deepspeed.initialize(args=args,
                                              model=model,
                                              model_parameters=model.parameters())
        data_loader = random_dataloader(model=model,
                                        total_samples=50,
                                        hidden_dim=hidden_dim,
                                        device=model.device)
        for n, batch in enumerate(data_loader):
            loss = model(batch[0], batch[1])
            model.backward(loss)
            model.step()

    _test_adam_fp16_onecycle_compatibility(args=args, model=model, hidden_dim=hidden_dim)
def test_adam_fp16_zero_onecycle_compatibility(tmpdir):
    """Same as the non-ZeRO OneCycle test but with ZeRO optimization ON —
    the only config difference is ``"zero_optimization": True``."""
    config_dict = {
        "train_batch_size": 1,
        "steps_per_print": 1,
        "optimizer": {
            "type": "Adam",
            "params": {
                "lr": 0.00015
            }
        },
        "scheduler": {
            "type": "OneCycle",
            "params": {
                "cycle_first_step_size": 16000,
                "cycle_first_stair_count": 8000,
                "decay_step_size": 16000,
                "cycle_min_lr": 1e-06,
                "cycle_max_lr": 3e-05,
                "decay_lr_rate": 1e-07,
                "cycle_min_mom": 0.85,
                "cycle_max_mom": 0.99,
                "decay_mom_rate": 0.0
            }
        },
        "fp16": {
            "enabled": True
        },
        "zero_optimization": True
    }
    args = args_from_dict(tmpdir, config_dict)
    hidden_dim = 10

    model = SimpleModel(hidden_dim, empty_grad=True)

    @distributed_test(world_size=[1])
    def _test_adam_fp16_zero_onecycle_compatibility(args, model, hidden_dim):
        # deepspeed.initialize returns (engine, optimizer, dataloader, scheduler).
        model, _, _, _ = deepspeed.initialize(args=args,
                                              model=model,
                                              model_parameters=model.parameters())
        data_loader = random_dataloader(model=model,
                                        total_samples=50,
                                        hidden_dim=hidden_dim,
                                        device=model.device)
        for n, batch in enumerate(data_loader):
            loss = model(batch[0], batch[1])
            model.backward(loss)
            model.step()

    _test_adam_fp16_zero_onecycle_compatibility(args=args,
                                                model=model,
                                                hidden_dim=hidden_dim)
| 34.012048
| 89
| 0.503601
| 835
| 8,469
| 4.765269
| 0.106587
| 0.094999
| 0.048253
| 0.054285
| 0.957778
| 0.952249
| 0.94672
| 0.938175
| 0.927117
| 0.920834
| 0
| 0.038127
| 0.402291
| 8,469
| 248
| 90
| 34.149194
| 0.747926
| 0
| 0
| 0.747706
| 0
| 0
| 0.087141
| 0.010391
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055046
| false
| 0
| 0.036697
| 0
| 0.091743
| 0.027523
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5c8637a6370ceb97da9f03b9be452a02d5d4cd88
| 35,961
|
py
|
Python
|
pneumoRL/env.py
|
richielo/Medical_Localization_RL
|
58653170824ee087f10b6c8650ee9bc8e05b64e9
|
[
"MIT"
] | 7
|
2018-12-24T05:43:37.000Z
|
2021-12-27T08:57:45.000Z
|
pneumoRL/env.py
|
richielo/Medical_Localization_RL
|
58653170824ee087f10b6c8650ee9bc8e05b64e9
|
[
"MIT"
] | 7
|
2019-09-10T06:15:28.000Z
|
2022-03-11T23:32:47.000Z
|
pneumoRL/env.py
|
richielo/Medical_Localization_RL
|
58653170824ee087f10b6c8650ee9bc8e05b64e9
|
[
"MIT"
] | null | null | null |
import os
import sys
import random
from data_util import *
from image_util import *
"""
2 methods for local data
1. crop and pad
2. crop and resize
"""
IMG_WIDTH = 1024
IMG_HEIGHT = 1024
#Adjustable
MIN_WIDTH = 10
MIN_HEIGHT = 10
def init_bounding_box(img_shape, coverage):
    """Place a square starting box in one of the four image corners.

    The side length is chosen so the square covers roughly ``coverage``
    of the image area. Returns a box as [y, x, height, width]. The
    single random.randint(1, 4) draw is preserved so seeded runs pick
    the same corner as before.
    """
    side = math.floor(math.sqrt(img_shape[0] * img_shape[1] * coverage))
    corners = [
        [0, 0, side, side],                                         # top-left
        [IMG_HEIGHT - side - 1, 0, side, side],                     # bottom-left
        [IMG_HEIGHT - side - 1, IMG_WIDTH - side - 1, side, side],  # bottom-right
        [0, IMG_WIDTH - side - 1, side, side],                      # top-right
    ]
    return corners[random.randint(1, 4) - 1]
def calculate_iou(bb1, bb2):
    """Intersection-over-union of two boxes given as (y, x, height, width).

    Coordinates are treated as inclusive pixel indices, hence the ``+ 1``
    terms when converting corner differences into extents. Returns 0 when
    the boxes do not overlap.
    """
    top1, left1 = bb1[0], bb1[1]
    bottom1, right1 = bb1[0] + bb1[2], bb1[1] + bb1[3]
    top2, left2 = bb2[0], bb2[1]
    bottom2, right2 = bb2[0] + bb2[2], bb2[1] + bb2[3]

    # Corners of the candidate intersection rectangle.
    ix1 = max(left1, left2)
    iy1 = max(top1, top2)
    ix2 = min(right1, right2)
    iy2 = min(bottom1, bottom2)

    inter_w = ix2 - ix1 + 1
    inter_h = iy2 - iy1 + 1
    if inter_w <= 0 or inter_h <= 0:
        # Disjoint boxes.
        return 0

    inter_area = inter_w * inter_h
    area1 = (right1 - left1 + 1) * (bottom1 - top1 + 1)
    area2 = (right2 - left2 + 1) * (bottom2 - top2 + 1)
    return inter_area / float(area1 + area2 - inter_area)
def calculate_manhattan_distance(bb1, bb2):
    """Manhattan (L1) distance between the centers of two boxes.

    Boxes are (y, x, height, width); centers are truncated to int
    exactly as the original midpoint arithmetic did.
    """
    def center(bb):
        # Midpoint of (corner, corner + extent), truncated toward zero.
        cy = int((bb[0] + bb[0] + bb[2]) / 2.0)
        cx = int((bb[1] + bb[1] + bb[3]) / 2.0)
        return cy, cx

    cy1, cx1 = center(bb1)
    cy2, cx2 = center(bb2)
    return abs(cy2 - cy1) + abs(cx2 - cx1)
"""
This defines the environment and interactions between it and the agent. Agent has control over the bounding box defined within the environment
"""
class PneumoEnv():
def __init__(self, dataDict, training, action_thres, trigger_thres, trigger_reward, init_bb_thres):
    """Build a localization environment for one DICOM image.

    :param dataDict: dict with 'dicom' (path/handle for the pixel data)
        and, when training, 'boxes' (ground-truth boxes) and 'label'.
    :param training: when truthy, loads ground-truth boxes and, for
        label == 1, picks one of them at random as the target.
    :param action_thres: fraction of the current box size used as the
        per-action step size.
    :param trigger_thres: IoU threshold for a successful trigger action.
    :param trigger_reward: magnitude of the trigger reward/penalty.
    :param init_bb_thres: fraction of the image area the initial
        corner box should cover.

    NOTE(review): ``self.target_bb`` is only assigned when training and
    label == 1; reward methods reference it unconditionally — confirm
    callers never request rewards outside that case.
    """
    #TODO - action thres and starting coverage - tunable parameter
    #Load the image's pixel array as the environment
    self.full_env = get_dicom_image_data(dataDict['dicom'])
    self.gt_boxes = None
    self.target = None
    self.label = None
    self.action_threshold = action_thres
    self.trigger_threshold = trigger_thres
    self.trigger_reward = trigger_reward
    self.init_bb_threshold = init_bb_thres
    self.is_finished = False
    #Initialize bounding box - randomly on 4 corners of the image, covering 80% of the image
    #(y, x, height, width)
    self.bb = init_bounding_box(self.full_env.shape, self.init_bb_threshold)
    #History vector
    if(training):
        #Load ground truth bounding box(es)
        self.gt_boxes = dataDict['boxes']
        self.label = dataDict['label']
        if(self.label == 1):
            # Pick one ground-truth box at random as the episode target.
            self.target_bb = self.gt_boxes[random.randint(0,len(self.gt_boxes)-1)]
def get_current_state(self):
    """
    returns current bounding box's padded image + full_env

    NOTE(review): not implemented — currently returns None. Callers
    must not rely on this until a body is written.
    """
    pass
def get_reward(self, action, oldBb, newBb):
    """IoU-based reward for a transition from ``oldBb`` to ``newBb``.

    * Assume must have ground truth boxes for training, can consider using terminate action, may destabalize training
    Action 8 (trigger) earns +/- ``trigger_reward`` depending on whether
    the new box's IoU with the target reaches ``trigger_threshold``;
    every other action earns +1.0 if the IoU improved, else -1.0.
    """
    target = self.target_bb
    iou_before = calculate_iou(oldBb, target)
    iou_after = calculate_iou(newBb, target)

    if action == 8:
        # Trigger: large terminal reward or penalty.
        triggered_well = iou_after >= self.trigger_threshold
        return self.trigger_reward if triggered_well else -1 * self.trigger_reward

    # Movement/scale action: sign of the IoU change.
    return 1.0 if iou_after - iou_before > 0 else -1.0
def get_reward_mod(self, action, oldBb, newBb):
    """Shaped reward combining IoU change and center-distance change.

    * Assume must have ground truth boxes for training, can consider using terminate action, may destabalize training
    Action 8 (trigger) behaves as in ``get_reward``. Other actions earn
    two additive +/-1 terms: one for IoU improvement and one for the
    Manhattan distance between box centers shrinking.
    """
    target = self.target_bb
    iou_before = calculate_iou(oldBb, target)
    dist_before = calculate_manhattan_distance(oldBb, target)
    iou_after = calculate_iou(newBb, target)
    dist_after = calculate_manhattan_distance(newBb, target)

    if action == 8:
        # Trigger: large terminal reward or penalty.
        if iou_after >= self.trigger_threshold:
            return self.trigger_reward
        return -1 * self.trigger_reward

    reward = 0.0
    # +1 when overlap with the target improved, else -1.
    reward += 1.0 if iou_after - iou_before > 0 else -1.0
    # +1 when the center moved closer to the target, else -1.
    reward += 1.0 if dist_after - dist_before < 0 else -1.0
    return reward
#@profile
def step_foresee(self, action):
#Forsee results of an action for guided exploration, without updating the environment
old_bb = self.bb.copy()
new_bb = self.bb.copy()
a_x = int(self.action_threshold * self.bb[3])
a_y = int(self.action_threshold * self.bb[2])
if(action == 0):
#Horizontal - left
new_bb[1] -= a_x
if(new_bb[1] < 0):
new_bb[1] = 0
elif(action == 1):
#Horizontal - right
new_bb[1] += a_x
if(new_bb[1] + new_bb[3] > IMG_WIDTH - 1):
new_bb[1] = IMG_WIDTH - 1 - new_bb[3]
elif(action == 2):
#Vertical - Up
new_bb[0] += a_y
if(new_bb[0] + new_bb[2] > IMG_HEIGHT - 1):
new_bb[0] = IMG_HEIGHT - 1 - new_bb[2]
elif(action == 3):
#Vertical - Down
new_bb[0] -= a_y
if(new_bb[0] < 0):
new_bb[0] = 0
elif(action == 4):
#Scale - increase
new_bb[1] -= a_x
new_bb[3] += 2 * a_x
if(new_bb[1] < 0):
new_bb[1] = 0
if(new_bb[1] + new_bb[3] > IMG_WIDTH - 1):
new_bb[3] = IMG_WIDTH - 1 - new_bb[1]
new_bb[0] -= a_y
new_bb[2] += 2 * a_y
if(new_bb[0] < 0):
new_bb[0] = 0
if(new_bb[0] + new_bb[2] > IMG_HEIGHT - 1):
new_bb[2] = IMG_HEIGHT - 1 - new_bb[0]
elif(action == 5):
#Scale - decrease
new_bb[1] += a_x
new_bb[3] -= 2 * a_x
if(new_bb[3] < MIN_WIDTH):
new_fac = (MIN_WIDTH - new_bb[3]) / 2
new_bb[1] -= new_fac
new_bb[3] += 2 * new_fac
new_bb[0] += a_y
new_bb[2] -= 2 * a_y
if(new_bb[2] < MIN_HEIGHT):
new_fac = (MIN_HEIGHT - new_bb[2]) / 2
new_bb[0] -= new_fac
new_bb[2] += 2 * new_fac
elif(action == 6):
#Aspect ratio - fatter
new_bb[0] += a_y
new_bb[2] -= 2 * a_y
if(new_bb[2] < MIN_HEIGHT):
new_fac = (MIN_HEIGHT - new_bb[2]) / 2
new_bb[0] -= new_fac
new_bb[2] += 2 * new_fac
elif(action == 7):
#Aspect ratio - taller
new_bb[1] += a_x
new_bb[3] -= 2 * a_x
if(new_bb[3] < MIN_WIDTH):
new_fac = (MIN_WIDTH - new_bb[3]) / 2
new_bb[1] -= new_fac
new_bb[3] += 2 * new_fac
reward = self.get_reward_mod(action, old_bb, new_bb)
return reward
#@profile
def step(self, action):
"""
executes action selected by the agent
"""
old_bb = self.bb.copy()
a_x = int(self.action_threshold * self.bb[3])
a_y = int(self.action_threshold * self.bb[2])
if(action == 0):
#Horizontal - left
self.bb[1] -= a_x
if(self.bb[1] < 0):
self.bb[1] = 0
elif(action == 1):
#Horizontal - right
self.bb[1] += a_x
if(self.bb[1] + self.bb[3] > IMG_WIDTH - 1):
self.bb[1] = IMG_WIDTH - 1 - self.bb[3]
elif(action == 2):
#Vertical - Up
self.bb[0] += a_y
if(self.bb[0] + self.bb[2] > IMG_HEIGHT - 1):
self.bb[0] = IMG_HEIGHT - 1 - self.bb[2]
elif(action == 3):
#Vertical - Down
self.bb[0] -= a_y
if(self.bb[0] < 0):
self.bb[0] = 0
elif(action == 4):
#Scale - increase
self.bb[1] -= a_x
self.bb[3] += 2 * a_x
if(self.bb[1] < 0):
self.bb[1] = 0
if(self.bb[1] + self.bb[3] > IMG_WIDTH - 1):
self.bb[3] = IMG_WIDTH - 1 - self.bb[1]
self.bb[0] -= a_y
self.bb[2] += 2 * a_y
if(self.bb[0] < 0):
self.bb[0] = 0
if(self.bb[0] + self.bb[2] > IMG_HEIGHT - 1):
self.bb[2] = IMG_HEIGHT - 1 - self.bb[0]
elif(action == 5):
#Scale - decrease
self.bb[1] += a_x
self.bb[3] -= 2 * a_x
if(self.bb[3] < MIN_WIDTH):
new_fac = (MIN_WIDTH - self.bb[3]) / 2
self.bb[1] -= new_fac
self.bb[3] += 2 * new_fac
self.bb[0] += a_y
self.bb[2] -= 2 * a_y
if(self.bb[2] < MIN_HEIGHT):
new_fac = (MIN_HEIGHT - self.bb[2]) / 2
self.bb[0] -= new_fac
self.bb[2] += 2 * new_fac
elif(action == 6):
#Aspect ratio - fatter
self.bb[0] += a_y
self.bb[2] -= 2 * a_y
if(self.bb[2] < MIN_HEIGHT):
new_fac = (MIN_HEIGHT - self.bb[2]) / 2
self.bb[0] -= new_fac
self.bb[2] += 2 * new_fac
elif(action == 7):
#Aspect ratio - taller
self.bb[1] += a_x
self.bb[3] -= 2 * a_x
if(self.bb[3] < MIN_WIDTH):
new_fac = (MIN_WIDTH - self.bb[3]) / 2
self.bb[1] -= new_fac
self.bb[3] += 2 * new_fac
elif(action == 8):
#Trigger
self.is_finished = True
reward = self.get_reward_mod(action, old_bb, self.bb)
return old_bb, action, reward, self.bb, self.is_finished
def step_infer(self, action):
"""
executes action selected by the agent
"""
old_bb = self.bb.copy()
a_x = int(self.action_threshold * self.bb[3])
a_y = int(self.action_threshold * self.bb[2])
if(action == 0):
#Horizontal - left
self.bb[1] -= a_x
if(self.bb[1] < 0):
self.bb[1] = 0
elif(action == 1):
#Horizontal - right
self.bb[1] += a_x
if(self.bb[1] + self.bb[3] > IMG_WIDTH - 1):
self.bb[1] = IMG_WIDTH - 1 - self.bb[3]
elif(action == 2):
#Vertical - Up
self.bb[0] += a_y
if(self.bb[0] + self.bb[2] > IMG_HEIGHT - 1):
self.bb[0] = IMG_HEIGHT - 1 - self.bb[2]
elif(action == 3):
#Vertical - Down
self.bb[0] -= a_y
if(self.bb[0] < 0):
self.bb[0] = 0
elif(action == 4):
#Scale - increase
self.bb[1] -= a_x
self.bb[3] += 2 * a_x
if(self.bb[1] < 0):
self.bb[1] = 0
if(self.bb[1] + self.bb[3] > IMG_WIDTH - 1):
self.bb[3] = IMG_WIDTH - 1 - self.bb[1]
self.bb[0] -= a_y
self.bb[2] += 2 * a_y
if(self.bb[0] < 0):
self.bb[0] = 0
if(self.bb[0] + self.bb[2] > IMG_HEIGHT - 1):
self.bb[2] = IMG_HEIGHT - 1 - self.bb[0]
elif(action == 5):
#Scale - decrease
self.bb[1] += a_x
self.bb[3] -= 2 * a_x
if(self.bb[3] < MIN_WIDTH):
new_fac = (MIN_WIDTH - self.bb[3]) / 2
self.bb[1] -= new_fac
self.bb[3] += 2 * new_fac
self.bb[0] += a_y
self.bb[2] -= 2 * a_y
if(self.bb[2] < MIN_HEIGHT):
new_fac = (MIN_HEIGHT - self.bb[2]) / 2
self.bb[0] -= new_fac
self.bb[2] += 2 * new_fac
elif(action == 6):
#Aspect ratio - fatter
self.bb[0] += a_y
self.bb[2] -= 2 * a_y
if(self.bb[2] < MIN_HEIGHT):
new_fac = (MIN_HEIGHT - self.bb[2]) / 2
self.bb[0] -= new_fac
self.bb[2] += 2 * new_fac
elif(action == 7):
#Aspect ratio - taller
self.bb[1] += a_x
self.bb[3] -= 2 * a_x
if(self.bb[3] < MIN_WIDTH):
new_fac = (MIN_WIDTH - self.bb[3]) / 2
self.bb[1] -= new_fac
self.bb[3] += 2 * new_fac
elif(action == 8):
#Trigger
self.is_finished = True
return old_bb, action, self.bb, self.is_finished
    def reset_bb(self):
        #Re-draw a fresh starting bounding box from the image shape and
        #the configured initial-coverage threshold
        self.bb = init_bounding_box(self.full_env.shape, self.init_bb_threshold)
    def black_out(self):
        #Replace the environment image with one where the current box region
        #is blacked out (per set_bb_to_black's name - exact semantics live in
        #that helper; confirm there)
        self.full_env = set_bb_to_black(self.full_env, self.bb)
def extract_bound_box_image(self, bb):
"""
extracts pixel content of current bounding box
"""
bb_img = crop_pad_image(self.full_env, bb)
return bb_img
class PneumoEnv2():
    """
    Localization environment without the scale-increase action.

    Bounding boxes are (y, x, height, width) lists. Action space:
        0: move left          1: move right
        2: move along +y      3: move along -y
        4: scale decrease (both axes)
        5: aspect ratio - fatter (shrink height)
        6: aspect ratio - taller (shrink width)
        7: trigger (terminate the episode)
    """
    def __init__(self, dataDict, training, action_thres, trigger_thres, trigger_reward, init_bb_thres):
        """
        dataDict: dict with 'dicom' (image source) and, when training,
                  'boxes' (ground-truth boxes) and 'label' (1 = positive).
        """
        #TODO - action thres and starting coverage - tunable parameter
        #Load the image's pixel array as the environment
        self.full_env = get_dicom_image_data(dataDict['dicom'])
        self.gt_boxes = None
        self.target = None  # NOTE(review): not referenced elsewhere in this class
        #Always define target_bb so get_reward/get_reward_mod cannot raise
        #AttributeError on negative or inference samples (previously it was
        #only assigned for positive training samples).
        self.target_bb = None
        self.label = None
        self.action_threshold = action_thres
        self.trigger_threshold = trigger_thres
        self.trigger_reward = trigger_reward
        self.init_bb_threshold = init_bb_thres
        self.is_finished = False
        #Initialize bounding box (y, x, height, width)
        self.bb = init_bounding_box(self.full_env.shape, self.init_bb_threshold)
        if(training):
            #Load ground truth bounding box(es)
            self.gt_boxes = dataDict['boxes']
            self.label = dataDict['label']
            if(self.label == 1):
                #Pick one ground-truth box at random as the target
                self.target_bb = self.gt_boxes[random.randint(0, len(self.gt_boxes) - 1)]
    def get_current_state(self):
        """
        returns current bounding box's padded image + full_env
        (placeholder - not implemented)
        """
        pass
    def get_reward(self, action, oldBb, newBb):
        """
        IOU-based reward. Trigger (7) pays +/- trigger_reward depending on
        whether the final IOU reaches trigger_threshold; any other action
        pays +1 / 0 / -1 for an IOU increase / no change / decrease.
        """
        oldbb_iou = calculate_iou(oldBb, self.target_bb)
        newbb_iou = calculate_iou(newBb, self.target_bb)
        if(action == 7):
            if(newbb_iou >= self.trigger_threshold):
                return self.trigger_reward
            return -1 * self.trigger_reward
        iou_diff = newbb_iou - oldbb_iou
        if iou_diff > 0:
            return 1.0
        elif iou_diff == 0:
            return 0.0
        return -1.0
    def get_reward_mod(self, action, oldBb, newBb):
        """
        Reward combining an IOU term and a Manhattan-distance term, each in
        {-1, 0, +1}. Trigger (7) pays +/- trigger_reward as in get_reward.
        """
        oldbb_iou = calculate_iou(oldBb, self.target_bb)
        oldbb_man_dist = calculate_manhattan_distance(oldBb, self.target_bb)
        newbb_iou = calculate_iou(newBb, self.target_bb)
        newbb_man_dist = calculate_manhattan_distance(newBb, self.target_bb)
        if(action == 7):
            if(newbb_iou >= self.trigger_threshold):
                return self.trigger_reward
            return -1 * self.trigger_reward
        reward = 0.0
        iou_diff = newbb_iou - oldbb_iou
        if iou_diff > 0:
            reward += 1.0
        elif iou_diff < 0:
            reward -= 1.0
        man_dist_diff = newbb_man_dist - oldbb_man_dist
        if man_dist_diff < 0:
            reward += 1.0
        elif man_dist_diff > 0:
            reward -= 1.0
        return reward
    def _shrink_width(self, bb, a_x):
        #Narrow bb symmetrically by a_x per side; if the result falls below
        #MIN_WIDTH, re-expand symmetrically back to MIN_WIDTH.
        bb[1] += a_x
        bb[3] -= 2 * a_x
        if bb[3] < MIN_WIDTH:
            pad = (MIN_WIDTH - bb[3]) / 2
            bb[1] -= pad
            bb[3] += 2 * pad
    def _shrink_height(self, bb, a_y):
        #Flatten bb symmetrically by a_y per side; floor at MIN_HEIGHT.
        bb[0] += a_y
        bb[2] -= 2 * a_y
        if bb[2] < MIN_HEIGHT:
            pad = (MIN_HEIGHT - bb[2]) / 2
            bb[0] -= pad
            bb[2] += 2 * pad
    def _move_bb(self, bb, action):
        #Apply the geometry of actions 0-6 to bb in place. Action 7
        #(trigger) changes no geometry and is handled by the callers.
        #This helper replaces three verbatim copies of the same logic that
        #previously lived in step, step_infer and step_foresee.
        a_x = int(self.action_threshold * bb[3])
        a_y = int(self.action_threshold * bb[2])
        if action == 0:
            #Horizontal - left
            bb[1] -= a_x
            if bb[1] < 0:
                bb[1] = 0
        elif action == 1:
            #Horizontal - right
            bb[1] += a_x
            if bb[1] + bb[3] > IMG_WIDTH - 1:
                bb[1] = IMG_WIDTH - 1 - bb[3]
        elif action == 2:
            #Vertical - Up (original convention: increases y)
            bb[0] += a_y
            if bb[0] + bb[2] > IMG_HEIGHT - 1:
                bb[0] = IMG_HEIGHT - 1 - bb[2]
        elif action == 3:
            #Vertical - Down
            bb[0] -= a_y
            if bb[0] < 0:
                bb[0] = 0
        elif action == 4:
            #Scale - decrease (both axes)
            self._shrink_width(bb, a_x)
            self._shrink_height(bb, a_y)
        elif action == 5:
            #Aspect ratio - fatter
            self._shrink_height(bb, a_y)
        elif action == 6:
            #Aspect ratio - taller
            self._shrink_width(bb, a_x)
    #@profile
    def step_foresee(self, action):
        #Foresee the reward of an action for guided exploration, without
        #mutating the environment (works on a copy of the current box).
        old_bb = self.bb.copy()
        new_bb = self.bb.copy()
        self._move_bb(new_bb, action)
        return self.get_reward_mod(action, old_bb, new_bb)
    #@profile
    def step(self, action):
        """
        Executes the action selected by the agent, mutating self.bb in place.

        Returns (old_bb, action, reward, new_bb, is_finished).
        """
        old_bb = self.bb.copy()
        self._move_bb(self.bb, action)
        if action == 7:
            #Trigger - terminate the episode
            self.is_finished = True
        reward = self.get_reward_mod(action, old_bb, self.bb)
        return old_bb, action, reward, self.bb, self.is_finished
    def step_infer(self, action):
        """
        Same as step() but without computing a reward (no ground truth).

        Returns (old_bb, action, new_bb, is_finished).
        """
        old_bb = self.bb.copy()
        self._move_bb(self.bb, action)
        if action == 7:
            #Trigger - terminate the episode
            self.is_finished = True
        return old_bb, action, self.bb, self.is_finished
    def reset_bb(self):
        #Re-draw a fresh starting bounding box
        self.bb = init_bounding_box(self.full_env.shape, self.init_bb_threshold)
    def black_out(self):
        #Replace the environment image with one where the current box region
        #is blacked out (per set_bb_to_black's name; semantics live there)
        self.full_env = set_bb_to_black(self.full_env, self.bb)
    def extract_bound_box_image(self, bb):
        """
        Returns the pixel content of bounding box *bb*, cropped from the
        environment image and padded by crop_pad_image.
        """
        return crop_pad_image(self.full_env, bb)
class PneumoEnv3():
    """
    Without scale increase with corner-pivotal scaling.

    NOTE(review): despite the name, the visible geometry here is identical
    to PneumoEnv2's symmetric (center-pivot) scaling - confirm intent.

    Bounding boxes are (y, x, height, width) lists. Action space:
        0: move left          1: move right
        2: move along +y      3: move along -y
        4: scale decrease (both axes)
        5: aspect ratio - fatter (shrink height)
        6: aspect ratio - taller (shrink width)
        7: trigger (terminate the episode)
    """
    def __init__(self, dataDict, training, action_thres, trigger_thres, trigger_reward, init_bb_thres):
        """
        dataDict: dict with 'dicom' (image source) and, when training,
                  'boxes' (ground-truth boxes) and 'label' (1 = positive).
        """
        #TODO - action thres and starting coverage - tunable parameter
        #Load the image's pixel array as the environment
        self.full_env = get_dicom_image_data(dataDict['dicom'])
        self.gt_boxes = None
        self.target = None  # NOTE(review): not referenced elsewhere in this class
        #Always define target_bb so get_reward/get_reward_mod cannot raise
        #AttributeError on negative or inference samples (previously it was
        #only assigned for positive training samples).
        self.target_bb = None
        self.label = None
        self.action_threshold = action_thres
        self.trigger_threshold = trigger_thres
        self.trigger_reward = trigger_reward
        self.init_bb_threshold = init_bb_thres
        self.is_finished = False
        #Initialize bounding box (y, x, height, width)
        self.bb = init_bounding_box(self.full_env.shape, self.init_bb_threshold)
        if(training):
            #Load ground truth bounding box(es)
            self.gt_boxes = dataDict['boxes']
            self.label = dataDict['label']
            if(self.label == 1):
                #Pick one ground-truth box at random as the target
                self.target_bb = self.gt_boxes[random.randint(0, len(self.gt_boxes) - 1)]
    def get_current_state(self):
        """
        returns current bounding box's padded image + full_env
        (placeholder - not implemented)
        """
        pass
    def get_reward(self, action, oldBb, newBb):
        """
        IOU-based reward. Trigger (7) pays +/- trigger_reward depending on
        whether the final IOU reaches trigger_threshold; any other action
        pays +1 / 0 / -1 for an IOU increase / no change / decrease.
        """
        oldbb_iou = calculate_iou(oldBb, self.target_bb)
        newbb_iou = calculate_iou(newBb, self.target_bb)
        if(action == 7):
            if(newbb_iou >= self.trigger_threshold):
                return self.trigger_reward
            return -1 * self.trigger_reward
        iou_diff = newbb_iou - oldbb_iou
        if iou_diff > 0:
            return 1.0
        elif iou_diff == 0:
            return 0.0
        return -1.0
    def get_reward_mod(self, action, oldBb, newBb):
        """
        Reward combining an IOU term and a Manhattan-distance term, each in
        {-1, +1}. Trigger (7) pays +/- trigger_reward as in get_reward.

        NOTE(review): unlike get_reward (and unlike PneumoEnv2's
        get_reward_mod) a zero diff is penalized here, not neutral -
        behavior preserved from the original; confirm it is intended.
        """
        oldbb_iou = calculate_iou(oldBb, self.target_bb)
        oldbb_man_dist = calculate_manhattan_distance(oldBb, self.target_bb)
        newbb_iou = calculate_iou(newBb, self.target_bb)
        newbb_man_dist = calculate_manhattan_distance(newBb, self.target_bb)
        if(action == 7):
            if(newbb_iou >= self.trigger_threshold):
                return self.trigger_reward
            return -1 * self.trigger_reward
        iou_term = 1.0 if newbb_iou - oldbb_iou > 0 else -1.0
        dist_term = 1.0 if newbb_man_dist - oldbb_man_dist < 0 else -1.0
        return iou_term + dist_term
    def _shrink_width(self, bb, a_x):
        #Narrow bb symmetrically by a_x per side; if the result falls below
        #MIN_WIDTH, re-expand symmetrically back to MIN_WIDTH.
        bb[1] += a_x
        bb[3] -= 2 * a_x
        if bb[3] < MIN_WIDTH:
            pad = (MIN_WIDTH - bb[3]) / 2
            bb[1] -= pad
            bb[3] += 2 * pad
    def _shrink_height(self, bb, a_y):
        #Flatten bb symmetrically by a_y per side; floor at MIN_HEIGHT.
        bb[0] += a_y
        bb[2] -= 2 * a_y
        if bb[2] < MIN_HEIGHT:
            pad = (MIN_HEIGHT - bb[2]) / 2
            bb[0] -= pad
            bb[2] += 2 * pad
    def _move_bb(self, bb, action):
        #Apply the geometry of actions 0-6 to bb in place. Action 7
        #(trigger) changes no geometry and is handled by the callers.
        #This helper replaces three verbatim copies of the same logic that
        #previously lived in step, step_infer and step_foresee.
        a_x = int(self.action_threshold * bb[3])
        a_y = int(self.action_threshold * bb[2])
        if action == 0:
            #Horizontal - left
            bb[1] -= a_x
            if bb[1] < 0:
                bb[1] = 0
        elif action == 1:
            #Horizontal - right
            bb[1] += a_x
            if bb[1] + bb[3] > IMG_WIDTH - 1:
                bb[1] = IMG_WIDTH - 1 - bb[3]
        elif action == 2:
            #Vertical - Up (original convention: increases y)
            bb[0] += a_y
            if bb[0] + bb[2] > IMG_HEIGHT - 1:
                bb[0] = IMG_HEIGHT - 1 - bb[2]
        elif action == 3:
            #Vertical - Down
            bb[0] -= a_y
            if bb[0] < 0:
                bb[0] = 0
        elif action == 4:
            #Scale - decrease (both axes)
            self._shrink_width(bb, a_x)
            self._shrink_height(bb, a_y)
        elif action == 5:
            #Aspect ratio - fatter
            self._shrink_height(bb, a_y)
        elif action == 6:
            #Aspect ratio - taller
            self._shrink_width(bb, a_x)
    #@profile
    def step_foresee(self, action):
        #Foresee the reward of an action for guided exploration, without
        #mutating the environment (works on a copy of the current box).
        old_bb = self.bb.copy()
        new_bb = self.bb.copy()
        self._move_bb(new_bb, action)
        return self.get_reward_mod(action, old_bb, new_bb)
    #@profile
    def step(self, action):
        """
        Executes the action selected by the agent, mutating self.bb in place.

        Returns (old_bb, action, reward, new_bb, is_finished).
        """
        old_bb = self.bb.copy()
        self._move_bb(self.bb, action)
        if action == 7:
            #Trigger - terminate the episode
            self.is_finished = True
        reward = self.get_reward_mod(action, old_bb, self.bb)
        return old_bb, action, reward, self.bb, self.is_finished
    def step_infer(self, action):
        """
        Same as step() but without computing a reward (no ground truth).

        Returns (old_bb, action, new_bb, is_finished).
        """
        old_bb = self.bb.copy()
        self._move_bb(self.bb, action)
        if action == 7:
            #Trigger - terminate the episode
            self.is_finished = True
        return old_bb, action, self.bb, self.is_finished
    def reset_bb(self):
        #Re-draw a fresh starting bounding box
        self.bb = init_bounding_box(self.full_env.shape, self.init_bb_threshold)
    def black_out(self):
        #Replace the environment image with one where the current box region
        #is blacked out (per set_bb_to_black's name; semantics live there)
        self.full_env = set_bb_to_black(self.full_env, self.bb)
    def extract_bound_box_image(self, bb):
        """
        Returns the pixel content of bounding box *bb*, cropped from the
        environment image and padded by crop_pad_image.
        """
        return crop_pad_image(self.full_env, bb)
| 36.769939
| 186
| 0.496176
| 4,945
| 35,961
| 3.417594
| 0.045298
| 0.114675
| 0.031065
| 0.011538
| 0.939172
| 0.938757
| 0.934911
| 0.93426
| 0.933787
| 0.931361
| 0
| 0.046344
| 0.386168
| 35,961
| 977
| 187
| 36.807574
| 0.719262
| 0.138928
| 0
| 0.94452
| 0
| 0
| 0.001497
| 0
| 0
| 0
| 0
| 0.003071
| 0
| 1
| 0.044655
| false
| 0.00406
| 0.006766
| 0
| 0.108254
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5c8ed03a0893c07adb2d374119f1876c5d2fa15a
| 484
|
py
|
Python
|
sokoban/numtup.py
|
aurzenligl/tdd_training
|
7a83ed77181297fcb45712c7998af972032794c5
|
[
"MIT"
] | null | null | null |
sokoban/numtup.py
|
aurzenligl/tdd_training
|
7a83ed77181297fcb45712c7998af972032794c5
|
[
"MIT"
] | 12
|
2017-11-27T21:57:25.000Z
|
2017-11-27T22:01:58.000Z
|
sokoban/numtup.py
|
aurzenligl/tdd_training
|
7a83ed77181297fcb45712c7998af972032794c5
|
[
"MIT"
] | null | null | null |
class numtup(tuple):
    """Tuple subclass with element-wise arithmetic.

    Adding or multiplying by another tuple works element-wise (lengths
    must match); adding or multiplying by a scalar applies it to every
    element. Results are numtup instances.
    """

    def __add__(self, other):
        return self._zip_with(other, lambda a, b: a + b)

    def __mul__(self, other):
        return self._zip_with(other, lambda a, b: a * b)

    def _zip_with(self, other, op):
        # Pair up with an equal-length tuple, or broadcast a scalar.
        if isinstance(other, tuple):
            assert len(self) == len(other)
            return numtup(op(a, b) for a, b in zip(self, other))
        return numtup(op(a, other) for a in self)
| 37.230769
| 69
| 0.590909
| 67
| 484
| 4.149254
| 0.268657
| 0.129496
| 0.244604
| 0.28777
| 0.899281
| 0.899281
| 0.899281
| 0.899281
| 0.899281
| 0.899281
| 0
| 0
| 0.303719
| 484
| 12
| 70
| 40.333333
| 0.824926
| 0
| 0
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 1
| 0.181818
| false
| 0
| 0
| 0
| 0.636364
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5ca5f1e491c32be40f7ef56c8f8c426f7aeee9fc
| 3,488
|
py
|
Python
|
tests/integration/tree/test_branch_length_multiplier_integration.py
|
yuzhenpeng/PhyKIT
|
167b9dfe0dd0bddd4b23492d9a3dc34e56debbd7
|
[
"MIT"
] | 26
|
2020-10-28T10:33:33.000Z
|
2022-02-04T14:59:22.000Z
|
tests/integration/tree/test_branch_length_multiplier_integration.py
|
yuzhenpeng/PhyKIT
|
167b9dfe0dd0bddd4b23492d9a3dc34e56debbd7
|
[
"MIT"
] | 4
|
2021-03-28T22:05:39.000Z
|
2022-03-22T00:33:01.000Z
|
tests/integration/tree/test_branch_length_multiplier_integration.py
|
JLSteenwyk/PhyKIT
|
0b3194d1bb5c189993b256fe96011cce48b9bbb4
|
[
"MIT"
] | 4
|
2020-11-06T11:58:25.000Z
|
2021-08-17T16:57:51.000Z
|
import pytest
import sys
from math import isclose
from mock import patch, call
from pathlib import Path
from textwrap import dedent
from phykit.phykit import Phykit
here = Path(__file__)
@pytest.mark.integration
class TestBranchLengthMultiplier(object):
    """Integration tests for the branch_length_multiplier (alias: blm)
    subcommand: each test drives the Phykit CLI via sys.argv and compares
    the written tree file against an expected fixture, or asserts the CLI
    exits with code 2 on bad arguments."""
    @patch("builtins.print")
    def test_branch_length_multiplier_custom_output(self, mocked_print):
        # -o directs output to an explicit path; compare it to the fixture.
        testargs = [
            "phykit",
            "branch_length_multiplier",
            f"{here.parent.parent.parent}/sample_files/tree_simple.tre",
            "-f",
            "5",
            "-o",
            "./tests/sample_files/tree_simple_blm_5.tre"
        ]
        with patch.object(sys, "argv", testargs):
            Phykit()
        with open(f"{here.parent.parent}/expected/tree_simple_blm_5.tre", "r") as expected_tree:
            expected_tree_content = expected_tree.read()
        with open(f"{here.parent.parent.parent}/sample_files/tree_simple_blm_5.tre", "r") as out_tree:
            out_tree_content = out_tree.read()
        assert expected_tree_content == out_tree_content
    @patch("builtins.print")
    def test_branch_length_multiplier_default_output(self, mocked_print):
        # Without -o the output path defaults to <input>.factor_<f>.tre.
        testargs = [
            "phykit",
            "branch_length_multiplier",
            f"{here.parent.parent.parent}/sample_files/tree_simple.tre",
            "-f",
            "2",
        ]
        with patch.object(sys, "argv", testargs):
            Phykit()
        with open(f"{here.parent.parent}/expected/tree_simple.tre.factor_2.0.tre", "r") as expected_tree:
            expected_tree_content = expected_tree.read()
        with open(f"{here.parent.parent.parent}/sample_files/tree_simple.tre.factor_2.0.tre", "r") as out_tree:
            out_tree_content = out_tree.read()
        assert expected_tree_content == out_tree_content
    @patch("builtins.print")
    def test_branch_length_multiplier_alias(self, mocked_print):
        # The short alias "blm" must behave exactly like the long name.
        testargs = [
            "phykit",
            "blm",
            f"{here.parent.parent.parent}/sample_files/tree_simple.tre",
            "-f",
            "2",
        ]
        with patch.object(sys, "argv", testargs):
            Phykit()
        with open(f"{here.parent.parent}/expected/tree_simple.tre.factor_2.0.tre", "r") as expected_tree:
            expected_tree_content = expected_tree.read()
        with open(f"{here.parent.parent.parent}/sample_files/tree_simple.tre.factor_2.0.tre", "r") as out_tree:
            out_tree_content = out_tree.read()
        assert expected_tree_content == out_tree_content
    @patch("builtins.print")
    def test_branch_length_multiplier_incorrect_input(self, mocked_print):
        # Nonexistent input file (.tr, not .tre) -> CLI exits with code 2.
        testargs = [
            "phykit",
            "blm",
            f"{here.parent.parent.parent}/sample_files/tree_simple.tr",
            "-f",
            "2",
        ]
        with pytest.raises(SystemExit) as pytest_wrapped_e:
            Phykit()
        assert pytest_wrapped_e.type == SystemExit
        assert pytest_wrapped_e.value.code == 2
    @patch("builtins.print")
    def test_branch_length_multiplier_incorrect_factor(self, mocked_print):
        # -f given without a value -> CLI exits with code 2.
        # NOTE(review): the input path also uses .tr like the test above -
        # exit code 2 is asserted either way, but confirm which error this
        # test is meant to exercise.
        testargs = [
            "phykit",
            "blm",
            f"{here.parent.parent.parent}/sample_files/tree_simple.tr",
            "-f",
        ]
        with pytest.raises(SystemExit) as pytest_wrapped_e:
            Phykit()
        assert pytest_wrapped_e.type == SystemExit
        assert pytest_wrapped_e.value.code == 2
| 32.90566
| 111
| 0.616686
| 418
| 3,488
| 4.868421
| 0.165072
| 0.112039
| 0.059459
| 0.091892
| 0.879607
| 0.879607
| 0.879607
| 0.861916
| 0.838821
| 0.80688
| 0
| 0.006677
| 0.270069
| 3,488
| 106
| 112
| 32.90566
| 0.792616
| 0
| 0
| 0.705882
| 0
| 0
| 0.253941
| 0.212955
| 0
| 0
| 0
| 0
| 0.082353
| 1
| 0.058824
| false
| 0
| 0.082353
| 0
| 0.152941
| 0.117647
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5cb25d068a43c45f466e895f9da03f6781cec6d4
| 177
|
py
|
Python
|
webpt/__init__.py
|
cool-RR/webpt
|
817f2bf0b66f322a4088ec719915af6ab226c31f
|
[
"MIT"
] | null | null | null |
webpt/__init__.py
|
cool-RR/webpt
|
817f2bf0b66f322a4088ec719915af6ab226c31f
|
[
"MIT"
] | null | null | null |
webpt/__init__.py
|
cool-RR/webpt
|
817f2bf0b66f322a4088ec719915af6ab226c31f
|
[
"MIT"
] | null | null | null |
from webpt.request_analysis import *
from webpt.spider import *
from webpt.response_analysis import *
from webpt.port_scanner import *
import requests
import urllib3
import bs4
| 22.125
| 37
| 0.830508
| 25
| 177
| 5.76
| 0.48
| 0.25
| 0.3125
| 0.319444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012903
| 0.124294
| 177
| 7
| 38
| 25.285714
| 0.916129
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
5cb58463c7ee09abef7ec6d3534c0bbbf324ba4b
| 16,580
|
py
|
Python
|
plot.py
|
og-brandon/TSCI-II-2020-soundecology-paper-demonstration
|
57f39751c7a6e51e27148bbd4e4b1e731d42fac9
|
[
"MIT"
] | null | null | null |
plot.py
|
og-brandon/TSCI-II-2020-soundecology-paper-demonstration
|
57f39751c7a6e51e27148bbd4e4b1e731d42fac9
|
[
"MIT"
] | null | null | null |
plot.py
|
og-brandon/TSCI-II-2020-soundecology-paper-demonstration
|
57f39751c7a6e51e27148bbd4e4b1e731d42fac9
|
[
"MIT"
] | null | null | null |
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import csv
# ---------------------------------------------------------------
# Bosque de Manzanillo
# Location (ubicacion) of the result CSV for each acoustic index.
ubicacionACI = 'manzanillo_forest_results_acoustic_complexity_index_ACI.csv'
ubicacionADI = 'manzanillo_forest_results_acoustic_diversity_ADI.csv'
ubicacionH = 'manzanillo_forest_results_acoustic_entropy_H.csv'
ubicacionAEI = 'manzanillo_forest_results_acoustic_evenness_AEI.csv'
ubicacionBIO = 'manzanillo_forest_results_bioacoustic_index_BIO.csv'
ubicacionNDSI = 'manzanillo_forest_results_ndsi.csv'
# Title used in every figure for this site, e.g. "Bosque de Manzanillo".
titulo = "Bosque de Manzanillo"
# 2x3 grid collecting all six indices in one summary figure.
fig, axs = plt.subplots(2, 3)
# --- ACI (acoustic complexity index, CSV column 10) ---
data = []
x_labels = []
with open(ubicacionACI, 'r') as csvfile:
    plots = csv.reader(csvfile, delimiter=',')
    next(csvfile)  # skip the header row
    for row in plots:
        if row:
            data.append([float(row[10])])
            # Hour label parsed from the file name in column 0, minus ".wav"-style extension.
            x_labels.append(int(row[0][:-4]))
# Sort (hour, value) pairs chronologically before plotting.
zipped_lists = zip(x_labels, data)
sorted_pairs = sorted(zipped_lists)
tuples = zip(*sorted_pairs)
list1, list2 = [list(pair) for pair in tuples]  # `pair` avoids shadowing builtin `tuple`
figACI = plt.figure()
plt.ylim((1200, 2100))
axs[0, 0].plot(list1, list2, marker='o', color='black')
axs[0, 0].set_ylim((1200, 2100))
axs[0, 0].set_title('ACI')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
figACI.suptitle("{} ACI".format(titulo), fontsize=20)
figACI.savefig("{} ACI.jpg".format(titulo))
plt.close(figACI)  # free the standalone figure once saved (avoids >20 open-figure warning)
# --- ADI (acoustic diversity index, CSV column 9) ---
data = []
x_labels = []
with open(ubicacionADI, 'r') as csvfile:
    plots = csv.reader(csvfile, delimiter=',')
    next(csvfile)  # skip the header row
    for row in plots:
        if row:
            data.append([float(row[9])])
            x_labels.append(int(row[0][:-4]))
zipped_lists = zip(x_labels, data)
sorted_pairs = sorted(zipped_lists)
tuples = zip(*sorted_pairs)
list1, list2 = [list(pair) for pair in tuples]
figADI = plt.figure()
axs[0, 2].plot(list1, list2, marker='o', color='black')
axs[0, 2].set_title('ADI')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
plt.ylim((1.0, 2.2))
# BUG FIX: these two calls previously targeted figACI, so "{} ADI.jpg" was
# saved from the ACI figure; title and save the ADI figure instead.
figADI.suptitle("{} ADI".format(titulo), fontsize=20)
figADI.savefig("{} ADI.jpg".format(titulo))
plt.close(figADI)
# --- H (acoustic entropy, CSV column 10) ---
data = []
x_labels = []
with open(ubicacionH, 'r') as csvfile:
    plots = csv.reader(csvfile, delimiter=',')
    next(csvfile)  # skip the header row
    for row in plots:
        if row:
            data.append([float(row[10])])
            x_labels.append(int(row[0][:-4]))
zipped_lists = zip(x_labels, data)
sorted_pairs = sorted(zipped_lists)
tuples = zip(*sorted_pairs)
list1, list2 = [list(pair) for pair in tuples]
figH = plt.figure()
axs[0, 1].plot(list1, list2, marker='o', color='black')
axs[0, 1].set_title('H')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
plt.ylim((0.65, 0.9))
figH.suptitle("{} H".format(titulo), fontsize=20)
figH.savefig("{} H.jpg".format(titulo))
plt.close(figH)
# --- AEI (acoustic evenness index, CSV column 9) ---
data = []
x_labels = []
with open(ubicacionAEI, 'r') as csvfile:
    plots = csv.reader(csvfile, delimiter=',')
    next(csvfile)  # skip the header row
    for row in plots:
        if row:
            data.append([float(row[9])])
            x_labels.append(int(row[0][:-4]))
zipped_lists = zip(x_labels, data)
sorted_pairs = sorted(zipped_lists)
tuples = zip(*sorted_pairs)
list1, list2 = [list(pair) for pair in tuples]
figAEI = plt.figure()
axs[1, 0].plot(list1, list2, marker='o', color='black')
axs[1, 0].set_title('AEI')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
plt.ylim((0.25, 0.75))
figAEI.suptitle("{} AEI".format(titulo), fontsize=20)
figAEI.savefig("{} AEI.jpg".format(titulo))
plt.close(figAEI)
# --- BIO (bioacoustic index, CSV column 9) ---
data = []
x_labels = []
with open(ubicacionBIO, 'r') as csvfile:
    plots = csv.reader(csvfile, delimiter=',')
    next(csvfile)  # skip the header row
    for row in plots:
        if row:
            data.append([float(row[9])])
            x_labels.append(int(row[0][:-4]))
zipped_lists = zip(x_labels, data)
sorted_pairs = sorted(zipped_lists)
tuples = zip(*sorted_pairs)
list1, list2 = [list(pair) for pair in tuples]
figBIO = plt.figure()  # was reusing the name figAEI; renamed for clarity
axs[1, 1].plot(list1, list2, marker='o', color='black')
axs[1, 1].set_title('BIO')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
plt.ylim((0, 75))
figBIO.suptitle("{} BIO".format(titulo), fontsize=20)
figBIO.savefig("{} BIO.jpg".format(titulo))
plt.close(figBIO)
# --- NDSI (normalized difference soundscape index, CSV column 11) ---
data = []
x_labels = []
with open(ubicacionNDSI, 'r') as csvfile:
    plots = csv.reader(csvfile, delimiter=',')
    next(csvfile)  # skip the header row
    for row in plots:
        if row:
            data.append([float(row[11])])
            x_labels.append(int(row[0][:-4]))
zipped_lists = zip(x_labels, data)
sorted_pairs = sorted(zipped_lists)
tuples = zip(*sorted_pairs)
list1, list2 = [list(pair) for pair in tuples]
figNDSI = plt.figure()  # was reusing the name figAEI; renamed for clarity
axs[1, 2].plot(list1, list2, marker='o', color='black')
axs[1, 2].set_title('NDSI')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
plt.ylim((-0.35, 1.1))
figNDSI.suptitle("{} NDSI".format(titulo), fontsize=20)
figNDSI.savefig("{} NDSI.jpg".format(titulo))
plt.close(figNDSI)
# Shared axis labels and title for the 2x3 summary figure, then save it.
fig.text(0.5, 0.04, 'Tiempo (24hrs)', ha='center')
fig.text(0.04, 0.5, 'Indice acustico', va='center', rotation='vertical')
fig.suptitle(titulo, fontsize=20)
fig.set_size_inches(12, 8)
fig.savefig("{} plots.jpg".format(titulo))
# ---------------------------------------------------------------
# Costa del caribe
def _leer_indice(ruta, columna):
    """Read one acoustic-index CSV; return ([hours], [[value]]) sorted by hour.

    The hour is parsed from the recording file name in column 0 (extension
    stripped); the index value comes from the given column.
    """
    horas = []
    valores = []
    with open(ruta, 'r') as archivo:
        lector = csv.reader(archivo, delimiter=',')
        next(archivo)  # skip the header row
        for fila in lector:
            if fila:
                valores.append([float(fila[columna])])
                horas.append(int(fila[0][:-4]))
    ordenados = sorted(zip(horas, valores))
    return [list(columna_eje) for columna_eje in zip(*ordenados)]
# Result-file locations (ubicacion) for each acoustic index at this site.
ubicacionACI = 'caribbean_coast_results_acoustic_complexity_index_ACI.csv'
ubicacionADI = 'caribbean_coast_results_acoustic_diversity_ADI.csv'
ubicacionH = 'caribbean_coast_results_acoustic_entropy_H.csv'
ubicacionAEI = 'caribbean_coast_results_acoustic_evenness_AEI.csv'
ubicacionBIO = 'caribbean_coast_results_bioacoustic_index_BIO.csv'
ubicacionNDSI = 'caribbean_coast_results_ndsi.csv'
# Figure title for this site.
titulo = "Costa del caribe"
# 2x3 summary grid collecting all six indices.
fig, axs = plt.subplots(2, 3)
# ACI (column 10)
list1, list2 = _leer_indice(ubicacionACI, 10)
figACI = plt.figure()
plt.ylim((1200, 2100))
axs[0, 0].plot(list1, list2, marker='o', color='black')
axs[0, 0].set_ylim((1200, 2100))
axs[0, 0].set_title('ACI')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
figACI.suptitle("{} ACI".format(titulo), fontsize=20)
figACI.savefig("{} ACI.jpg".format(titulo))
# ADI (column 9)
list1, list2 = _leer_indice(ubicacionADI, 9)
figADI = plt.figure()
axs[0, 2].plot(list1, list2, marker='o', color='black')
axs[0, 2].set_title('ADI')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
plt.ylim((1.0, 2.2))
figADI.suptitle("{} ADI".format(titulo), fontsize=20)
figADI.savefig("{} ADI.jpg".format(titulo))
# H (column 10)
list1, list2 = _leer_indice(ubicacionH, 10)
figH = plt.figure()
axs[0, 1].plot(list1, list2, marker='o', color='black')
axs[0, 1].set_title('H')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
plt.ylim((0.65, 0.9))
figH.suptitle("{} H".format(titulo), fontsize=20)
figH.savefig("{} H.jpg".format(titulo))
# AEI (column 9)
list1, list2 = _leer_indice(ubicacionAEI, 9)
figAEI = plt.figure()
axs[1, 0].plot(list1, list2, marker='o', color='black')
axs[1, 0].set_title('AEI')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
plt.ylim((0.25, 0.75))
figAEI.suptitle("{} AEI".format(titulo), fontsize=20)
figAEI.savefig("{} AEI.jpg".format(titulo))
# BIO (column 9)
list1, list2 = _leer_indice(ubicacionBIO, 9)
figBIO = plt.figure()
axs[1, 1].plot(list1, list2, marker='o', color='black')
axs[1, 1].set_title('BIO')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
plt.ylim((0, 75))
figBIO.suptitle("{} BIO".format(titulo), fontsize=20)
figBIO.savefig("{} BIO.jpg".format(titulo))
# NDSI (column 11)
list1, list2 = _leer_indice(ubicacionNDSI, 11)
figNDSI = plt.figure()
axs[1, 2].plot(list1, list2, marker='o', color='black')
axs[1, 2].set_title('NDSI')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
plt.ylim((-0.6, 1.1))
figNDSI.suptitle("{} NDSI".format(titulo), fontsize=20)
figNDSI.savefig("{} NDSI.jpg".format(titulo))
# Shared labels and title for the summary grid, then save it.
fig.text(0.5, 0.04, 'Tiempo (24hrs)', ha='center')
fig.text(0.04, 0.5, 'Indice acustico', va='center', rotation='vertical')
fig.suptitle(titulo, fontsize=20)
fig.set_size_inches(12, 8)
fig.savefig("{} plots.jpg".format(titulo))
# ---------------------------------------------------------------
# Parque nacional del blanco
def _leer_indice(ruta, columna):
    """Read one acoustic-index CSV; return ([hours], [[value]]) sorted by hour.

    The hour is parsed from the recording file name in column 0 (extension
    stripped); the index value comes from the given column.
    """
    horas = []
    valores = []
    with open(ruta, 'r') as archivo:
        lector = csv.reader(archivo, delimiter=',')
        next(archivo)  # skip the header row
        for fila in lector:
            if fila:
                valores.append([float(fila[columna])])
                horas.append(int(fila[0][:-4]))
    ordenados = sorted(zip(horas, valores))
    return [list(columna_eje) for columna_eje in zip(*ordenados)]
# Result-file locations (ubicacion) for each acoustic index at this site.
ubicacionACI = 'blanco_national_park_results_acoustic_complexity_index_ACI.csv'
ubicacionADI = 'blanco_national_park_results_acoustic_diversity_ADI.csv'
ubicacionH = 'blanco_national_park_results_acoustic_entropy_H.csv'
ubicacionAEI = 'blanco_national_park_results_acoustic_evenness_AEI.csv'
ubicacionBIO = 'blanco_national_park_results_bioacoustic_index_BIO.csv'
ubicacionNDSI = 'blanco_national_park_results_ndsi.csv'
# Figure title for this site.
titulo = "Parque nacional del blanco"
# 2x3 summary grid collecting all six indices.
fig, axs = plt.subplots(2, 3)
# ACI (column 10)
list1, list2 = _leer_indice(ubicacionACI, 10)
figACI = plt.figure()
plt.ylim((1200, 2100))
axs[0, 0].plot(list1, list2, marker='o', color='black')
axs[0, 0].set_ylim((1200, 2100))
axs[0, 0].set_title('ACI')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
figACI.suptitle("{} ACI".format(titulo), fontsize=20)
figACI.savefig("{} ACI.jpg".format(titulo))
# ADI (column 9)
list1, list2 = _leer_indice(ubicacionADI, 9)
figADI = plt.figure()
axs[0, 2].plot(list1, list2, marker='o', color='black')
axs[0, 2].set_title('ADI')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
plt.ylim((1.0, 2.2))
figADI.suptitle("{} ADI".format(titulo), fontsize=20)
figADI.savefig("{} ADI.jpg".format(titulo))
# H (column 10) -- note: no y-limit is applied for this site.
list1, list2 = _leer_indice(ubicacionH, 10)
figH = plt.figure()
axs[0, 1].plot(list1, list2, marker='o', color='black')
axs[0, 1].set_title('H')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
figH.suptitle("{} H".format(titulo), fontsize=20)
figH.savefig("{} H.jpg".format(titulo))
# AEI (column 9)
list1, list2 = _leer_indice(ubicacionAEI, 9)
figAEI = plt.figure()
axs[1, 0].plot(list1, list2, marker='o', color='black')
axs[1, 0].set_title('AEI')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
plt.ylim((0.25, 0.85))
figAEI.suptitle("{} AEI".format(titulo), fontsize=20)
figAEI.savefig("{} AEI.jpg".format(titulo))
# BIO (column 9)
list1, list2 = _leer_indice(ubicacionBIO, 9)
figBIO = plt.figure()
axs[1, 1].plot(list1, list2, marker='o', color='black')
axs[1, 1].set_title('BIO')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
plt.ylim((0, 75))
figBIO.suptitle("{} BIO".format(titulo), fontsize=20)
figBIO.savefig("{} BIO.jpg".format(titulo))
# NDSI (column 11)
list1, list2 = _leer_indice(ubicacionNDSI, 11)
figNDSI = plt.figure()
axs[1, 2].plot(list1, list2, marker='o', color='black')
axs[1, 2].set_title('NDSI')
plt.plot(list1, list2, marker='o', color='black')
plt.xlabel('Tiempo (24hrs)')
plt.ylabel('Indice acustico')
plt.ylim((-0.6, 1.1))
figNDSI.suptitle("{} NDSI".format(titulo), fontsize=20)
figNDSI.savefig("{} NDSI.jpg".format(titulo))
# Shared labels and title for the summary grid, then save it.
fig.text(0.5, 0.04, 'Tiempo (24hrs)', ha='center')
fig.text(0.04, 0.5, 'Indice acustico', va='center', rotation='vertical')
fig.suptitle(titulo, fontsize=20)
fig.set_size_inches(12, 8)
fig.savefig("{} plots.jpg".format(titulo))
| 30.590406
| 80
| 0.644029
| 2,348
| 16,580
| 4.44293
| 0.056218
| 0.036235
| 0.048313
| 0.069018
| 0.956864
| 0.945552
| 0.895514
| 0.88171
| 0.88171
| 0.88171
| 0
| 0.035501
| 0.170929
| 16,580
| 542
| 81
| 30.590406
| 0.72341
| 0.027021
| 0
| 0.93424
| 0
| 0
| 0.143976
| 0.057192
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.00907
| 0
| 0.00907
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8ff649882857b45ea87ac3dc54b188abd01262bf
| 3,437
|
py
|
Python
|
src/tests/test_comms_models.py
|
reddcoin-project/ReddConnect
|
5c212683de6b80b81fd15ed05239c3a1b46c3afd
|
[
"BSD-3-Clause"
] | 5
|
2015-01-30T08:47:59.000Z
|
2022-01-22T19:27:03.000Z
|
src/tests/test_comms_models.py
|
reddcoin-project/ReddConnect
|
5c212683de6b80b81fd15ed05239c3a1b46c3afd
|
[
"BSD-3-Clause"
] | 2
|
2017-12-28T21:36:48.000Z
|
2017-12-28T21:36:57.000Z
|
src/tests/test_comms_models.py
|
reddcoin-project/ReddConnect
|
5c212683de6b80b81fd15ed05239c3a1b46c3afd
|
[
"BSD-3-Clause"
] | 1
|
2019-01-05T15:51:37.000Z
|
2019-01-05T15:51:37.000Z
|
import unittest
class TestMsg(unittest.TestCase):
    """Placeholder tests for the Msg model; each case passes until implemented."""

    def test___init__(self):
        # TODO: construct a Msg, e.g. msg = Msg(*args, **kwargs)
        self.assertTrue(True)

    def test___str__(self):
        # TODO: verify the string form, e.g. self.assertEqual(expected, msg.__str__())
        self.assertTrue(True)

    def test_remove_receiver(self):
        # TODO: verify e.g. self.assertEqual(expected, msg.remove_receiver(obj))
        self.assertTrue(True)

    def test_remove_sender(self):
        # TODO: verify e.g. self.assertEqual(expected, msg.remove_sender(value))
        self.assertTrue(True)
class TestTempMsg(unittest.TestCase):
    """Placeholder tests for the TempMsg model; each case passes until implemented."""

    def test___init__(self):
        # TODO: construct e.g. temp_msg = TempMsg(senders, receivers, channels,
        # message, header, type, lockstring, hide_from)
        self.assertTrue(True)

    def test___str__(self):
        # TODO: verify e.g. self.assertEqual(expected, temp_msg.__str__())
        self.assertTrue(True)

    def test_access(self):
        # TODO: verify e.g. self.assertEqual(
        #     expected, temp_msg.access(accessing_obj, access_type, default))
        self.assertTrue(True)

    def test_remove_receiver(self):
        # TODO: verify e.g. self.assertEqual(expected, temp_msg.remove_receiver(obj))
        self.assertTrue(True)

    def test_remove_sender(self):
        # TODO: verify e.g. self.assertEqual(expected, temp_msg.remove_sender(obj))
        self.assertTrue(True)
class TestChannelDB(unittest.TestCase):
    """Placeholder tests for the ChannelDB model; each case passes until implemented."""

    def test___init__(self):
        # TODO: construct e.g. channel_d_b = ChannelDB(*args, **kwargs)
        self.assertTrue(True)

    def test___str__(self):
        # TODO: verify e.g. self.assertEqual(expected, channel_d_b.__str__())
        self.assertTrue(True)

    def test_access(self):
        # TODO: verify e.g. self.assertEqual(
        #     expected, channel_d_b.access(accessing_obj, access_type, default))
        self.assertTrue(True)

    def test_connect(self):
        # TODO: verify e.g. self.assertEqual(expected, channel_d_b.connect(player))
        self.assertTrue(True)

    def test_delete(self):
        # TODO: verify e.g. self.assertEqual(expected, channel_d_b.delete())
        self.assertTrue(True)

    def test_disconnect(self):
        # TODO: verify e.g. self.assertEqual(expected, channel_d_b.disconnect(player))
        self.assertTrue(True)

    def test_has_connection(self):
        # TODO: verify e.g. self.assertEqual(expected, channel_d_b.has_connection(player))
        self.assertTrue(True)
# Allow running this test module directly: `python test_comms_models.py`.
if __name__ == '__main__':
    unittest.main()
| 40.435294
| 104
| 0.673553
| 421
| 3,437
| 5.228029
| 0.130641
| 0.050886
| 0.101772
| 0.167197
| 0.926851
| 0.924125
| 0.885507
| 0.875511
| 0.819173
| 0.782372
| 0
| 0
| 0.224324
| 3,437
| 84
| 105
| 40.916667
| 0.825581
| 0.615653
| 0
| 0.736842
| 0
| 0
| 0.006275
| 0
| 0
| 0
| 0
| 0.011905
| 0.421053
| 1
| 0.421053
| false
| 0
| 0.026316
| 0
| 0.526316
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
8f2919158f454e06613cc1cbf61d390e2945f333
| 9,649
|
py
|
Python
|
Python_Backend/testing/test_answer_generator.py
|
kaitlin31415/BCI4KidsMediapipe
|
913ad540716bec476148a3f31001b279c86d9297
|
[
"Apache-2.0"
] | 5
|
2021-10-04T20:55:37.000Z
|
2022-01-31T22:12:31.000Z
|
Python_Backend/testing/test_answer_generator.py
|
kaitlin31415/BCI4KidsMediapipe
|
913ad540716bec476148a3f31001b279c86d9297
|
[
"Apache-2.0"
] | 158
|
2021-09-29T23:43:08.000Z
|
2022-03-31T21:05:46.000Z
|
Python_Backend/testing/test_answer_generator.py
|
kaitlin31415/BCI4KidsMediapipe
|
913ad540716bec476148a3f31001b279c86d9297
|
[
"Apache-2.0"
] | 3
|
2021-09-27T23:00:36.000Z
|
2022-01-31T22:12:33.000Z
|
import unittest
import sys
sys.path.insert(0, '../')
import AnswerGenerator
from AnswerGenerator import Answer
class TestAnswerGenerator(unittest.TestCase):
    """Unit tests for FacialAnswerGenerator and IrisAnswerGenerator.

    Private queue contents are inspected via the name-mangled attributes
    ``_FacialAnswerGenerator__past_frames`` and
    ``_IrisAnswerGenerator__past_states``.
    """

    # ------------------- FacialAnswerGenerator -------------------
    def test_facial_add_smile_to_queue(self):
        gen = AnswerGenerator.FacialAnswerGenerator()
        gen.add_frame_to_queue("SMILE")
        newest = gen._FacialAnswerGenerator__past_frames[gen.QUEUE_SIZE - 1]
        self.assertEqual("SMILE", newest)

    def test_facial_add_frown_to_queue(self):
        gen = AnswerGenerator.FacialAnswerGenerator()
        gen.add_frame_to_queue("FROWN")
        newest = gen._FacialAnswerGenerator__past_frames[gen.QUEUE_SIZE - 1]
        self.assertEqual("FROWN", newest)

    def test_facial_overfill_queue(self):
        # Push twice the queue capacity; the queue keeps the newest frames.
        gen = AnswerGenerator.FacialAnswerGenerator()
        for i in range(gen.QUEUE_SIZE * 2):
            gen.add_frame_to_queue("SMILE" if i % 2 == 0 else "FROWN")
        for i in range(gen.QUEUE_SIZE):
            expected = "SMILE" if i % 2 == 0 else "FROWN"
            self.assertEqual(expected, gen._FacialAnswerGenerator__past_frames[i])

    def test_facial_clear_queue(self):
        gen = AnswerGenerator.FacialAnswerGenerator()
        gen.add_frame_to_queue("SMILE")
        gen.clear_queue()
        for state in gen._FacialAnswerGenerator__past_frames:
            self.assertEqual("INIT", state)

    def test_facial_clear_full_queue(self):
        gen = AnswerGenerator.FacialAnswerGenerator()
        for i in range(gen.QUEUE_SIZE):
            gen.add_frame_to_queue("SMILE" if i % 2 == 0 else "FROWN")
        gen.clear_queue()
        for state in gen._FacialAnswerGenerator__past_frames:
            self.assertEqual("INIT", state)

    def test_facial_clear_empty_queue(self):
        gen = AnswerGenerator.FacialAnswerGenerator()
        gen.clear_queue()
        for state in gen._FacialAnswerGenerator__past_frames:
            self.assertEqual("INIT", state)

    def test_facial_yes_series_all_smiles(self):
        gen = AnswerGenerator.FacialAnswerGenerator()
        for _ in range(len(gen._FacialAnswerGenerator__past_frames)):
            gen.add_frame_to_queue("SMILE")
        self.assertEqual(Answer.YES, gen.determine_answer())

    def test_facial_yes_series_one_frown(self):
        # A single frown after a full run of smiles must not flip the answer.
        gen = AnswerGenerator.FacialAnswerGenerator()
        for _ in range(len(gen._FacialAnswerGenerator__past_frames)):
            gen.add_frame_to_queue("SMILE")
        gen.add_frame_to_queue("FROWN")
        self.assertEqual(Answer.YES, gen.determine_answer())

    def test_facial_no_series_all_frowns(self):
        gen = AnswerGenerator.FacialAnswerGenerator()
        for _ in range(len(gen._FacialAnswerGenerator__past_frames)):
            gen.add_frame_to_queue("FROWN")
        self.assertEqual(Answer.NO, gen.determine_answer())

    def test_facial_no_series_one_smile(self):
        gen = AnswerGenerator.FacialAnswerGenerator()
        for _ in range(len(gen._FacialAnswerGenerator__past_frames)):
            gen.add_frame_to_queue("FROWN")
        gen.add_frame_to_queue("SMILE")
        self.assertEqual(Answer.NO, gen.determine_answer())

    def test_facial_undefined_series_mixed(self):
        gen = AnswerGenerator.FacialAnswerGenerator()
        for i in range(gen.QUEUE_SIZE):
            frame = "SMILE" if i % 3 == 0 else ("FROWN" if i % 2 == 0 else "NEUTRAL")
            gen.add_frame_to_queue(frame)
        self.assertEqual(Answer.UNDEFINED, gen.determine_answer())

    def test_facial_undefined_series_init(self):
        # A freshly constructed generator has no real frames yet.
        gen = AnswerGenerator.FacialAnswerGenerator()
        self.assertEqual(Answer.UNDEFINED, gen.determine_answer())

    def test_facial_undefined_series_clear(self):
        gen = AnswerGenerator.FacialAnswerGenerator()
        for _ in range(len(gen._FacialAnswerGenerator__past_frames)):
            gen.add_frame_to_queue("SMILE")
        gen.clear_queue()
        self.assertEqual(Answer.UNDEFINED, gen.determine_answer())

    def test_facial_undefined_series_neutral(self):
        gen = AnswerGenerator.FacialAnswerGenerator()
        for _ in range(len(gen._FacialAnswerGenerator__past_frames)):
            gen.add_frame_to_queue("NEUTRAL")
        self.assertEqual(Answer.UNDEFINED, gen.determine_answer())

    def test_facial_add_invalid_frame(self):
        with self.assertRaises(Exception) as context:
            gen = AnswerGenerator.FacialAnswerGenerator()
            gen.add_frame_to_queue("San Pellegrino")
        self.assertIn("FacialAnswerGenerator: Invalid frame cannot be added to queue",
                      str(context.exception))

    # -------------------- IrisAnswerGenerator --------------------
    def test_iris_add_yes_to_queue(self):
        gen = AnswerGenerator.IrisAnswerGenerator()
        gen.add_frame_to_queue("YES")
        newest = gen._IrisAnswerGenerator__past_states[gen.QUEUE_SIZE - 1]
        self.assertEqual("YES", newest)

    def test_iris_add_frown_to_queue(self):
        # NOTE(review): name says "frown" but the iris generator uses "NO".
        gen = AnswerGenerator.IrisAnswerGenerator()
        gen.add_frame_to_queue("NO")
        newest = gen._IrisAnswerGenerator__past_states[gen.QUEUE_SIZE - 1]
        self.assertEqual("NO", newest)

    def test_iris_overfill_queue(self):
        # Push twice the queue capacity; the queue keeps the newest states.
        gen = AnswerGenerator.IrisAnswerGenerator()
        for i in range(gen.QUEUE_SIZE * 2):
            gen.add_frame_to_queue("YES" if i % 2 == 0 else "NO")
        for i in range(gen.QUEUE_SIZE):
            expected = "YES" if i % 2 == 0 else "NO"
            self.assertEqual(expected, gen._IrisAnswerGenerator__past_states[i])

    def test_iris_clear_queue(self):
        gen = AnswerGenerator.IrisAnswerGenerator()
        gen.add_frame_to_queue("YES")
        gen.clear_queue()
        for state in gen._IrisAnswerGenerator__past_states:
            self.assertEqual("INIT", state)

    def test_iris_clear_full_queue(self):
        gen = AnswerGenerator.IrisAnswerGenerator()
        for i in range(gen.QUEUE_SIZE):
            gen.add_frame_to_queue("YES" if i % 2 == 0 else "NO")
        gen.clear_queue()
        for state in gen._IrisAnswerGenerator__past_states:
            self.assertEqual("INIT", state)

    def test_iris_clear_empty_queue(self):
        gen = AnswerGenerator.IrisAnswerGenerator()
        gen.clear_queue()
        for state in gen._IrisAnswerGenerator__past_states:
            self.assertEqual("INIT", state)

    def test_iris_yes_series_all_yeses(self):
        gen = AnswerGenerator.IrisAnswerGenerator()
        for _ in range(len(gen._IrisAnswerGenerator__past_states)):
            gen.add_frame_to_queue("YES")
        self.assertEqual(Answer.YES, gen.determine_answer())

    def test_iris_yes_series_one_no(self):
        # A single NO after a full run of YES must not flip the answer.
        gen = AnswerGenerator.IrisAnswerGenerator()
        for _ in range(len(gen._IrisAnswerGenerator__past_states)):
            gen.add_frame_to_queue("YES")
        gen.add_frame_to_queue("NO")
        self.assertEqual(Answer.YES, gen.determine_answer())

    def test_iris_no_series_all_nos(self):
        gen = AnswerGenerator.IrisAnswerGenerator()
        for _ in range(len(gen._IrisAnswerGenerator__past_states)):
            gen.add_frame_to_queue("NO")
        self.assertEqual(Answer.NO, gen.determine_answer())

    def test_iris_no_series_one_yes(self):
        gen = AnswerGenerator.IrisAnswerGenerator()
        for _ in range(len(gen._IrisAnswerGenerator__past_states)):
            gen.add_frame_to_queue("NO")
        gen.add_frame_to_queue("YES")
        self.assertEqual(Answer.NO, gen.determine_answer())

    def test_iris_undefined_series_mixed(self):
        gen = AnswerGenerator.IrisAnswerGenerator()
        for i in range(gen.QUEUE_SIZE):
            state = "YES" if i % 3 == 0 else ("NO" if i % 2 == 0 else "NEUTRAL")
            gen.add_frame_to_queue(state)
        self.assertEqual(Answer.UNDEFINED, gen.determine_answer())

    def test_iris_undefined_series_init(self):
        # A freshly constructed generator has no real states yet.
        gen = AnswerGenerator.IrisAnswerGenerator()
        self.assertEqual(Answer.UNDEFINED, gen.determine_answer())

    def test_iris_undefined_series_clear(self):
        gen = AnswerGenerator.IrisAnswerGenerator()
        for _ in range(len(gen._IrisAnswerGenerator__past_states)):
            gen.add_frame_to_queue("YES")
        gen.clear_queue()
        self.assertEqual(Answer.UNDEFINED, gen.determine_answer())

    def test_iris_undefined_series_neutral(self):
        gen = AnswerGenerator.IrisAnswerGenerator()
        for _ in range(len(gen._IrisAnswerGenerator__past_states)):
            gen.add_frame_to_queue("NEUTRAL")
        self.assertEqual(Answer.UNDEFINED, gen.determine_answer())

    def test_iris_add_invalid_frame(self):
        with self.assertRaises(Exception) as context:
            gen = AnswerGenerator.IrisAnswerGenerator()
            gen.add_frame_to_queue("San Pellegrino")
        self.assertIn("IrisAnswerGenerator: Invalid state cannot be added to queue",
                      str(context.exception))
| 32.270903
| 114
| 0.640792
| 1,096
| 9,649
| 5.292883
| 0.069343
| 0.053094
| 0.065506
| 0.078607
| 0.913118
| 0.896914
| 0.857094
| 0.82572
| 0.818824
| 0.780038
| 0
| 0.003825
| 0.268422
| 9,649
| 299
| 115
| 32.270903
| 0.817963
| 0.007358
| 0
| 0.757895
| 0
| 0
| 0.036971
| 0.002298
| 0
| 0
| 0
| 0
| 0.178947
| 1
| 0.157895
| false
| 0
| 0.021053
| 0
| 0.184211
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8f3a1fee63ae89bbf8e3ea30f4f4f56144cac660
| 213
|
py
|
Python
|
stock_trading_backend/simulation/__init__.py
|
iryzhkov/stock-trading-backend
|
7161026b7b4deb78a934b66550c85a27c6b32933
|
[
"MIT"
] | 1
|
2021-01-27T18:24:02.000Z
|
2021-01-27T18:24:02.000Z
|
stock_trading_backend/simulation/__init__.py
|
iryzhkov/stock-trading-backend
|
7161026b7b4deb78a934b66550c85a27c6b32933
|
[
"MIT"
] | null | null | null |
stock_trading_backend/simulation/__init__.py
|
iryzhkov/stock-trading-backend
|
7161026b7b4deb78a934b66550c85a27c6b32933
|
[
"MIT"
] | null | null | null |
"""__init__ file for simulation sub-package
"""
from stock_trading_backend.simulation.reward_factory import create_reward
from stock_trading_backend.simulation.stock_market_simulation import StockMarketSimulation
| 42.6
| 90
| 0.882629
| 26
| 213
| 6.769231
| 0.615385
| 0.102273
| 0.181818
| 0.261364
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065728
| 213
| 4
| 91
| 53.25
| 0.884422
| 0.187793
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
8f80b92dc116dccfd10584b8abe2cbec44847852
| 14,091
|
py
|
Python
|
CDARTS_segmentation/dataloaders/__init__.py
|
penghouwen/CDARTS
|
7dddc8d5db4ed343979ed3687c6adfc39dfce284
|
[
"MIT"
] | 21
|
2020-06-19T01:05:38.000Z
|
2020-08-11T02:15:03.000Z
|
CDARTS_segmentation/dataloaders/__init__.py
|
penghouwen/CDARTS
|
7dddc8d5db4ed343979ed3687c6adfc39dfce284
|
[
"MIT"
] | 1
|
2020-07-11T17:01:07.000Z
|
2020-07-11T17:01:07.000Z
|
CDARTS_segmentation/dataloaders/__init__.py
|
penghouwen/CDARTS
|
7dddc8d5db4ed343979ed3687c6adfc39dfce284
|
[
"MIT"
] | 1
|
2020-11-02T02:43:20.000Z
|
2020-11-02T02:43:20.000Z
|
from dataloaders.datasets import cityscapes, kd, coco, combine_dbs, pascal, sbd
from dataloaders.segdatasets import Cityscapes, CityscapesPanoptic, COCOPanoptic
from torch.utils.data import DataLoader
import torch.utils.data.distributed
# ImageNet normalization statistics shared by every dataset configuration.
_NORM_MEAN = (0.485, 0.456, 0.406)
_NORM_STD = (0.229, 0.224, 0.225)

# Extra keyword arguments understood only by the panoptic dataset classes.
_PANOPTIC_KWARGS = dict(
    semantic_only=False,
    ignore_stuff_in_offset=True,
    small_instance_area=4096,
    small_instance_weight=3,
)


def _cityscapes_cfg(args, split, is_train, panoptic=False):
    """Keyword arguments for the Cityscapes segmentation dataset classes.

    The train split crops at the training resolution, other splits at the
    evaluation resolution.  When ``panoptic`` is set, the panoptic-only
    kwargs are merged in (those must be fed to ``CityscapesPanoptic``).
    """
    cfg = dict(
        root=args.data_path,
        split=split,
        is_train=is_train,
        crop_size=((args.image_height, args.image_width) if is_train
                   else (args.eval_height, args.eval_width)),
        mirror=True,
        min_scale=0.5,
        max_scale=2.0,
        scale_step_size=0.1,
        mean=_NORM_MEAN,
        std=_NORM_STD,
    )
    if panoptic:
        cfg.update(_PANOPTIC_KWARGS)
    return cfg


def _coco_panoptic_cfg(args, split, is_train):
    """Keyword arguments for the COCO panoptic dataset class.

    NOTE: the original configuration keeps ``is_train=True`` even for the
    val split, but disables every augmentation there (no mirroring, fixed
    scale of 1) — that behavior is preserved here.
    """
    if is_train:
        crop, mirror, min_scale, max_scale, step = \
            (args.image_height, args.image_width), True, 0.5, 1.5, 0.1
    else:
        crop, mirror, min_scale, max_scale, step = \
            (args.eval_height, args.eval_width), False, 1, 1, 0
    return dict(
        root=args.data_path,
        split=split,
        is_train=True,
        min_resize_value=args.image_height,
        max_resize_value=args.image_height,
        resize_factor=32,
        crop_size=crop,
        mirror=mirror,
        min_scale=min_scale,
        max_scale=max_scale,
        scale_step_size=step,
        mean=_NORM_MEAN,
        std=_NORM_STD,
        **_PANOPTIC_KWARGS,
    )


def _dist_sampler(dataset):
    """Short alias for the (rather long) DistributedSampler constructor."""
    return torch.utils.data.distributed.DistributedSampler(dataset)


def make_data_loader(args, **kwargs):
    """Construct train/val(/test) DataLoaders for the configured dataset.

    Args:
        args: parsed command-line namespace; the fields read here are
            ``dataset``, ``autodeeplab``, ``dist``, ``data_path``,
            ``batch_size``, ``workers``, ``use_sbd`` and the
            image/eval height and width.
        **kwargs: forwarded verbatim to most DataLoader constructors
            (e.g. ``num_workers``, ``pin_memory``).

    Returns:
        A tuple whose shape depends on the branch taken (preserved from the
        original implementation): distributed branches return
        ``(train_loader, train_sampler, val_loader, val_sampler, num_class)``;
        the non-distributed cityscapes branch returns
        ``(train_loader, val_loader, num_class)``; pascal/coco/kd return
        ``(loader1, loader2, val_loader, test_loader, num_class)``.

    Raises:
        Exception: when ``args.autodeeplab`` is not a recognized mode.
        NotImplementedError: for unsupported dataset/mode combinations.
    """
    root = args.data_path
    if args.dist:
        print("=> Using Distributed Sampler")  # fixed typo ("Distribued")
        if args.dataset == 'cityscapes':
            if args.autodeeplab == 'train':
                train_set = cityscapes.CityscapesSegmentation(args, root, split='retrain')
                num_class = train_set.NUM_CLASSES
                train_sampler = _dist_sampler(train_set)
                train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=False,
                                          sampler=train_sampler, **kwargs)
                val_set = cityscapes.CityscapesSegmentation(args, root, split='val')
                test_set = cityscapes.CityscapesSegmentation(args, root, split='test')
                val_sampler = _dist_sampler(val_set)
                test_sampler = _dist_sampler(test_set)
                val_loader = DataLoader(val_set, batch_size=args.batch_size, shuffle=False,
                                        sampler=val_sampler, **kwargs)
                test_loader = DataLoader(test_set, batch_size=args.batch_size, shuffle=False,
                                         sampler=test_sampler, **kwargs)
            elif args.autodeeplab == 'train_seg':
                train_set = Cityscapes(**_cityscapes_cfg(args, 'train', is_train=True))
                num_class = train_set.num_classes
                train_sampler = _dist_sampler(train_set)
                train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=False,
                                          sampler=train_sampler, **kwargs)
                val_set = Cityscapes(**_cityscapes_cfg(args, 'val', is_train=False))
                val_sampler = _dist_sampler(val_set)
                val_loader = DataLoader(val_set, batch_size=max(1, args.batch_size // 4), shuffle=False,
                                        sampler=val_sampler, num_workers=args.workers,
                                        pin_memory=True, drop_last=False)
            elif args.autodeeplab == 'train_seg_panoptic':
                train_set = CityscapesPanoptic(**_cityscapes_cfg(args, 'train', is_train=True, panoptic=True))
                num_class = train_set.num_classes
                train_sampler = _dist_sampler(train_set)
                train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=False,
                                          sampler=train_sampler, **kwargs)
                # BUGFIX: the val config carries panoptic-only kwargs
                # (semantic_only, ignore_stuff_in_offset, ...), so it must go to
                # CityscapesPanoptic; the original passed it to Cityscapes.
                val_set = CityscapesPanoptic(**_cityscapes_cfg(args, 'val', is_train=False, panoptic=True))
                val_sampler = _dist_sampler(val_set)
                val_loader = DataLoader(val_set, batch_size=max(1, args.batch_size // 4), shuffle=False,
                                        sampler=val_sampler, num_workers=args.workers,
                                        pin_memory=True, drop_last=False)
            else:
                raise Exception('autodeeplab param not set properly')
            return train_loader, train_sampler, val_loader, val_sampler, num_class
        elif args.dataset == 'coco':
            if args.autodeeplab == 'train_seg_panoptic':
                train_set = COCOPanoptic(**_coco_panoptic_cfg(args, 'train2017', is_train=True))
                num_class = train_set.num_classes
                train_sampler = _dist_sampler(train_set)
                train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=False,
                                          sampler=train_sampler, **kwargs)
                val_set = COCOPanoptic(**_coco_panoptic_cfg(args, 'val2017', is_train=False))
                val_sampler = _dist_sampler(val_set)
                # Larger val batch: evaluation crops are fixed-scale and cheaper.
                val_loader = DataLoader(val_set, batch_size=args.batch_size * 4, shuffle=False,
                                        sampler=val_sampler, num_workers=args.workers,
                                        pin_memory=True, drop_last=False)
                return train_loader, train_sampler, val_loader, val_sampler, num_class
            else:
                raise NotImplementedError
        else:
            # Previously this fell through and implicitly returned None,
            # which crashed at the caller's unpacking; fail explicitly.
            raise NotImplementedError
    else:
        if args.dataset == 'pascal':
            train_set = pascal.VOCSegmentation(args, root, split='train')
            val_set = pascal.VOCSegmentation(args, root, split='val')
            if args.use_sbd:
                # Optionally merge SBD annotations, excluding the val images.
                sbd_train = sbd.SBDSegmentation(args, root, split=['train', 'val'])
                train_set = combine_dbs.CombineDBs([train_set, sbd_train], excluded=[val_set])
            num_class = train_set.NUM_CLASSES
            train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=True, **kwargs)
            val_loader = DataLoader(val_set, batch_size=args.batch_size, shuffle=False, **kwargs)
            test_loader = None
            return train_loader, train_loader, val_loader, test_loader, num_class
        elif args.dataset == 'cityscapes':
            if args.autodeeplab == 'train_seg':
                train_set = Cityscapes(**_cityscapes_cfg(args, 'train', is_train=True))
                num_class = train_set.num_classes
                train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=False, **kwargs)
                val_set = Cityscapes(**_cityscapes_cfg(args, 'val', is_train=False))
                val_loader = DataLoader(val_set, batch_size=max(1, args.batch_size // 4), shuffle=False,
                                        num_workers=args.workers, pin_memory=True, drop_last=False)
            elif args.autodeeplab == 'train_seg_panoptic':
                train_set = CityscapesPanoptic(**_cityscapes_cfg(args, 'train', is_train=True, panoptic=True))
                num_class = train_set.num_classes
                train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=False, **kwargs)
                # BUGFIX (same as the distributed branch): panoptic val config
                # must be fed to CityscapesPanoptic, not Cityscapes.
                val_set = CityscapesPanoptic(**_cityscapes_cfg(args, 'val', is_train=False, panoptic=True))
                val_loader = DataLoader(val_set, batch_size=max(1, args.batch_size // 4), shuffle=False,
                                        num_workers=args.workers, pin_memory=True, drop_last=False)
            else:
                raise Exception('autodeeplab param not set properly')
            return train_loader, val_loader, num_class
        elif args.dataset == 'coco':
            train_set = coco.COCOSegmentation(args, root, split='train')
            val_set = coco.COCOSegmentation(args, root, split='val')
            num_class = train_set.NUM_CLASSES
            train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=True, **kwargs)
            val_loader = DataLoader(val_set, batch_size=args.batch_size, shuffle=False, **kwargs)
            test_loader = None
            return train_loader, train_loader, val_loader, test_loader, num_class
        elif args.dataset == 'kd':
            train_set = kd.CityscapesSegmentation(args, root, split='train')
            val_set = kd.CityscapesSegmentation(args, root, split='val')
            test_set = kd.CityscapesSegmentation(args, root, split='test')
            num_class = train_set.NUM_CLASSES
            # Two independent loaders over the same training set
            # (knowledge-distillation setup expects two streams).
            train_loader1 = DataLoader(train_set, batch_size=args.batch_size, shuffle=True, **kwargs)
            train_loader2 = DataLoader(train_set, batch_size=args.batch_size, shuffle=True, **kwargs)
            val_loader = DataLoader(val_set, batch_size=args.batch_size, shuffle=False, **kwargs)
            test_loader = DataLoader(test_set, batch_size=args.batch_size, shuffle=False, **kwargs)
            return train_loader1, train_loader2, val_loader, test_loader, num_class
        else:
            raise NotImplementedError
| 49.269231
| 183
| 0.542119
| 1,516
| 14,091
| 4.762533
| 0.078496
| 0.052355
| 0.034903
| 0.037673
| 0.914404
| 0.896676
| 0.840305
| 0.810665
| 0.786288
| 0.760249
| 0
| 0.039286
| 0.364133
| 14,091
| 285
| 184
| 49.442105
| 0.766518
| 0.009723
| 0
| 0.818533
| 0
| 0
| 0.042294
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003861
| false
| 0
| 0.015444
| 0
| 0.042471
| 0.003861
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
56b6b924248b6cfc33ccff023dc524acb14ee4cc
| 924
|
py
|
Python
|
coadd_mdetsims/tests/test_masking.py
|
beckermr/metadetect-coadding-sims
|
15ccaec353aa61c69ac9d78d1dfca8ce25bca3cf
|
[
"BSD-3-Clause"
] | null | null | null |
coadd_mdetsims/tests/test_masking.py
|
beckermr/metadetect-coadding-sims
|
15ccaec353aa61c69ac9d78d1dfca8ce25bca3cf
|
[
"BSD-3-Clause"
] | null | null | null |
coadd_mdetsims/tests/test_masking.py
|
beckermr/metadetect-coadding-sims
|
15ccaec353aa61c69ac9d78d1dfca8ce25bca3cf
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
from ..masking import generate_bad_columns, generate_cosmic_rays
def test_generate_cosmic_rays_smoke():
    """A seeded RNG should produce a cosmic-ray mask with at least one hit."""
    random_state = np.random.RandomState(seed=10)
    mask = generate_cosmic_rays((64, 64), rng=random_state)
    assert np.any(mask)
def test_generate_cosmic_rays_seed():
    """Identical seeds must reproduce the exact same cosmic-ray mask."""
    masks = []
    for _ in range(2):
        random_state = np.random.RandomState(seed=10)
        masks.append(generate_cosmic_rays((64, 64), rng=random_state))
    assert np.array_equal(masks[0], masks[1])
def test_generate_bad_columns_smoke():
    """A seeded RNG should produce a bad-column mask with at least one hit."""
    random_state = np.random.RandomState(seed=10)
    mask = generate_bad_columns((64, 64), rng=random_state)
    assert np.any(mask)
def test_generate_bad_columns_seed():
    """Identical seeds must reproduce the exact same bad-column mask."""
    masks = []
    for _ in range(2):
        random_state = np.random.RandomState(seed=10)
        masks.append(generate_bad_columns((64, 64), rng=random_state))
    assert np.array_equal(masks[0], masks[1])
| 24.972973
| 64
| 0.712121
| 141
| 924
| 4.425532
| 0.191489
| 0.076923
| 0.173077
| 0.211538
| 0.891026
| 0.778846
| 0.778846
| 0.746795
| 0.746795
| 0.400641
| 0
| 0.057069
| 0.165584
| 924
| 36
| 65
| 25.666667
| 0.75227
| 0
| 0
| 0.454545
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 1
| 0.181818
| false
| 0
| 0.090909
| 0
| 0.272727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
711a1c4876a4f94faf8752fa54bcef71e1cc8810
| 185
|
py
|
Python
|
zarya/nn/__init__.py
|
kefirski/zarya
|
db1f84cef1c4ffa28aa7adb5dea6cf9f2ebf2f84
|
[
"MIT"
] | null | null | null |
zarya/nn/__init__.py
|
kefirski/zarya
|
db1f84cef1c4ffa28aa7adb5dea6cf9f2ebf2f84
|
[
"MIT"
] | null | null | null |
zarya/nn/__init__.py
|
kefirski/zarya
|
db1f84cef1c4ffa28aa7adb5dea6cf9f2ebf2f84
|
[
"MIT"
] | null | null | null |
from .modules import Embedding
from .modules import GRUCell
from .modules import Hyperbolic
from .modules import Hyperplane
from .modules import Linear
from .parameter import Parameter
| 26.428571
| 32
| 0.837838
| 24
| 185
| 6.458333
| 0.375
| 0.354839
| 0.548387
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12973
| 185
| 6
| 33
| 30.833333
| 0.962733
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
85a9b5eda998f237214ce3175fe3c2835d3fa805
| 739
|
py
|
Python
|
rastervision/data/label_source/__init__.py
|
AirbusAerial/raster-vision
|
cfa7826169392e497fb57a540eb952fc6cee3a98
|
[
"Apache-2.0"
] | 2
|
2019-04-17T13:04:23.000Z
|
2020-10-04T10:28:27.000Z
|
rastervision/data/label_source/__init__.py
|
Yochengliu/raster-vision
|
f5badc387df86ce02d84e0e274a08026dbf65bd6
|
[
"Apache-2.0"
] | null | null | null |
rastervision/data/label_source/__init__.py
|
Yochengliu/raster-vision
|
f5badc387df86ce02d84e0e274a08026dbf65bd6
|
[
"Apache-2.0"
] | null | null | null |
# flake8: noqa
from rastervision.data.label_source.label_source import *
from rastervision.data.label_source.label_source_config import *
from rastervision.data.label_source.chip_classification_geojson_source import *
from rastervision.data.label_source.chip_classification_geojson_source_config import *
from rastervision.data.label_source.object_detection_geojson_source import *
from rastervision.data.label_source.object_detection_geojson_source_config import *
from rastervision.data.label_source.segmentation_class_transformer import SegmentationClassTransformer
from rastervision.data.label_source.semantic_segmentation_raster_source import *
from rastervision.data.label_source.semantic_segmentation_raster_source_config import *
| 61.583333
| 102
| 0.895805
| 92
| 739
| 6.815217
| 0.217391
| 0.192982
| 0.287081
| 0.358852
| 0.866029
| 0.866029
| 0.866029
| 0.722488
| 0.703349
| 0.424242
| 0
| 0.001431
| 0.054127
| 739
| 11
| 103
| 67.181818
| 0.895565
| 0.016238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
a4a7c9e862312c1db84142fe06d431de74200b8a
| 21,446
|
py
|
Python
|
test/test_parsers.py
|
jjfallete/cb-taxii-connector
|
31b42c8ea46d14f2af63788a8ffada0c998bdb46
|
[
"MIT"
] | 16
|
2015-09-21T18:22:00.000Z
|
2021-11-04T11:16:12.000Z
|
test/test_parsers.py
|
jjfallete/cb-taxii-connector
|
31b42c8ea46d14f2af63788a8ffada0c998bdb46
|
[
"MIT"
] | 20
|
2016-02-09T20:44:35.000Z
|
2022-03-28T20:48:09.000Z
|
test/test_parsers.py
|
jjfallete/cb-taxii-connector
|
31b42c8ea46d14f2af63788a8ffada0c998bdb46
|
[
"MIT"
] | 9
|
2015-09-28T08:12:23.000Z
|
2022-03-28T20:09:12.000Z
|
# coding: utf-8
# Copyright © 2014-2020 VMware, Inc. All Rights Reserved.
################################################################################
import unittest
import stix2patterns
from cbopensource.driver.taxii import STIXIndicator
from cbopensource.driver.taxii_parser import STIXPatternParser
# noinspection HttpUrlsUsage
class ParserTests(unittest.TestCase):
    """Unit tests for parsing STIX 2.1 indicator patterns into EDR reports.

    Each test builds a STIX indicator object, feeds it to ``STIXIndicator``
    and checks the derived ``report`` dict (timestamp, id, score, title,
    extracted IOCs, link).  Shared envelopes and assertions are factored
    into private helpers to avoid repeating them in every test.
    """

    # Feed URL attached to every parsed indicator (surfaces as the report link).
    _URL = "http://server:5000/taxii2/collections/collection-id-basic"

    # Pattern combining IPv4/IPv6 addresses and a domain name; shared by the
    # "complex ip with domain" family of tests.
    _IP_AND_DOMAIN_PATTERN = ("[ipv4-addr:value = '198.51.100.1/32' OR ipv4-addr:value = '203.0.113.33/32' OR "
                              "ipv6-addr:value = '2001:0db8:dead:beef:dead:beef:dead:0001/128' OR domain-name:value = "
                              "'example.com']")

    @staticmethod
    def _hash_object(pattern):
        """STIX indicator envelope used by the file-hash test cases."""
        return {'created': '2014-05-08T09:00:00.000Z', 'id': 'indicator--cd981c25-8042-4166-8945-51178443bdac',
                'indicator_types': ['file-hash-watchlist'], 'modified': '2014-05-08T09:00:00.000Z',
                'name': 'File hash for Poison Ivy variant',
                'pattern': pattern,
                'pattern_type': 'stix', 'spec_version': '2.1', 'type': 'indicator',
                'valid_from': '2014-05-08T09:00:00.000000Z'}

    @staticmethod
    def _site_object(pattern):
        """STIX indicator envelope used by the DNS/IP test cases."""
        return {
            "type": "indicator",
            "spec_version": "2.1",
            "id": "indicator--d81f86b9-975b-4c0b-875e-810c5ad45a4f",
            "created": "2014-06-29T13:49:37.079Z",
            "modified": "2014-06-29T13:49:37.079Z",
            "name": "Malicious site hosting downloader",
            "description": "This organized threat actor group operates to create profit from all types of crime.",
            "indicator_types": [
                "malicious-activity"
            ],
            "pattern": pattern,
            "pattern_type": "stix",
            "valid_from": "2014-06-29T13:49:37.079Z"
        }

    def _assert_common_fields(self, report, created, stix_id, title):
        """Assertions shared by every successful parse: metadata plus link."""
        assert "timestamp" in report
        assert report["timestamp"] == int(STIXIndicator.strptime(created).timestamp())
        assert "id" in report
        assert report["id"] == stix_id
        assert 'score' in report and report['score'] == 100
        assert 'title' in report and report['title'] == title
        assert 'iocs' in report
        assert 'link' in report

    def _assert_hash_report(self, report):
        """Common-field checks for reports built from the hash envelope."""
        self._assert_common_fields(report, '2014-05-08T09:00:00.000Z',
                                   'indicator--cd981c25-8042-4166-8945-51178443bdac',
                                   'File hash for Poison Ivy variant')

    def _assert_site_report(self, report):
        """Common-field checks for reports built from the site envelope."""
        self._assert_common_fields(report, '2014-06-29T13:49:37.079Z',
                                   'indicator--d81f86b9-975b-4c0b-875e-810c5ad45a4f',
                                   'Malicious site hosting downloader')

    def test_parser_basic(self):
        stix_object = self._hash_object(
            "[file:hashes.'SHA-256' = 'ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6c']")
        report = STIXIndicator(stix_object, self._URL).report
        self._assert_hash_report(report)
        assert 'sha256' in report['iocs'] and \
               'ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6c' in report['iocs']['sha256']

    def test_parser_basic_error_in_pattern(self):
        # A non-parseable pattern must surface as a ParseException.
        stix_object = self._hash_object("afdsafdsfdafas")
        self.assertRaises(stix2patterns.exceptions.ParseException, STIXIndicator, stix_object, self._URL)

    def test_parser_basic_two_hashes(self):
        stix_object = self._hash_object(
            "[file:hashes.'SHA-256' = 'ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6c' OR "
            "file:hashes.'SHA-256' = 'ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6d']")
        report = STIXIndicator(stix_object, self._URL).report
        self._assert_hash_report(report)
        assert 'sha256' in report['iocs']
        assert 'ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6c' in report['iocs']['sha256']
        assert 'ef537f25c895bfa782526529a9b63d97aa631564d5d789c2b765448c8635fb6d' in report['iocs']['sha256']

    def test_parser_basic_dns(self):
        stix_object = self._site_object("[url:value = 'http://x4z9arb.cn/4712/']")
        report = STIXIndicator(stix_object, self._URL).report
        self._assert_site_report(report)
        assert 'dns' in report['iocs'] and 'x4z9arb.cn' in report['iocs']['dns']

    def test_parser_basic_two_dns(self):
        stix_object = self._site_object(
            "[url:value = 'http://x4z9arb.cn/4712/' OR url:value = 'http://x4z9arc.cn/4712/']")
        report = STIXIndicator(stix_object, self._URL).report
        self._assert_site_report(report)
        assert 'dns' in report['iocs']
        assert 'x4z9arb.cn' in report['iocs']['dns']
        assert 'x4z9arc.cn' in report['iocs']['dns']

    def test_parser_basic_ip(self):
        stix_object = self._site_object("[ipv4-addr:value = '198.51.100.1/32']")
        report = STIXIndicator(stix_object, self._URL).report
        self._assert_site_report(report)
        assert 'ipv4' in report['iocs']
        assert '198.51.100.1' in report['iocs']['ipv4']

    def test_parser_basic_ip_no_cidr(self):
        stix_object = self._site_object("[ipv4-addr:value = '198.51.100.1']")
        report = STIXIndicator(stix_object, self._URL).report
        self._assert_site_report(report)
        assert 'ipv4' in report['iocs']
        assert '198.51.100.1' in report['iocs']['ipv4']

    def test_parser_basic_ip_cidr_range(self):
        # A /31 expands to both addresses in the range.
        stix_object = self._site_object("[ipv4-addr:value = '198.51.100.1/31']")
        report = STIXIndicator(stix_object, self._URL).report
        self._assert_site_report(report)
        assert 'ipv4' in report['iocs']
        assert '198.51.100.1' in report['iocs']['ipv4']
        assert '198.51.100.0' in report['iocs']['ipv4']

    def test_parser_complex_ip(self):
        stix_object = self._site_object(
            "[ipv4-addr:value = '198.51.100.1/32' OR ipv4-addr:value = '203.0.113.33/32' OR "
            "ipv6-addr:value = '2001:0db8:dead:beef:dead:beef:dead:0001/128']")
        report = STIXIndicator(stix_object, self._URL).report
        self._assert_site_report(report)
        assert 'ipv4' in report['iocs']
        assert '198.51.100.1' in report['iocs']['ipv4']
        assert '203.0.113.33' in report['iocs']['ipv4']
        assert 'ipv6' in report['iocs']
        assert '2001:0db8:dead:beef:dead:beef:dead:0001' in report['iocs']['ipv6']

    def test_parser_complex_ip_cidr_range(self):
        # An IPv6 /127 expands to two addresses, reported in compressed form.
        stix_object = self._site_object(
            "[ipv4-addr:value = '198.51.100.1/32' OR ipv4-addr:value = '203.0.113.33/32' OR "
            "ipv6-addr:value = '2001:0db8:dead:beef:dead:beef:dead:0001/127']")
        report = STIXIndicator(stix_object, self._URL).report
        self._assert_site_report(report)
        assert 'ipv4' in report['iocs']
        assert '198.51.100.1' in report['iocs']['ipv4']
        assert '203.0.113.33' in report['iocs']['ipv4']
        assert 'ipv6' in report['iocs']
        assert '2001:db8:dead:beef:dead:beef:dead:1' in report['iocs']['ipv6']
        assert '2001:db8:dead:beef:dead:beef:dead:0' in report['iocs']['ipv6']

    def test_parser_complex_ip_with_domain(self):
        stix_object = self._site_object(self._IP_AND_DOMAIN_PATTERN)
        report = STIXIndicator(stix_object, self._URL).report
        self._assert_site_report(report)
        assert 'ipv4' in report['iocs']
        assert '198.51.100.1' in report['iocs']['ipv4']
        assert '203.0.113.33' in report['iocs']['ipv4']
        assert 'ipv6' in report['iocs']
        assert '2001:0db8:dead:beef:dead:beef:dead:0001' in report['iocs']['ipv6']
        assert 'dns' in report['iocs'] and 'example.com' in report['iocs']['dns']

    def test_parser_complex_ip_with_domain_but_address_not_enabled(self):
        # A parser restricted to domains must drop the address IOCs.
        stix_object = self._site_object(self._IP_AND_DOMAIN_PATTERN)
        indicator = STIXIndicator(stix_object, self._URL,
                                  pattern_parser=STIXPatternParser(["domain"]))
        report = indicator.report
        self._assert_site_report(report)
        assert 'ipv4' not in report['iocs']
        assert 'ipv6' not in report['iocs']
        assert 'dns' in report['iocs'] and 'example.com' in report['iocs']['dns']

    def test_parser_complex_ip_with_domain_but_nothing_enabled(self):
        # No matching IOC type enabled -> no report at all.
        stix_object = self._site_object(self._IP_AND_DOMAIN_PATTERN)
        indicator = STIXIndicator(stix_object, self._URL,
                                  pattern_parser=STIXPatternParser(["hash"]))
        assert not indicator.report

    def test_indicator_not_intelligble_to_edr(self):
        # A pattern over observables the EDR cannot act on yields no report.
        stix_object = {"type": "indicator",
                       "spec_version": "2.1",
                       "id": "indicator--e26a5a10-09e4-423b-84d7-eb026c3ff482",
                       "created": "2021-02-14T07:10:49.000Z",
                       "modified": "2021-06-27T19:45:26.000Z",
                       "description": "Month majority nearly century manage.",
                       "indicator_types": [
                           "attribution"
                       ],
                       "pattern": "[process:defanged NOT = false]",
                       "pattern_type": "stix",
                       "pattern_version": "2.1",
                       "valid_from": "2020-06-26T01:48:15Z",
                       "valid_until": "2021-05-18T13:37:59Z",
                       "kill_chain_phases": [
                           {
                               "kill_chain_name": "lweDuklJOhJMBoQcY",
                               "phase_name": "ptQuXqPySK"
                           }
                       ],
                       "labels": [
                           "role",
                           "treat",
                           "fire",
                           "power",
                           "although"
                       ],
                       "confidence": 22,
                       "lang": "en"
                       }
        indicator = STIXIndicator(stix_object, self._URL)
        assert not indicator.report
# Allow running this test module directly (e.g. `python test_file.py`).
if __name__ == '__main__':
    unittest.main()
| 52.953086
| 223
| 0.582067
| 2,404
| 21,446
| 5.123128
| 0.086522
| 0.068204
| 0.037999
| 0.041166
| 0.911578
| 0.905408
| 0.893066
| 0.88925
| 0.884459
| 0.883891
| 0
| 0.145972
| 0.272965
| 21,446
| 404
| 224
| 53.084158
| 0.643856
| 0.004476
| 0
| 0.776316
| 0
| 0.026316
| 0.450061
| 0.153484
| 0
| 0
| 0
| 0
| 0.331579
| 1
| 0.036842
| false
| 0
| 0.010526
| 0
| 0.05
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f125e667cad04799159a3e7e883ea6f2e851df1a
| 1,853
|
py
|
Python
|
tests/test_requests.py
|
gwrome/dndtools
|
38d973cb44e88e267b137957b407c3e26271778d
|
[
"MIT"
] | null | null | null |
tests/test_requests.py
|
gwrome/dndtools
|
38d973cb44e88e267b137957b407c3e26271778d
|
[
"MIT"
] | null | null | null |
tests/test_requests.py
|
gwrome/dndtools
|
38d973cb44e88e267b137957b407c3e26271778d
|
[
"MIT"
] | null | null | null |
from dndtools import create_app, is_request_valid
def test_no_env_vars(client):
    """Requests are invalid when both Slack config values are unset."""
    app = create_app()
    with app.app_context():
        # Clear both settings so validation has nothing to compare against.
        app.config['SLACK_VERIFICATION_TOKEN'] = None
        app.config['SLACK_TEAM_ID'] = None
        assert not is_request_valid({})
def test_no_app_verification_token(client):
    """Requests are invalid when the Slack verification token is unset."""
    app = create_app()
    with app.app_context():
        app.config['SLACK_VERIFICATION_TOKEN'] = None
        assert not is_request_valid({})
def test_no_app_team_id(client):
    """Requests are invalid when the Slack team id is unset."""
    app = create_app()
    with app.app_context():
        app.config['SLACK_TEAM_ID'] = None
        assert not is_request_valid({})
def test_request_tokens(client):
    """Every route responds 401 when the team id and/or token is wrong."""
    bad_credentials = [
        ('wrong-test-team-id', 'test-token'),        # bad team, good token
        ('test-team-id', 'wrong-test-token'),        # good team, bad token
        ('wrong-test-team-id', 'wrong-test-token'),  # both bad
    ]
    for route in ('condition', 'roll', 'spellbook'):
        for team_id, token in bad_credentials:
            response = client.post('/{}'.format(route),
                                   data=dict(text="",
                                             team_id=team_id,
                                             token=token,
                                             user_id='asdf'))
            assert '401' in str(response)
| 39.425532
| 79
| 0.464112
| 182
| 1,853
| 4.461538
| 0.241758
| 0.066502
| 0.068966
| 0.066502
| 0.832512
| 0.832512
| 0.832512
| 0.832512
| 0.832512
| 0.832512
| 0
| 0.008539
| 0.431193
| 1,853
| 46
| 80
| 40.282609
| 0.76186
| 0
| 0
| 0.783784
| 0
| 0
| 0.117647
| 0.025904
| 0
| 0
| 0
| 0
| 0.162162
| 1
| 0.108108
| false
| 0
| 0.027027
| 0
| 0.135135
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f18170e5980c7902178c35390a814a0e41f57c42
| 18
|
py
|
Python
|
char_untils/Tst.py
|
HAIbingshuai/albert-model_attempt-
|
d8e40001910d54409932eb5a49bb36685c266a20
|
[
"MIT"
] | null | null | null |
char_untils/Tst.py
|
HAIbingshuai/albert-model_attempt-
|
d8e40001910d54409932eb5a49bb36685c266a20
|
[
"MIT"
] | null | null | null |
char_untils/Tst.py
|
HAIbingshuai/albert-model_attempt-
|
d8e40001910d54409932eb5a49bb36685c266a20
|
[
"MIT"
] | null | null | null |
import random
| 3
| 13
| 0.666667
| 2
| 18
| 6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 18
| 5
| 14
| 3.6
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
74ddf8c15ce10f354f8aec48a9f46156d2fb36f0
| 2,525
|
py
|
Python
|
assets/assets.py
|
jussihayha/python_neat_ai
|
390dd607d7301ff0dfa1dc393aab6beb23ff353f
|
[
"MIT"
] | null | null | null |
assets/assets.py
|
jussihayha/python_neat_ai
|
390dd607d7301ff0dfa1dc393aab6beb23ff353f
|
[
"MIT"
] | null | null | null |
assets/assets.py
|
jussihayha/python_neat_ai
|
390dd607d7301ff0dfa1dc393aab6beb23ff353f
|
[
"MIT"
] | null | null | null |
import pygame

pygame.font.init()

# Window geometry and display surface.
WIN_HEIGHT = 512
WIN_WIDTH = 1200
DISPLAY = pygame.display.set_mode((WIN_WIDTH, WIN_HEIGHT))
pygame.display.set_caption("INTELLIGENT HERO AI JUMPING, OK")


def _load_scaled(path, size):
    """Load the image at *path* and scale it to *size* (width, height)."""
    return pygame.transform.scale(pygame.image.load(path), size)


def _load_enemy(path, size):
    """Load an enemy sprite, scale it, and flip it horizontally (x-axis only)."""
    return pygame.transform.flip(_load_scaled(path, size), True, False)


# HERO images
RUN = [_load_scaled('./assets/hero-run-{}.png'.format(i), (128, 128))
       for i in range(1, 7)]
# Each jump frame appears twice so the jump animation advances at half speed.
JUMP = [_load_scaled('./assets/hero-jump-{}.png'.format(i), (128, 128))
        for i in (1, 1, 2, 2, 3, 3)]

# ENEMY PICTURES
SMALL_ENEMY = [_load_enemy('./assets/enemy{}.png'.format(i), (96, 96))
               for i in range(1, 5)]
LARGE_ENEMY = [_load_enemy('./assets/enemy{}.png'.format(i), (240, 96))
               for i in range(1, 5)]

# RANDOM VARIABLES
BG = _load_scaled('./assets/background.png', (1200, 512))
BULLET = _load_scaled('./assets/bullet.png', (30, 30))
FONT = pygame.font.Font('./assets/gothic_pixel.ttf', 40)
| 60.119048
| 116
| 0.689901
| 358
| 2,525
| 4.840782
| 0.159218
| 0.259665
| 0.253895
| 0.330063
| 0.832083
| 0.816503
| 0.816503
| 0.769186
| 0.769186
| 0.741489
| 0
| 0.065026
| 0.098614
| 2,525
| 41
| 117
| 61.585366
| 0.696397
| 0.01703
| 0
| 0.0625
| 0
| 0
| 0.214689
| 0.133172
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.03125
| 0
| 0.03125
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
74e01e151948306cf69d7c6eae4b52cd6b26b96d
| 390
|
py
|
Python
|
simp_py_examples/course/S1806_4/ch_fontx.py
|
kcfkwok2003/Simp_py
|
f75e66da01b45dc8688dda602f8b33d4258f0c31
|
[
"MIT"
] | null | null | null |
simp_py_examples/course/S1806_4/ch_fontx.py
|
kcfkwok2003/Simp_py
|
f75e66da01b45dc8688dda602f8b33d4258f0c31
|
[
"MIT"
] | null | null | null |
simp_py_examples/course/S1806_4/ch_fontx.py
|
kcfkwok2003/Simp_py
|
f75e66da01b45dc8688dda602f8b33d4258f0c31
|
[
"MIT"
] | null | null | null |
# Bitmap glyph table keyed by Unicode character.
# Each glyph is 32 bytes -- presumably a 16x16 monochrome bitmap stored as
# 16 rows of 2 bytes each (MSB first); TODO confirm against the display routine.
CH_FONTS={
# U+4E2D (Chinese character for "middle/center")
u'\u4e2d':bytearray([
0x01,0x00,0x01,0x00,0x01,0x00,0x3f,0xfc,
0x21,0x04,0x21,0x04,0x21,0x04,0x21,0x04,
0x3f,0xfc,0x21,0x04,0x01,0x00,0x01,0x00,
0x01,0x00,0x01,0x00,0x00,0x00,0x00,0x00
]),
# U+6587 (Chinese character for "text/writing")
u'\u6587':bytearray([
0x01,0x00,0x01,0x00,0x01,0x80,0x7f,0xfc,
0x18,0x10,0x08,0x20,0x0c,0x20,0x04,0x40,
0x02,0xc0,0x03,0x80,0x03,0x80,0x06,0x60,
0x38,0x38,0x60,0x0c,0x00,0x00,0x00,0x00
]),
}
| 27.857143
| 40
| 0.753846
| 72
| 390
| 4.069444
| 0.361111
| 0.245734
| 0.286689
| 0.327645
| 0.430034
| 0.430034
| 0
| 0
| 0
| 0
| 0
| 0.493369
| 0.033333
| 390
| 14
| 41
| 27.857143
| 0.28382
| 0
| 0
| 0.142857
| 0
| 0
| 0.030691
| 0
| 0
| 1
| 0.654731
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2d24af97f9e1a21489caec0e5b8bb58df3c16b71
| 73,486
|
py
|
Python
|
tests/test_Resamplers.py
|
shane-kercheval/oo-learning
|
9e3ebe5f7460179e23f6801bc01f1114bb896dea
|
[
"MIT"
] | 1
|
2020-10-09T09:11:46.000Z
|
2020-10-09T09:11:46.000Z
|
tests/test_Resamplers.py
|
shane-kercheval/oo-learning
|
9e3ebe5f7460179e23f6801bc01f1114bb896dea
|
[
"MIT"
] | 48
|
2018-04-09T01:30:31.000Z
|
2021-06-13T03:25:59.000Z
|
tests/test_Resamplers.py
|
shane-kercheval/oo-learning
|
9e3ebe5f7460179e23f6801bc01f1114bb896dea
|
[
"MIT"
] | null | null | null |
import os
import pickle
import time
from math import isclose
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import shutil
from oolearning import *
from tests.MockClassificationModelWrapper import MockClassificationModelWrapper
from tests.MockRegressionModelWrapper import MockRegressionModelWrapper
from tests.TestHelper import TestHelper
from tests.TimerTestCase import TimerTestCase
class TempDecorator(DecoratorBase):
    """Fold decorator that records repeat/fold indexes and holdout predictions for inspection."""

    def __init__(self):
        self._repeat_index = []
        self._fold_index = []
        self._holdout_indexes = []
        self._holdout_predicted_values = pd.DataFrame()

    def decorate(self, **kwargs):
        """Capture the per-fold bookkeeping the resampler passes via keyword arguments."""
        self._repeat_index.append(kwargs['repeat_index'])
        self._fold_index.append(kwargs['fold_index'])
        self._holdout_indexes.extend(kwargs['holdout_indexes'])
        fold_predictions = kwargs['holdout_predicted_values']
        self._holdout_predicted_values = self._holdout_predicted_values.append(fold_predictions)  # noqa
class ModelDecorator(DecoratorBase):
    """Fold decorator that collects every fitted model from the resampler."""

    def __init__(self):
        self._model_list = []

    def decorate(self, **kwargs):
        # The resampler passes the fitted model under the 'model' keyword.
        self._model_list.append(kwargs['model'])
# noinspection SpellCheckingInspection,PyMethodMayBeStatic,PyTypeChecker
class ResamplerTests(TimerTestCase):
    @classmethod
    def setUpClass(cls):
        # No class-level fixtures are required for these tests.
        pass
    def test_resamplers_Rmse_Mae(self):
        """Resample a linear regressor on the cement data and pin the RMSE/MAE statistics."""
        data = TestHelper.get_cement_data()
        # splitter = RegressionStratifiedDataSplitter(test_ratio=0.2)
        # training_indexes, test_indexes = splitter.split(target_values=data.strength)
        train_data = data
        train_data_y = train_data.strength
        train_data = train_data.drop(columns='strength')
        # test_data = data.iloc[test_indexes]
        # test_data_y = test_data.strength
        # test_data = test_data.drop(columns='strength')
        # 5x5 repeated cross-validation scored with RMSE and MAE, run in parallel.
        resampler = RepeatedCrossValidationResampler(
            model=LinearRegressorSK(),
            transformations=[ImputationTransformer(),
                             DummyEncodeTransformer(CategoricalEncoding.DUMMY)],
            scores=[RmseScore(),
                    MaeScore()],
            folds=5,
            repeats=5,
            parallelization_cores=-1)
        # `results` is unavailable until `resample()` has been called.
        self.assertRaises(ModelNotFittedError, lambda: resampler.results)
        resampler.resample(data_x=train_data, data_y=train_data_y)
        TestHelper.save_string(resampler.results,
                               'data/test_Resamplers/test_resamplers_Rmse_Mae_string.txt')
        # 5 folds x 5 repeats -> 25 resamples, each scored with (RMSE, MAE).
        assert len(resampler.results._scores) == 25
        assert all([len(x) == 2 and
                    isinstance(x[0], RmseScore) and
                    isinstance(x[1], MaeScore)
                    for x in resampler.results._scores])
        assert resampler.results.num_resamples == 25
        assert resampler.results.score_names == ['RMSE', 'MAE']
        # Exact expected statistics (deterministic for the fixed fold/repeat setup).
        assert isclose(resampler.results.score_means['RMSE'], 10.459344010622544)
        assert isclose(resampler.results.score_means['MAE'], 8.2855537849498742)
        assert isclose(resampler.results.score_standard_deviations['RMSE'], 0.5716680069548794)
        assert isclose(resampler.results.score_standard_deviations['MAE'], 0.46714447004190812)
        assert isclose(resampler.results.score_coefficients_of_variation['RMSE'], round(0.5716680069548794 / 10.459344010622544, 2))  # noqa
        assert isclose(resampler.results.score_coefficients_of_variation['MAE'], round(0.46714447004190812 / 8.2855537849498742, 2))  # noqa
        actual_cross_validations = resampler.results.resampled_scores
        # Compare the per-fold scores against the pickled regression fixture; the
        # commented-out dump below regenerates the fixture when behavior changes.
        file = os.path.join(os.getcwd(), TestHelper.ensure_test_directory('data/test_Resamplers/test_resamplers_Rmse_Mae_cross_validation_scores.pkl'))  # noqa
        # with open(file, 'wb') as output:
        #     pickle.dump(actual_cross_validations, output, pickle.HIGHEST_PROTOCOL)
        with open(file, 'rb') as saved_object:
            expected_cross_validations = pickle.load(saved_object)
        assert TestHelper.ensure_all_values_equal(data_frame1=expected_cross_validations,
                                                  data_frame2=actual_cross_validations)
    def test_resamplers_Mock_regression(self):
        """Resample a mock regression model on the cement data and pin RMSE/MAE statistics."""
        data = TestHelper.get_cement_data()
        # splitter = RegressionStratifiedDataSplitter(test_ratio=0.2)
        # training_indexes, test_indexes = splitter.split(target_values=data.strength)
        train_data = data
        train_data_y = train_data.strength
        train_data = train_data.drop(columns='strength')
        # test_data = data.iloc[test_indexes]
        # test_data_y = test_data.strength
        # test_data = test_data.drop(columns='strength')
        # Same 5x5 setup as the real-regressor test, but with a mock model wrapper.
        resampler = RepeatedCrossValidationResampler(
            model=MockRegressionModelWrapper(data_y=data.strength),
            transformations=[ImputationTransformer(),
                             DummyEncodeTransformer(CategoricalEncoding.DUMMY)],
            scores=[RmseScore(),
                    MaeScore()],
            folds=5,
            repeats=5,
            parallelization_cores=-1)
        # `results` is unavailable until `resample()` has been called.
        self.assertRaises(ModelNotFittedError, lambda: resampler.results)
        resampler.resample(data_x=train_data, data_y=train_data_y)
        # 5 folds x 5 repeats -> 25 resamples, each scored with (RMSE, MAE).
        assert len(resampler.results._scores) == 25
        assert all([len(x) == 2 and
                    isinstance(x[0], RmseScore) and
                    isinstance(x[1], MaeScore)
                    for x in resampler.results._scores])
        assert resampler.results.num_resamples == 25
        assert resampler.results.score_names == ['RMSE', 'MAE']
        # Exact expected statistics for the deterministic mock model.
        assert isclose(resampler.results.score_means['RMSE'], 23.776598887994158)
        assert isclose(resampler.results.score_means['MAE'], 19.030724889732316)
        assert isclose(resampler.results.score_standard_deviations['RMSE'], 0.91016288102942078)
        assert isclose(resampler.results.score_standard_deviations['MAE'], 0.77294039453317798)
        assert isclose(resampler.results.score_coefficients_of_variation['RMSE'], round(0.91016288102942078 / 23.776598887994158, 2))  # noqa
        assert isclose(resampler.results.score_coefficients_of_variation['MAE'], round(0.77294039453317798 / 19.030724889732316, 2))  # noqa
    def test_resamplers_Mock_classification(self):
        """Resample a mock classifier on the titanic data; pin score stats and check dict/DataFrame parity."""
        data = TestHelper.get_titanic_data()
        # main reason we want to split the data is to get the means/st_devs so that we can confirm with
        # e.g. the Searcher
        splitter = ClassificationStratifiedDataSplitter(holdout_ratio=0.25)
        training_indexes, test_indexes = splitter.split(target_values=data.Survived)
        train_data = data.iloc[training_indexes]
        train_data_y = train_data.Survived
        train_data = train_data.drop(columns='Survived')
        # All four scores share the same 0.5-threshold two-class converter.
        score_list = [KappaScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)),
                      SensitivityScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)),  # noqa
                      SpecificityScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)),  # noqa
                      ErrorRateScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1))]  # noqa
        resampler = RepeatedCrossValidationResampler(
            model=MockClassificationModelWrapper(data_y=data.Survived),
            transformations=None,
            scores=score_list,
            folds=5,
            repeats=5,
            parallelization_cores=-1)
        # `results` is unavailable until `resample()` has been called.
        self.assertRaises(ModelNotFittedError, lambda: resampler.results)
        resampler.resample(data_x=train_data, data_y=train_data_y)
        # 5 folds x 5 repeats -> 25 resamples, each scored with all four scores.
        assert len(resampler.results._scores) == 25
        assert all([len(x) == 4 and
                    isinstance(x[0], KappaScore) and
                    isinstance(x[1], SensitivityScore) and
                    isinstance(x[2], SpecificityScore) and
                    isinstance(x[3], ErrorRateScore)
                    for x in resampler.results._scores])
        assert resampler.results.num_resamples == 25
        assert resampler.results.score_names == ['kappa', 'sensitivity', 'specificity', 'error_rate']
        # Exact expected statistics for the deterministic mock model.
        assert isclose(resampler.results.score_means['kappa'], 0.0013793651663756446)
        assert isclose(resampler.results.score_means['sensitivity'], 0.34802926509722726)
        assert isclose(resampler.results.score_means['specificity'], 0.65307336918498493)
        assert isclose(resampler.results.score_means['error_rate'], 0.46314142734094416)
        assert isclose(resampler.results.score_standard_deviations['kappa'], 0.055624736458973652)
        assert isclose(resampler.results.score_standard_deviations['sensitivity'], 0.036787308260115267)
        assert isclose(resampler.results.score_standard_deviations['specificity'], 0.019357626459983342)
        assert isclose(resampler.results.score_standard_deviations['error_rate'], 0.025427045943705647)
        assert isclose(resampler.results.score_coefficients_of_variation['kappa'], round(0.055624736458973652 / 0.0013793651663756446, 2))  # noqa
        assert isclose(resampler.results.score_coefficients_of_variation['sensitivity'], round(0.036787308260115267 / 0.34802926509722726, 2))  # noqa
        assert isclose(resampler.results.score_coefficients_of_variation['specificity'], round(0.019357626459983342 / 0.65307336918498493, 2))  # noqa
        assert isclose(resampler.results.score_coefficients_of_variation['error_rate'], round(0.025427045943705647 / 0.46314142734094416, 2))  # noqa
        # verify the `score_stats` DataFrame agrees with the dict-based accessors
        assert all(resampler.results.score_stats.columns == resampler.results.score_names)
        assert resampler.results.score_stats.loc['means'].to_dict() == resampler.results.score_means
        assert resampler.results.score_stats.loc['standard deviations'].to_dict() == resampler.results.score_standard_deviations  # noqa
        assert resampler.results.score_stats.loc['coefficients of variation'].to_dict() == resampler.results.score_coefficients_of_variation  # noqa
        # Compare against the pickled fixture; the commented-out dump regenerates it.
        file = os.path.join(os.getcwd(), TestHelper.ensure_test_directory('data/test_Resamplers/test_resamplers_score_stats.pkl'))  # noqa
        # with open(file, 'wb') as output:
        #     pickle.dump(resampler.results.score_stats, output, pickle.HIGHEST_PROTOCOL)
        with open(file, 'rb') as saved_object:
            expected_score_stats = pickle.load(saved_object)
        assert TestHelper.ensure_all_values_equal(data_frame1=expected_score_stats,
                                                  data_frame2=resampler.results.score_stats)
def test_Resampler_callback(self):
# make sure that the Resampler->train_callback works
data = TestHelper.get_cement_data()
target_variable = 'strength'
# noinspection PyUnusedLocal
def train_callback(data_x, data_y, hyper_params):
raise NotImplementedError()
score_list = [RmseScore(), MaeScore()]
transformations = [RemoveColumnsTransformer(['coarseagg', 'fineagg']), ImputationTransformer(), DummyEncodeTransformer()] # noqa
resampler = RepeatedCrossValidationResampler(
model=RandomForestClassifier(),
transformations=transformations,
scores=score_list,
folds=5,
repeats=5,
train_callback=train_callback)
# should raise an error from the callback definition above
self.assertRaises(NotImplementedError, lambda: resampler.resample(data_x=data.drop(columns=target_variable), data_y=data[target_variable], hyper_params=None)) # noqa
######################################################################################################
# With parallelization, the Resampler should fail with CallbackUsedWithParallelizationError
######################################################################################################
score_list = [RmseScore(), MaeScore()]
transformations = [RemoveColumnsTransformer(['coarseagg', 'fineagg']), ImputationTransformer(),
DummyEncodeTransformer()] # noqa
self.assertRaises(CallbackUsedWithParallelizationError,
lambda: RepeatedCrossValidationResampler(model=RandomForestClassifier(),
transformations=transformations,
scores=score_list,
folds=5,
repeats=5,
train_callback=train_callback,
parallelization_cores=-1))
    def test_Resampler_transformations(self):
        """Verify the Resampler applies its transformations before the model sees the data."""
        # intent of this test is to ensure that the data is being transformed according to the
        # transformations being passed in.
        # make sure that the Resampler->train_callback works
        data = TestHelper.get_cement_data()
        target_variable = 'strength'
        # create random missing values and extra field (fixed seeds -> deterministic NAs)
        np.random.seed(42)
        missing_indexes_cement = np.random.randint(low=0, high=len(data), size=int(len(data) * 0.10))
        data.loc[missing_indexes_cement, 'cement'] = None
        np.random.seed(43)
        missing_indexes_ash = np.random.randint(low=0, high=len(data), size=int(len(data) * 0.10))
        data.loc[missing_indexes_ash, 'ash'] = None
        np.random.seed(42)
        random_codes = np.random.randint(low=0, high=2, size=len(data))
        data['random'] = ['code0' if random_code == 0 else 'code1' for random_code in random_codes]
        # total NA count produced by the two seeded injections above
        assert data.isna().sum().sum() == 195
        data_x = data.drop(columns=target_variable)
        data_y = data[target_variable]
        ######################################################################################################
        # make sure the data that we pass to `train()` in the ModelWrapper is transformed
        # then make sure what we get in the callback matches the transformed data
        ######################################################################################################
        test_pipeline = TransformerPipeline(
            transformations=[RemoveColumnsTransformer(['coarseagg', 'fineagg']),  # noqa
                             ImputationTransformer(),
                             DummyEncodeTransformer()])
        transformed_data = test_pipeline.fit_transform(data_x=data_x)
        # make sure our test transformations are transformed as expected (although this should already be
        # tested in test_Transformations file)
        assert all(transformed_data.columns.values == ['cement', 'slag', 'ash', 'water', 'superplastic', 'age', 'random_code1'])  # noqa
        assert OOLearningHelpers.is_series_numeric(variable=transformed_data.random_code1)
        assert transformed_data.isna().sum().sum() == 0

        # this callback will be called by the ModelWrapper before fitting the model
        # the callback gives us back the data that it will pass to the underlying model
        # so we can make sure it matches what we expect
        def train_callback(data_x_test, data_y_test, hyper_params):
            assert hyper_params is None
            # noinspection PyTypeChecker
            assert all(data_y == data_y_test)
            # make sure transformations happened
            assert all(data_x_test.columns.values == ['cement', 'slag', 'ash', 'water', 'superplastic', 'age', 'random_code1'])  # noqa

        score_list = [RmseScore(), MaeScore()]
        transformations = [RemoveColumnsTransformer(['coarseagg', 'fineagg']), ImputationTransformer(), DummyEncodeTransformer()]  # noqa
        resampler = RepeatedCrossValidationResampler(
            model=MockRegressionModelWrapper(data_y=data_y),
            transformations=transformations,
            scores=score_list,
            folds=5,
            repeats=5,
            train_callback=train_callback)
        # the train_callback method will be triggered and will cause an assertion error if the data that is
        # going to be trained does not match the data previously transformed
        resampler.resample(data_x=data.drop(columns=target_variable), data_y=data[target_variable], hyper_params=None)  # noqa
    def test_Resampler_fold_indexes(self):
        """Check fold/repeat indexes are stable across resampler objects and carried into predictions."""
        # test that the resampler uses the same fold index across objects. Test that the indexes are
        # maintained in the predicted values (only applicable for dataframes i.e. classification)
        data = TestHelper.get_titanic_data()
        # main reason we want to split the data is to get the means/st_devs so that we can confirm with
        # e.g. the Searcher
        splitter = ClassificationStratifiedDataSplitter(holdout_ratio=0.25)
        training_indexes, _ = splitter.split(target_values=data.Survived)
        train_data_y = data.iloc[training_indexes].Survived
        train_data = data.iloc[training_indexes].drop(columns='Survived')
        score_list = [KappaScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1))]
        # TempDecorator (defined above) records per-fold indexes and predictions.
        decorator = TempDecorator()
        resampler = RepeatedCrossValidationResampler(
            model=MockClassificationModelWrapper(data_y=data.Survived),
            transformations=None,
            scores=score_list,
            folds=5,
            repeats=2,
            fold_decorators=[decorator])
        resampler.resample(data_x=train_data, data_y=train_data_y)
        # 2 repeats x 5 folds, executed in order
        assert decorator._repeat_index == [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]
        assert decorator._fold_index == [0, 1, 2, 3, 4, 0, 1, 2, 3, 4]
        # The _holdout_indexes should have twice the number of indexes as training_indexes because of
        # `repeats=2`
        num_fold_holdout_indexes = len(decorator._holdout_indexes)
        num_training_indexes = len(training_indexes)
        assert num_training_indexes * 2 == num_fold_holdout_indexes
        assert len(set(training_indexes)) == num_training_indexes
        # get the holdout indexes from the first repeat. This should contain exactly 1 to 1 indexes with the
        # original training indexes, although not in the same order
        repeat_0_holdout_indexes = decorator._holdout_indexes[0:int(num_fold_holdout_indexes / 2)]
        assert len(repeat_0_holdout_indexes) == num_training_indexes
        # check that the training indexes and holdout indexes from the first repeat contain the same values
        assert set(training_indexes) == set(repeat_0_holdout_indexes)
        repeat_1_holdout_indexes = decorator._holdout_indexes[int(num_fold_holdout_indexes / 2): num_fold_holdout_indexes]  # noqa
        assert len(repeat_1_holdout_indexes) == num_training_indexes
        # check that the training indexes and holdout indexes from the second repeat contain the same values
        assert set(training_indexes) == set(repeat_1_holdout_indexes)
        # at this point we know that both repeats contain the indexes from the original training set
        # this should correspond to the indexes of the predicted values DataFrame
        # first, lets merge the indexes from repeats, and assign into a different list, also used below
        repeat_0_holdout_indexes.extend(repeat_1_holdout_indexes)
        holdout_indexes = repeat_0_holdout_indexes
        assert len(decorator._holdout_predicted_values.index.values) == len(holdout_indexes)
        assert all(decorator._holdout_predicted_values.index.values == holdout_indexes)
        # lets repeat the same procedure to verify that the indexes are the same across resampler objects
        decorator = TempDecorator()
        resampler = RepeatedCrossValidationResampler(
            model=MockClassificationModelWrapper(data_y=data.Survived),
            transformations=None,
            scores=score_list,
            folds=5,
            repeats=2,
            fold_decorators=[decorator])
        resampler.resample(data_x=train_data, data_y=train_data_y)
        # test that NEW decorator object's predicted value dataframe has the same indexes it previously did
        assert all(decorator._holdout_predicted_values.index.values == holdout_indexes)
# def test_Resampler_fold_indexes_parallelized(self):
# # NOTE: when using parallelization, the decorator is copied to the process, so the original object
# # will not retain data, like it does in non-parllelization
# # need to use the decorators passed back in `.fold_decorators` property
# data = TestHelper.get_titanic_data()
#
# # main reason we want to split the data is to get the means/st_devs so that we can confirm with
# # e.g. the Searcher
# splitter = ClassificationStratifiedDataSplitter(holdout_ratio=0.25)
# training_indexes, _ = splitter.split(target_values=data.Survived)
#
# train_data_y = data.iloc[training_indexes].Survived
# train_data = data.iloc[training_indexes].drop(columns='Survived')
#
# score_list = [KappaScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1))]
#
# decorator = TempDecorator()
# resampler = RepeatedCrossValidationResampler(
# model=MockClassificationModelWrapper(data_y=data.Survived),
# transformations=None,
# scores=score_list,
# folds=5,
# repeats=2,
# fold_decorators=[decorator],
# parallelization_cores=-1)
# resampler.resample(data_x=train_data, data_y=train_data_y)
#
# # decorator object is not used directly when using parallization, it is copied
# assert len(decorator._repeat_index) == 0
# assert len(decorator._fold_index) == 0
#
# # we should have 1 set/list of decorators for each repeat
# assert len(resampler.decorators) == 2 # 2 because we have 2 repeats
# assert resampler.decorators[0]._repeat_index == [0, 0, 0, 0, 0]
# assert resampler.decorators[0]._fold_index == [0, 1, 2, 3, 4]
#
# assert resampler.decorators[1]._repeat_index == [1, 1, 1, 1, 1]
# assert resampler.decorators[1]._fold_index == [0, 1, 2, 3, 4]
#
# # The fold holdout indexes should have twice the number of indexes as training_indexes because of
# # `repeats=2`
# num_fold_holdout_indexes = len(resampler.decorators[0]._holdout_indexes)
# num_training_indexes = len(training_indexes)
# # these values should be equal since the fold holdout is for a single repeat
# assert num_training_indexes == num_fold_holdout_indexes
# num_fold_holdout_indexes = len(resampler.decorators[1]._holdout_indexes)
# num_training_indexes = len(training_indexes)
# # these values should be equal since the fold holdout is for a single repeat
# assert num_training_indexes == num_fold_holdout_indexes
# # get the holdout indexes from the first repeat. This should contain exactly 1 to 1 indexes with the
# # original training indexes, although not in the same order
# fold_holdout_indexes = resampler.decorators[0]._holdout_indexes
# assert set(training_indexes) == set(fold_holdout_indexes)
# fold_holdout_indexes = resampler.decorators[1]._holdout_indexes
# assert set(training_indexes) == set(fold_holdout_indexes)
# # at this point we know that both repeats contain the indexes from the original training set
# # this should correspond to the indexes of the predicted values DataFrame
# # first, lets merge the indexes from repeats, and assign into a different list, also used below
# holdout_df = resampler.decorators[0]._holdout_predicted_values
# fold_holdout_indexes = resampler.decorators[0]._holdout_indexes
# assert len(holdout_df.values) == len(fold_holdout_indexes)
# # noinspection PyTypeChecker
# assert all(holdout_df.index.values == fold_holdout_indexes)
#
# holdout_df = resampler.decorators[1]._holdout_predicted_values
# fold_holdout_indexes = resampler.decorators[1]._holdout_indexes
# assert len(holdout_df.values) == len(fold_holdout_indexes)
# # noinspection PyTypeChecker
# assert all(holdout_df.index.values == fold_holdout_indexes)
def test_resamplers_RandomForest_classification(self):
data = TestHelper.get_titanic_data()
# main reason we want to split the data is to get the means/st_devs so that we can confirm with
# e.g. the Searcher
splitter = ClassificationStratifiedDataSplitter(holdout_ratio=0.25)
training_indexes, test_indexes = splitter.split(target_values=data.Survived)
train_data = data.iloc[training_indexes]
train_data_y = train_data.Survived
train_data = train_data.drop(columns='Survived')
transformations = [RemoveColumnsTransformer(['PassengerId', 'Name', 'Ticket', 'Cabin']),
CategoricConverterTransformer(['Pclass', 'SibSp', 'Parch']),
ImputationTransformer(),
DummyEncodeTransformer(CategoricalEncoding.ONE_HOT)]
score_list = [KappaScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)),
SensitivityScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)), # noqa
SpecificityScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)), # noqa
ErrorRateScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1))] # noqa
cache_directory = TestHelper.ensure_test_directory('data/test_Resamplers/cached_test_models/test_resamplers_RandomForest_classification') # noqa
resampler = RepeatedCrossValidationResampler(
model=RandomForestClassifier(),
transformations=transformations,
scores=score_list,
model_persistence_manager=LocalCacheManager(cache_directory=cache_directory),
folds=5,
repeats=5)
self.assertRaises(ModelNotFittedError, lambda: resampler.results)
resampler.resample(data_x=train_data, data_y=train_data_y, hyper_params=RandomForestHP())
assert len(resampler.results._scores) == 25
assert all([len(x) == 4 and
isinstance(x[0], KappaScore) and
isinstance(x[1], SensitivityScore) and
isinstance(x[2], SpecificityScore) and
isinstance(x[3], ErrorRateScore)
for x in resampler.results._scores])
assert resampler.results.num_resamples == 25
# noinspection SpellCheckingInspection
expected_file = 'repeat{0}_fold{1}_RandomForestClassifier_n_estimators500_criteriongini_max_featuresNone_max_depthNone_min_samples_split2_min_samples_leaf1_min_weight_fraction_leaf0.0_max_leaf_nodesNone_min_impurity_decrease0.0_bootstrapTrue_oob_scoreFalse.pkl' # noqa
for fold_index in range(5):
for repeat_index in range(5):
assert os.path.isfile(os.path.join(cache_directory,
expected_file.format(fold_index, repeat_index)))
assert resampler.results.score_names == ['kappa', 'sensitivity', 'specificity', 'error_rate']
# make sure the order of the resampled_scores is the same order as Evaluators passed in
assert all(resampler.results.resampled_scores.columns.values == ['kappa', 'sensitivity', 'specificity', 'error_rate']) # noqa
# score_means and score_standard_deviations comes from resampled_scores, so testing both
assert isclose(resampler.results.score_means['kappa'], 0.586495320545703)
assert isclose(resampler.results.score_means['sensitivity'], 0.721899136052689)
assert isclose(resampler.results.score_means['specificity'], 0.8617441563168404)
assert isclose(resampler.results.score_means['error_rate'], 0.192053148900336)
assert isclose(resampler.results.score_standard_deviations['kappa'], 0.06833478821655113)
assert isclose(resampler.results.score_standard_deviations['sensitivity'], 0.06706830388930413)
assert isclose(resampler.results.score_standard_deviations['specificity'], 0.03664756028501139)
assert isclose(resampler.results.score_standard_deviations['error_rate'], 0.031189357324296424)
assert isclose(resampler.results.score_coefficients_of_variation['kappa'], round(0.06833478821655113 / 0.586495320545703, 2)) # noqa
assert isclose(resampler.results.score_coefficients_of_variation['sensitivity'], round(0.06706830388930413 / 0.721899136052689, 2)) # noqa
assert isclose(resampler.results.score_coefficients_of_variation['specificity'], round(0.03664756028501139 / 0.8617441563168404, 2)) # noqa
assert isclose(resampler.results.score_coefficients_of_variation['error_rate'], round(0.031189357324296424 / 0.192053148900336, 2)) # noqa
plt.gcf().clear()
TestHelper.check_plot('data/test_Resamplers/test_resamplers_RandomForest_classification_cv_boxplot.png', # noqa
lambda: resampler.results.plot_resampled_scores())
# def test_resamplers_RandomForest_classification_cached_parallization(self):
# data = TestHelper.get_titanic_data()
#
# # main reason we want to split the data is to get the means/st_devs so that we can confirm with
# # e.g. the Searcher
# splitter = ClassificationStratifiedDataSplitter(holdout_ratio=0.25)
# training_indexes, test_indexes = splitter.split(target_values=data.Survived)
#
# train_data = data.iloc[training_indexes]
# train_data_y = train_data.Survived
# train_data = train_data.drop(columns='Survived')
#
# transformations = [RemoveColumnsTransformer(['PassengerId', 'Name', 'Ticket', 'Cabin']),
# CategoricConverterTransformer(['Pclass', 'SibSp', 'Parch']),
# ImputationTransformer(),
# DummyEncodeTransformer(CategoricalEncoding.ONE_HOT)]
#
# score_list = [KappaScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)),
# SensitivityScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)), # noqa
# SpecificityScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)), # noqa
# ErrorRateScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1))] # noqa
#
# cache_directory = TestHelper.ensure_test_directory('data/test_Resamplers/cached_test_models/test_resamplers_RandomForest_classification') # noqa
# resampler = RepeatedCrossValidationResampler(
# model=RandomForestClassifier(),
# transformations=transformations,
# scores=score_list,
# model_persistence_manager=LocalCacheManager(cache_directory=cache_directory),
# folds=5,
# repeats=5,
# parallelization_cores=-1)
#
# self.assertRaises(ModelNotFittedError, lambda: resampler.results)
#
# time_start = time.time()
# resampler.resample(data_x=train_data, data_y=train_data_y, hyper_params=RandomForestHP())
# time_stop = time.time()
# # assert (time_stop - time_start) < 3
#
# assert len(resampler.results._scores) == 25
# assert all([len(x) == 4 and
# isinstance(x[0], KappaScore) and
# isinstance(x[1], SensitivityScore) and
# isinstance(x[2], SpecificityScore) and
# isinstance(x[3], ErrorRateScore)
# for x in resampler.results._scores])
# assert resampler.results.num_resamples == 25
#
# # noinspection SpellCheckingInspection
# expected_file = 'repeat{0}_fold{1}_RandomForestClassifier_n_estimators500_criteriongini_max_featuresNone_max_depthNone_min_samples_split2_min_samples_leaf1_min_weight_fraction_leaf0.0_max_leaf_nodesNone_min_impurity_decrease0_bootstrapTrue_oob_scoreFalse.pkl' # noqa
# for fold_index in range(5):
# for repeat_index in range(5):
# assert os.path.isfile(os.path.join(cache_directory,
# expected_file.format(fold_index, repeat_index)))
#
# assert resampler.results.score_names == ['kappa', 'sensitivity', 'specificity', 'error_rate']
#
# # make sure the order of the resampled_scores is the same order as Evaluators passed in
# assert all(resampler.results.resampled_scores.columns.values == ['kappa', 'sensitivity', 'specificity', 'error_rate']) # noqa
#
# # score_means and score_standard_deviations comes from resampled_scores, so testing both
# assert isclose(resampler.results.score_means['kappa'], 0.586495320545703)
# assert isclose(resampler.results.score_means['sensitivity'], 0.721899136052689)
# assert isclose(resampler.results.score_means['specificity'], 0.8617441563168404)
# assert isclose(resampler.results.score_means['error_rate'], 0.192053148900336)
#
# assert isclose(resampler.results.score_standard_deviations['kappa'], 0.06833478821655113)
# assert isclose(resampler.results.score_standard_deviations['sensitivity'], 0.06706830388930413)
# assert isclose(resampler.results.score_standard_deviations['specificity'], 0.03664756028501139)
# assert isclose(resampler.results.score_standard_deviations['error_rate'], 0.031189357324296424)
#
# assert isclose(resampler.results.score_coefficients_of_variation['kappa'], round(0.06833478821655113 / 0.586495320545703, 2)) # noqa
# assert isclose(resampler.results.score_coefficients_of_variation['sensitivity'], round(0.06706830388930413 / 0.721899136052689, 2)) # noqa
# assert isclose(resampler.results.score_coefficients_of_variation['specificity'], round(0.03664756028501139 / 0.8617441563168404, 2)) # noqa
# assert isclose(resampler.results.score_coefficients_of_variation['error_rate'], round(0.031189357324296424 / 0.192053148900336, 2)) # noqa
#
# plt.gcf().clear()
# TestHelper.check_plot('data/test_Resamplers/test_resamplers_RandomForest_classification_cv_boxplot.png', # noqa
# lambda: resampler.results.plot_resampled_scores())
# def test_resamplers_RandomForest_classification_parallization(self):
# data = TestHelper.get_titanic_data()
#
# # main reason we want to split the data is to get the means/st_devs so that we can confirm with
# # e.g. the Searcher
# splitter = ClassificationStratifiedDataSplitter(holdout_ratio=0.25)
# training_indexes, test_indexes = splitter.split(target_values=data.Survived)
#
# train_data = data.iloc[training_indexes]
# train_data_y = train_data.Survived
# train_data = train_data.drop(columns='Survived')
#
# transformations = [RemoveColumnsTransformer(['PassengerId', 'Name', 'Ticket', 'Cabin']),
# CategoricConverterTransformer(['Pclass', 'SibSp', 'Parch']),
# ImputationTransformer(),
# DummyEncodeTransformer(CategoricalEncoding.ONE_HOT)]
#
# score_list = [KappaScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)),
# SensitivityScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)), # noqa
# SpecificityScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)), # noqa
# ErrorRateScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1))] # noqa
#
# resampler = RepeatedCrossValidationResampler(
# model=RandomForestClassifier(),
# transformations=transformations,
# scores=score_list,
# folds=5,
# repeats=5,
# parallelization_cores=-1)
#
# self.assertRaises(ModelNotFittedError, lambda: resampler.results)
#
# time_start = time.time()
# resampler.resample(data_x=train_data, data_y=train_data_y, hyper_params=RandomForestHP())
# time_stop = time.time()
#
# if not TestHelper.is_debugging():
# assert (time_stop - time_start) < 15 # goes from ~30 sec to < 10 with parallelization
#
# TestHelper.save_string(resampler.results,
# 'data/test_Resamplers/test_resamplers_RandomForest_classification_parallization_string.txt') # noqa
#
# assert len(resampler.results._scores) == 25
# assert all([len(x) == 4 and
# isinstance(x[0], KappaScore) and
# isinstance(x[1], SensitivityScore) and
# isinstance(x[2], SpecificityScore) and
# isinstance(x[3], ErrorRateScore)
# for x in resampler.results._scores])
# assert resampler.results.num_resamples == 25
#
# # noinspection SpellCheckingInspection
# assert resampler.results.score_names == ['kappa', 'sensitivity', 'specificity', 'error_rate']
#
# # make sure the order of the resampled_scores is the same order as Evaluators passed in
# assert all(resampler.results.resampled_scores.columns.values == ['kappa', 'sensitivity', 'specificity', 'error_rate']) # noqa
#
# # score_means and score_standard_deviations comes from resampled_scores, so testing both
# assert isclose(resampler.results.score_means['kappa'], 0.586495320545703)
# assert isclose(resampler.results.score_means['sensitivity'], 0.721899136052689)
# assert isclose(resampler.results.score_means['specificity'], 0.8617441563168404)
# assert isclose(resampler.results.score_means['error_rate'], 0.192053148900336)
#
# assert isclose(resampler.results.score_standard_deviations['kappa'], 0.06833478821655113)
# assert isclose(resampler.results.score_standard_deviations['sensitivity'], 0.06706830388930413)
# assert isclose(resampler.results.score_standard_deviations['specificity'], 0.03664756028501139)
# assert isclose(resampler.results.score_standard_deviations['error_rate'], 0.031189357324296424)
#
# assert isclose(resampler.results.score_coefficients_of_variation['kappa'], round(0.06833478821655113 / 0.586495320545703, 2)) # noqa
# assert isclose(resampler.results.score_coefficients_of_variation['sensitivity'], round(0.06706830388930413 / 0.721899136052689, 2)) # noqa
# assert isclose(resampler.results.score_coefficients_of_variation['specificity'], round(0.03664756028501139 / 0.8617441563168404, 2)) # noqa
# assert isclose(resampler.results.score_coefficients_of_variation['error_rate'], round(0.031189357324296424 / 0.192053148900336, 2)) # noqa
# noinspection PyTypeChecker
    def test_resampling_roc_pr_thresholds(self):
        """Verify that a TwoClassThresholdDecorator attached as a fold decorator accumulates the
        ideal ROC / precision-recall thresholds across folds, and that the same decorator object
        is stored on the resampler results. Runs three scenarios: Kappa score (positive class via
        score._converter), AucRoc score (positive class directly on the score), and a
        DummyClassifier edge case where all thresholds collapse to 0.0.
        """
        decorator = TwoClassThresholdDecorator()
        # resampler gets the positive class from either the score directly, or the score._converter; test
        # using both score types (e.g. AucX & Kappa)
        data = TestHelper.get_titanic_data()
        splitter = ClassificationStratifiedDataSplitter(holdout_ratio=0.25)
        training_indexes, test_indexes = splitter.split(target_values=data.Survived)
        train_data_y = data.iloc[training_indexes].Survived
        train_data = data.iloc[training_indexes].drop(columns='Survived')
        transformations = [RemoveColumnsTransformer(['PassengerId', 'Name', 'Ticket', 'Cabin']),
                           CategoricConverterTransformer(['Pclass', 'SibSp', 'Parch']),
                           ImputationTransformer(),
                           DummyEncodeTransformer(CategoricalEncoding.ONE_HOT)]
        score_list = [KappaScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1))]
        # NOTE(review): this parallelized resampler is immediately replaced below and never
        # resampled; it is kept (with the commented-out assertRaises) from a prior revision.
        resampler = RepeatedCrossValidationResampler(
            model=RandomForestClassifier(),
            transformations=transformations,
            scores=score_list,
            folds=5,
            repeats=1,
            fold_decorators=[decorator],
            parallelization_cores=-1)
        # self.assertRaises(AssertionError,
        #                   lambda: resampler.resample(data_x=train_data,
        #                                              data_y=train_data_y,
        #                                              hyper_params=RandomForestHP()))
        # redefine resampler without parallelization
        resampler = RepeatedCrossValidationResampler(
            model=RandomForestClassifier(),
            transformations=transformations,
            scores=score_list,
            folds=5,
            repeats=1,
            fold_decorators=[decorator])
        start_time = time.time()
        resampler.resample(data_x=train_data, data_y=train_data_y, hyper_params=RandomForestHP())
        resample_time = time.time() - start_time
        # assert resample_time < 20  # Non-Parallelization: ~31 seconds; Parallelization: ~12 seconds
        TestHelper.save_string(resampler.results,
                               'data/test_Resamplers/test_resampling_roc_pr_thresholds_string.txt')
        # one ideal threshold per fold (5 folds, 1 repeat)
        expected_roc_thresholds = [0.43, 0.31, 0.47, 0.59, 0.48]
        expected_precision_recall_thresholds = [0.43, 0.53, 0.64, 0.59, 0.6]
        assert decorator.roc_ideal_thresholds == expected_roc_thresholds
        assert decorator.precision_recall_ideal_thresholds == expected_precision_recall_thresholds
        # mean/st-dev/CV properties are derived from the per-fold threshold lists
        assert isclose(decorator.roc_threshold_mean, np.mean(expected_roc_thresholds))
        assert isclose(decorator.precision_recall_threshold_mean, np.mean(expected_precision_recall_thresholds))  # noqa
        assert isclose(decorator.roc_threshold_st_dev, np.std(expected_roc_thresholds))
        assert isclose(decorator.precision_recall_threshold_st_dev, np.std(expected_precision_recall_thresholds))  # noqa
        assert isclose(decorator.roc_threshold_cv, round(np.std(expected_roc_thresholds) / np.mean(expected_roc_thresholds), 2))  # noqa
        assert isclose(decorator.precision_recall_threshold_cv, round(np.std(expected_precision_recall_thresholds) / np.mean(expected_precision_recall_thresholds), 2))  # noqa
        # the object should be stored in the results as the first and only decorator element
        assert len(resampler.results.decorators) == 1
        assert resampler.results.decorators[0] is decorator  # should be the same objects
        # Test AucX (just test 2 folds, to make sure it finds `positive_class` (takes too long to test more)
        decorator = TwoClassThresholdDecorator()
        transformations = [RemoveColumnsTransformer(['PassengerId', 'Name', 'Ticket', 'Cabin']),
                           CategoricConverterTransformer(['Pclass', 'SibSp', 'Parch']),
                           ImputationTransformer(),
                           DummyEncodeTransformer(CategoricalEncoding.ONE_HOT)]
        score_list = [AucRocScore(positive_class=1)]
        resampler = RepeatedCrossValidationResampler(
            model=RandomForestClassifier(),
            transformations=transformations,
            scores=score_list,
            folds=2,
            repeats=1,
            fold_decorators=[decorator])
        resampler.resample(data_x=train_data, data_y=train_data_y, hyper_params=RandomForestHP())
        expected_roc_thresholds = [0.35, 0.48]
        expected_precision_recall_thresholds = [0.47, 0.48]
        assert decorator.roc_ideal_thresholds == expected_roc_thresholds
        assert decorator.precision_recall_ideal_thresholds == expected_precision_recall_thresholds
        # the object should be stored in the results as the first and only decorator element
        assert len(resampler.results.decorators) == 1
        assert resampler.results.decorators[0] is decorator  # should be the same objects
        # Test DummyClassifier; utilize edge cases
        decorator = TwoClassThresholdDecorator()
        transformations = [RemoveColumnsTransformer(['PassengerId', 'Name', 'Ticket', 'Cabin']),
                           CategoricConverterTransformer(['Pclass', 'SibSp', 'Parch']),
                           ImputationTransformer(),
                           DummyEncodeTransformer(CategoricalEncoding.ONE_HOT)]
        score_list = [AucRocScore(positive_class=1)]
        resampler = RepeatedCrossValidationResampler(
            model=DummyClassifier(strategy=DummyClassifierStrategy.MOST_FREQUENT),
            transformations=transformations,
            scores=score_list,
            folds=2,
            repeats=1,
            fold_decorators=[decorator])
        resampler.resample(data_x=train_data, data_y=train_data_y)
        # a constant-prediction classifier yields degenerate ideal thresholds of 0.0 per fold
        expected_roc_thresholds = [0.0, 0.0]
        expected_precision_recall_thresholds = [0.0, 0.0]
        assert decorator.roc_ideal_thresholds == expected_roc_thresholds
        assert decorator.precision_recall_ideal_thresholds == expected_precision_recall_thresholds
        # the object should be stored in the results as the first and only decorator element
        assert len(resampler.results.decorators) == 1
        assert resampler.results.decorators[0] is decorator  # should be the same objects
# noinspection PyTypeChecker
# def test_resampling_roc_pr_thresholds_resampler_parallelization(self):
# ######################################################################################################
# # turn off parallelization for TwoClassThresholdDecorator and on for RepeatedCrossValidationResampler
# ######################################################################################################
# decorator = TwoClassThresholdDecorator(parallelization_cores=0) # turn off parallelization
# # resampler gets the positive class from either the score directly, or the score._converter; test
# # using both score types (e.g. AucX & Kappa)
# data = TestHelper.get_titanic_data()
# splitter = ClassificationStratifiedDataSplitter(holdout_ratio=0.25)
# training_indexes, test_indexes = splitter.split(target_values=data.Survived)
#
# train_data_y = data.iloc[training_indexes].Survived
# train_data = data.iloc[training_indexes].drop(columns='Survived')
#
# transformations = [RemoveColumnsTransformer(['PassengerId', 'Name', 'Ticket', 'Cabin']),
# CategoricConverterTransformer(['Pclass', 'SibSp', 'Parch']),
# ImputationTransformer(),
# DummyEncodeTransformer(CategoricalEncoding.ONE_HOT)]
#
# score_list = [KappaScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1))]
# resampler = RepeatedCrossValidationResampler(
# model=RandomForestClassifier(),
# transformations=transformations,
# scores=score_list,
# folds=5,
# repeats=1,
# fold_decorators=[decorator],
# parallelization_cores=-1) # turn on parallelization, even though it won't help because 1 repeat
#
# # start_time = time.time()
# resampler.resample(data_x=train_data, data_y=train_data_y, hyper_params=RandomForestHP())
# # resample_time = time.time() - start_time
#
# expected_roc_thresholds = [0.43, 0.31, 0.47, 0.59, 0.48]
# expected_precision_recall_thresholds = [0.43, 0.53, 0.64, 0.59, 0.6]
#
# ######################################################################################################
# # NOTE: because we used parallelization with the resampler, the original decorator was not used;
# # it was copied into the process, so we have to get the saved decorators (per fold) from
# # `fold_decorators`
# # Because only 1 repeat was used, there is only 1 and it should match what we expected from the
# # decorator object had we not used parallelization; if there were multiple repeats, we'd have
# # multiple fold_decorator items that we would have to concatenate (or flatten) to get the equivalent
# # of the non-parallelization scenario
# ######################################################################################################
# decorator = resampler.decorators[0]
#
# assert decorator.roc_ideal_thresholds == expected_roc_thresholds
# assert decorator.precision_recall_ideal_thresholds == expected_precision_recall_thresholds
# assert isclose(decorator.roc_threshold_mean, np.mean(expected_roc_thresholds))
# assert isclose(decorator.precision_recall_threshold_mean, np.mean(expected_precision_recall_thresholds)) # noqa
# assert isclose(decorator.roc_threshold_st_dev, np.std(expected_roc_thresholds))
# assert isclose(decorator.precision_recall_threshold_st_dev, np.std(expected_precision_recall_thresholds)) # noqa
# assert isclose(decorator.roc_threshold_cv, round(np.std(expected_roc_thresholds) / np.mean(expected_roc_thresholds), 2)) # noqa
# assert isclose(decorator.precision_recall_threshold_cv, round(np.std(expected_precision_recall_thresholds) / np.mean(expected_precision_recall_thresholds), 2)) # noqa
#
# # the object should be stored in the results as the first and only decorator element
# assert len(resampler.results.decorators) == 1
# assert resampler.results.decorators[0] is decorator # should be the same objects
    def test_resampler_results_caching_without_model_cacher(self):
        """Verify that resampler RESULTS (not individual models) can be cached via a
        results_persistence_manager: run a full 5x5 resample that writes 'test.pkl', then build a
        second resampler with deliberately different/invalid configuration and confirm it returns
        the identical cached results almost instantly. Cleans up the cache directory at the end.
        """
        data = TestHelper.get_titanic_data()
        # main reason we want to split the data is to get the means/st_devs so that we can confirm with
        # e.g. the Searcher
        splitter = ClassificationStratifiedDataSplitter(holdout_ratio=0.25)
        training_indexes, _ = splitter.split(target_values=data.Survived)
        train_data = data.iloc[training_indexes]
        train_data_y = train_data.Survived
        train_data = train_data.drop(columns='Survived')
        transformations = [RemoveColumnsTransformer(['PassengerId', 'Name', 'Ticket', 'Cabin']),
                           CategoricConverterTransformer(['Pclass', 'SibSp', 'Parch']),
                           ImputationTransformer(),
                           DummyEncodeTransformer(CategoricalEncoding.ONE_HOT)]
        score_list = [KappaScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)),
                      SensitivityScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)),
                      SpecificityScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)),
                      ErrorRateScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1))]
        cache_directory = TestHelper.ensure_test_directory('data/test_Resamplers/cached_resampler/')
        resampler = RepeatedCrossValidationResampler(
            model=RandomForestClassifier(),
            transformations=transformations,
            scores=score_list,
            results_persistence_manager=LocalCacheManager(cache_directory=cache_directory, key='test'),
            folds=5,
            repeats=5,
            parallelization_cores=-1)
        # `results` is unavailable before `resample()` is called
        self.assertRaises(ModelNotFittedError, lambda: resampler.results)
        resampler.resample(data_x=train_data, data_y=train_data_y, hyper_params=RandomForestHP())
        # 5 folds x 5 repeats == 25 resamples, each holding the 4 scores in the order passed in
        assert len(resampler.results._scores) == 25
        assert all([len(x) == 4 and
                    isinstance(x[0], KappaScore) and
                    isinstance(x[1], SensitivityScore) and
                    isinstance(x[2], SpecificityScore) and
                    isinstance(x[3], ErrorRateScore)
                    for x in resampler.results._scores])
        assert resampler.results.num_resamples == 25
        # the results persistence manager should have written the whole results object to 'test.pkl'
        expected_file = 'test.pkl'
        assert os.path.isfile(os.path.join(cache_directory, expected_file))
        assert resampler.results.score_names == ['kappa', 'sensitivity', 'specificity', 'error_rate']
        # make sure the order of the resampled_scores is the same order as Evaluators passed in
        assert all(resampler.results.resampled_scores.columns.values == ['kappa', 'sensitivity', 'specificity', 'error_rate'])  # noqa
        # score_means and score_standard_deviations comes from resampled_scores, so testing both
        assert isclose(resampler.results.score_means['kappa'], 0.586495320545703)
        assert isclose(resampler.results.score_means['sensitivity'], 0.721899136052689)
        assert isclose(resampler.results.score_means['specificity'], 0.8617441563168404)
        assert isclose(resampler.results.score_means['error_rate'], 0.192053148900336)
        assert isclose(resampler.results.score_standard_deviations['kappa'], 0.06833478821655113)
        assert isclose(resampler.results.score_standard_deviations['sensitivity'], 0.06706830388930413)
        assert isclose(resampler.results.score_standard_deviations['specificity'], 0.03664756028501139)
        assert isclose(resampler.results.score_standard_deviations['error_rate'], 0.031189357324296424)
        assert isclose(resampler.results.score_coefficients_of_variation['kappa'], round(0.06833478821655113 / 0.586495320545703, 2))  # noqa
        assert isclose(resampler.results.score_coefficients_of_variation['sensitivity'], round(0.06706830388930413 / 0.721899136052689, 2))  # noqa
        assert isclose(resampler.results.score_coefficients_of_variation['specificity'], round(0.03664756028501139 / 0.8617441563168404, 2))  # noqa
        assert isclose(resampler.results.score_coefficients_of_variation['error_rate'], round(0.031189357324296424 / 0.192053148900336, 2))  # noqa
        ######################################################################################################
        # Now do again with new resampler that gets cached results
        ######################################################################################################
        # we should be able to pass in a different model (have to pass in a model); no transformations, etc.
        # and still get back the same results, this is how we know the results are cached and correctly
        # retrieved
        # noinspection PyTypeChecker
        resampler_cached = RepeatedCrossValidationResampler(
            model=RandomForestClassifier(),
            transformations=None,  # different
            scores=[],  # different
            results_persistence_manager=LocalCacheManager(cache_directory=cache_directory, key='test'),
            folds=1,  # different
            repeats=1,  # different
            parallelization_cores=-1)
        self.assertRaises(ModelNotFittedError, lambda: resampler_cached.results)
        time_start = time.time()
        # noinspection PyTypeChecker
        resampler_cached.resample(data_x=None, data_y=None, hyper_params=None)
        time_stop = time.time()
        # loading from cache should be near-instant (no actual resampling with data_x/data_y=None)
        assert (time_stop - time_start) < 1
        # the cached results are identical to the original run's results
        assert len(resampler_cached.results._scores) == 25
        assert all([len(x) == 4 and
                    isinstance(x[0], KappaScore) and
                    isinstance(x[1], SensitivityScore) and
                    isinstance(x[2], SpecificityScore) and
                    isinstance(x[3], ErrorRateScore)
                    for x in resampler_cached.results._scores])
        assert resampler_cached.results.num_resamples == 25
        assert os.path.isfile(os.path.join(cache_directory, expected_file))
        assert resampler_cached.results.score_names == ['kappa', 'sensitivity', 'specificity', 'error_rate']
        # make sure the order of the resampled_scores is the same order as Evaluators passed in
        assert all(resampler_cached.results.resampled_scores.columns.values == ['kappa', 'sensitivity', 'specificity', 'error_rate'])  # noqa
        # score_means and score_standard_deviations comes from resampled_scores, so testing both
        assert isclose(resampler_cached.results.score_means['kappa'], 0.586495320545703)
        assert isclose(resampler_cached.results.score_means['sensitivity'], 0.721899136052689)
        assert isclose(resampler_cached.results.score_means['specificity'], 0.8617441563168404)
        assert isclose(resampler_cached.results.score_means['error_rate'], 0.192053148900336)
        assert isclose(resampler_cached.results.score_standard_deviations['kappa'], 0.06833478821655113)
        assert isclose(resampler_cached.results.score_standard_deviations['sensitivity'], 0.06706830388930413)
        assert isclose(resampler_cached.results.score_standard_deviations['specificity'], 0.03664756028501139)
        assert isclose(resampler_cached.results.score_standard_deviations['error_rate'], 0.031189357324296424)
        assert isclose(resampler_cached.results.score_coefficients_of_variation['kappa'], round(0.06833478821655113 / 0.586495320545703, 2))  # noqa
        assert isclose(resampler_cached.results.score_coefficients_of_variation['sensitivity'], round(0.06706830388930413 / 0.721899136052689, 2))  # noqa
        assert isclose(resampler_cached.results.score_coefficients_of_variation['specificity'], round(0.03664756028501139 / 0.8617441563168404, 2))  # noqa
        assert isclose(resampler_cached.results.score_coefficients_of_variation['error_rate'], round(0.031189357324296424 / 0.192053148900336, 2))  # noqa
        # clean up the cache directory created for this test
        shutil.rmtree(cache_directory)
def test_resampler_results_caching_with_model_cacher(self):
data = TestHelper.get_titanic_data()
# main reason we want to split the data is to get the means/st_devs so that we can confirm with
# e.g. the Searcher
splitter = ClassificationStratifiedDataSplitter(holdout_ratio=0.25)
training_indexes, _ = splitter.split(target_values=data.Survived)
train_data = data.iloc[training_indexes]
train_data_y = train_data.Survived
train_data = train_data.drop(columns='Survived')
transformations = [RemoveColumnsTransformer(['PassengerId', 'Name', 'Ticket', 'Cabin']),
CategoricConverterTransformer(['Pclass', 'SibSp', 'Parch']),
ImputationTransformer(),
DummyEncodeTransformer(CategoricalEncoding.ONE_HOT)]
score_list = [KappaScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)),
SensitivityScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)),
SpecificityScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1)),
ErrorRateScore(converter=TwoClassThresholdConverter(threshold=0.5, positive_class=1))]
model_cache_directory = TestHelper.ensure_test_directory('data/test_Resamplers/temp_model_cache/')
resampler_cache_directory = TestHelper.ensure_test_directory('data/test_Resamplers/cached_resampler/')
resampler = RepeatedCrossValidationResampler(
model=RandomForestClassifier(),
transformations=transformations,
scores=score_list,
model_persistence_manager=LocalCacheManager(cache_directory=model_cache_directory),
results_persistence_manager=LocalCacheManager(cache_directory=resampler_cache_directory,
key='test'),
folds=5,
repeats=5,
parallelization_cores=-1)
self.assertRaises(ModelNotFittedError, lambda: resampler.results)
resampler.resample(data_x=train_data, data_y=train_data_y, hyper_params=RandomForestHP())
assert len(resampler.results._scores) == 25
assert all([len(x) == 4 and
isinstance(x[0], KappaScore) and
isinstance(x[1], SensitivityScore) and
isinstance(x[2], SpecificityScore) and
isinstance(x[3], ErrorRateScore)
for x in resampler.results._scores])
assert resampler.results.num_resamples == 25
expected_file = 'repeat{0}_fold{1}_RandomForestClassifier_n_estimators500_criteriongini_max_featuresNone_max_depthNone_min_samples_split2_min_samples_leaf1_min_weight_fraction_leaf0.0_max_leaf_nodesNone_min_impurity_decrease0.0_bootstrapTrue_oob_scoreFalse.pkl' # noqa
for fold_index in range(5):
for repeat_index in range(5):
assert os.path.isfile(os.path.join(model_cache_directory,
expected_file.format(fold_index, repeat_index)))
# now that we have verify model caching works, we shouldn't need the models since the resampler is
# cached
shutil.rmtree(model_cache_directory)
expected_file = 'test.pkl'
assert os.path.isfile(os.path.join(resampler_cache_directory, expected_file))
assert resampler.results.score_names == ['kappa', 'sensitivity', 'specificity', 'error_rate']
# make sure the order of the resampled_scores is the same order as Evaluators passed in
assert all(resampler.results.resampled_scores.columns.values == ['kappa', 'sensitivity', 'specificity', 'error_rate']) # noqa
# score_means and score_standard_deviations comes from resampled_scores, so testing both
assert isclose(resampler.results.score_means['kappa'], 0.586495320545703)
assert isclose(resampler.results.score_means['sensitivity'], 0.721899136052689)
assert isclose(resampler.results.score_means['specificity'], 0.8617441563168404)
assert isclose(resampler.results.score_means['error_rate'], 0.192053148900336)
assert isclose(resampler.results.score_standard_deviations['kappa'], 0.06833478821655113)
assert isclose(resampler.results.score_standard_deviations['sensitivity'], 0.06706830388930413)
assert isclose(resampler.results.score_standard_deviations['specificity'], 0.03664756028501139)
assert isclose(resampler.results.score_standard_deviations['error_rate'], 0.031189357324296424)
assert isclose(resampler.results.score_coefficients_of_variation['kappa'], round(0.06833478821655113 / 0.586495320545703, 2)) # noqa
assert isclose(resampler.results.score_coefficients_of_variation['sensitivity'], round(0.06706830388930413 / 0.721899136052689, 2)) # noqa
assert isclose(resampler.results.score_coefficients_of_variation['specificity'], round(0.03664756028501139 / 0.8617441563168404, 2)) # noqa
assert isclose(resampler.results.score_coefficients_of_variation['error_rate'], round(0.031189357324296424 / 0.192053148900336, 2)) # noqa
######################################################################################################
# Now do again with new resampler that gets cached results
######################################################################################################
# we should be abble to pass in a different model (have to pass in a model); no transformations, etc.
# and still get back the same results, this is how we know the results are cached and correctly
# retreived
# noinspection PyTypeChecker
resampler_cached = RepeatedCrossValidationResampler(
model=RandomForestClassifier(),
transformations=None, # different
scores=[], # different
# model_persistence_manager shouldn't even be used (and we deleted the models above)
model_persistence_manager=LocalCacheManager(cache_directory=model_cache_directory),
results_persistence_manager=LocalCacheManager(cache_directory=resampler_cache_directory,
key='test'),
folds=1, # different
repeats=1, # different
parallelization_cores=-1)
self.assertRaises(ModelNotFittedError, lambda: resampler_cached.results)
time_start = time.time()
# noinspection PyTypeChecker
resampler_cached.resample(data_x=None, data_y=None, hyper_params=None)
time_stop = time.time()
assert (time_stop - time_start) < 1
assert len(resampler_cached.results._scores) == 25
assert all([len(x) == 4 and
isinstance(x[0], KappaScore) and
isinstance(x[1], SensitivityScore) and
isinstance(x[2], SpecificityScore) and
isinstance(x[3], ErrorRateScore)
for x in resampler_cached.results._scores])
assert resampler_cached.results.num_resamples == 25
assert os.path.isfile(os.path.join(resampler_cache_directory, expected_file))
assert resampler_cached.results.score_names == ['kappa', 'sensitivity', 'specificity', 'error_rate']
# make sure the order of the resampled_scores is the same order as Evaluators passed in
assert all(resampler_cached.results.resampled_scores.columns.values == ['kappa', 'sensitivity', 'specificity', 'error_rate']) # noqa
# score_means and score_standard_deviations comes from resampled_scores, so testing both
assert isclose(resampler_cached.results.score_means['kappa'], 0.586495320545703)
assert isclose(resampler_cached.results.score_means['sensitivity'], 0.721899136052689)
assert isclose(resampler_cached.results.score_means['specificity'], 0.8617441563168404)
assert isclose(resampler_cached.results.score_means['error_rate'], 0.192053148900336)
assert isclose(resampler_cached.results.score_standard_deviations['kappa'], 0.06833478821655113)
assert isclose(resampler_cached.results.score_standard_deviations['sensitivity'], 0.06706830388930413)
assert isclose(resampler_cached.results.score_standard_deviations['specificity'], 0.03664756028501139)
assert isclose(resampler_cached.results.score_standard_deviations['error_rate'], 0.031189357324296424)
assert isclose(resampler_cached.results.score_coefficients_of_variation['kappa'], round(0.06833478821655113 / 0.586495320545703, 2)) # noqa
assert isclose(resampler_cached.results.score_coefficients_of_variation['sensitivity'], round(0.06706830388930413 / 0.721899136052689, 2)) # noqa
assert isclose(resampler_cached.results.score_coefficients_of_variation['specificity'], round(0.03664756028501139 / 0.8617441563168404, 2)) # noqa
assert isclose(resampler_cached.results.score_coefficients_of_variation['error_rate'], round(0.031189357324296424 / 0.192053148900336, 2)) # noqa
shutil.rmtree(resampler_cache_directory)
def test_resampler_hyper_params(self):
data = TestHelper.get_cement_data()
data_y = data.strength
data = data.drop(columns='strength')
data_copy = data.copy()
resampler = RepeatedCrossValidationResampler(
model=ElasticNetRegressor(),
transformations=[ImputationTransformer(),
DummyEncodeTransformer(CategoricalEncoding.DUMMY)],
scores=[RmseScore(),
MaeScore()],
folds=5,
repeats=5,
fold_decorators=[ModelDecorator()],
parallelization_cores=0)
hp = ElasticNetRegressorHP(alpha=1, l1_ratio=1)
resampler.resample(data_x=data, data_y=data_y, hyper_params=hp)
# only passed in 1 decorator (so it is at index [0])
assert len(resampler.decorators) == 1
model_list = resampler.decorators[0]._model_list
assert len(model_list) == 5*5
# make sure all of the trained params are the same
trained_params = [x.model_object.get_params() for x in model_list]
for index in range(len(trained_params)):
assert trained_params[index] == trained_params[0]
# make sure the param dict is set to what we think it should, and that it is a subset
# of all the trained params
assert hp.params_dict == {'alpha': 1, 'l1_ratio': 1}
assert OOLearningHelpers.dict_is_subset(subset=hp.params_dict, superset=trained_params[0])
assert TestHelper.ensure_all_values_equal(data, data_copy)
resampler = RepeatedCrossValidationResampler(
model=ElasticNetRegressor(),
transformations=[ImputationTransformer(),
DummyEncodeTransformer(CategoricalEncoding.DUMMY)],
scores=[RmseScore(),
MaeScore()],
folds=5,
repeats=5,
fold_decorators=[ModelDecorator()],
parallelization_cores=0)
hp = ElasticNetRegressorHP()
resampler.resample(data_x=data, data_y=data_y, hyper_params=hp)
# only passed in 1 decorator (so it is at index [0])
assert len(resampler.decorators) == 1
model_list = resampler.decorators[0]._model_list
assert len(model_list) == 5 * 5
# make sure all of the trained params are the same
trained_params = [x.model_object.get_params() for x in model_list]
for index in range(len(trained_params)):
assert trained_params[index] == trained_params[0]
# make sure the param dict is set to what we think it should, and that it is a subset
# of all the trained params
assert hp.params_dict == {'alpha': 0.5, 'l1_ratio': 0.5}
assert OOLearningHelpers.dict_is_subset(subset=hp.params_dict, superset=trained_params[0])
def test_resampler_append_transformations(self):
data = TestHelper.get_cement_data()
resampler = RepeatedCrossValidationResampler(model=MockRegressionModelWrapper(data_y=data.strength),
transformations=[ImputationTransformer(),
DummyEncodeTransformer(CategoricalEncoding.DUMMY)], # noqa
scores=[RmseScore(),
MaeScore()])
transformations = resampler._transformer_factory.get()
assert len(transformations) == 2
assert isinstance(transformations[0], ImputationTransformer)
assert isinstance(transformations[1], DummyEncodeTransformer)
resampler.append_transformations([BoxCoxTransformer(features=['temp'])])
transformations = resampler._transformer_factory.get()
assert len(transformations) == 3
assert isinstance(transformations[0], ImputationTransformer)
assert isinstance(transformations[1], DummyEncodeTransformer)
assert isinstance(transformations[2], BoxCoxTransformer)
######################################################################################################
# None, [None], and [] should not change the number of transformations
######################################################################################################
resampler.append_transformations(None)
transformations = resampler._transformer_factory.get()
assert len(transformations) == 3
assert isinstance(transformations[0], ImputationTransformer)
assert isinstance(transformations[1], DummyEncodeTransformer)
assert isinstance(transformations[2], BoxCoxTransformer)
resampler.append_transformations([None])
transformations = resampler._transformer_factory.get()
assert len(transformations) == 3
assert isinstance(transformations[0], ImputationTransformer)
assert isinstance(transformations[1], DummyEncodeTransformer)
assert isinstance(transformations[2], BoxCoxTransformer)
resampler.append_transformations([])
transformations = resampler._transformer_factory.get()
assert len(transformations) == 3
assert isinstance(transformations[0], ImputationTransformer)
assert isinstance(transformations[1], DummyEncodeTransformer)
assert isinstance(transformations[2], BoxCoxTransformer)
resampler.append_transformations([BooleanToIntegerTransformer(), CenterScaleTransformer()])
transformations = resampler._transformer_factory.get()
assert len(transformations) == 5
assert isinstance(transformations[0], ImputationTransformer)
assert isinstance(transformations[1], DummyEncodeTransformer)
assert isinstance(transformations[2], BoxCoxTransformer)
assert isinstance(transformations[3], BooleanToIntegerTransformer)
assert isinstance(transformations[4], CenterScaleTransformer)
| 59.406629
| 277
| 0.669787
| 7,569
| 73,486
| 6.290131
| 0.070287
| 0.05209
| 0.049905
| 0.051166
| 0.862424
| 0.847658
| 0.838437
| 0.830624
| 0.820269
| 0.803319
| 0
| 0.054802
| 0.225022
| 73,486
| 1,236
| 278
| 59.454693
| 0.781194
| 0.319449
| 0
| 0.732036
| 0
| 0
| 0.059043
| 0.021478
| 0
| 0
| 0
| 0
| 0.32485
| 1
| 0.028443
| false
| 0.010479
| 0.019461
| 0
| 0.052395
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
740fbd254e3ff6a6194fc8d45aa0bca5b4f77d30
| 1,722
|
py
|
Python
|
tests/test_filter.py
|
c4deszes/pytest-variant
|
0b7de6bd5c11ef0923f986aa7ee3f59af8cc6f65
|
[
"MIT"
] | null | null | null |
tests/test_filter.py
|
c4deszes/pytest-variant
|
0b7de6bd5c11ef0923f986aa7ee3f59af8cc6f65
|
[
"MIT"
] | null | null | null |
tests/test_filter.py
|
c4deszes/pytest-variant
|
0b7de6bd5c11ef0923f986aa7ee3f59af8cc6f65
|
[
"MIT"
] | null | null | null |
#pylint: disable = missing-function-docstring
"""
Tests variant filtering mechanisms
"""
import pytest
@pytest.mark.integration
def test_filter_dict(testdir):
testdir.makeconftest(
"""
# Local variant setting
def pytest_configure(config):
config.variant = {'os': 'win32', 'arch': 'x86'}
"""
)
testdir.makepyfile(
"""
import pytest
def test_feature_common(variant):
pass
@pytest.mark.variant({'os': 'win32'})
def test_feature_win32():
pass
@pytest.mark.variant({'os': 'win32', 'arch': 'x64'})
def test_feature_win32_x64():
pass
@pytest.mark.variant({'os': 'win32', 'arch': ['x86', 'x64']})
def test_feature_win32_x86():
pass
"""
)
result = testdir.runpytest_subprocess()
result.assert_outcomes(passed=3, failed=0)
@pytest.mark.integration
def test_filter_expr(testdir):
testdir.makeconftest(
"""
# Local variant setting
def pytest_configure(config):
config.variant = {'os': 'win32', 'arch': 'x86'}
"""
)
testdir.makepyfile(
"""
import pytest
def test_feature_common(variant):
pass
@pytest.mark.variant("os == 'win32'")
def test_feature_win32():
pass
@pytest.mark.variant("os == 'win32' and arch == 'x64'")
def test_feature_win32_x64():
pass
@pytest.mark.variant("os == 'win32' and arch == 'x86'")
def test_feature_win32_x86():
pass
"""
)
result = testdir.runpytest_subprocess()
result.assert_outcomes(passed=3, failed=0)
| 24.6
| 69
| 0.558072
| 174
| 1,722
| 5.350575
| 0.247126
| 0.075188
| 0.120301
| 0.135338
| 0.902256
| 0.895811
| 0.822771
| 0.818475
| 0.807734
| 0.807734
| 0
| 0.045151
| 0.305459
| 1,722
| 69
| 70
| 24.956522
| 0.733278
| 0.045877
| 0
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 1
| 0.117647
| false
| 0.117647
| 0.058824
| 0
| 0.176471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
742133beb14bbbfb278e3101f845855b80cc19ed
| 11,817
|
py
|
Python
|
web2py/applications/rip/controllers/vcOperation.py
|
2spmohanty/vcenter-automation
|
1d10b765ef335087902b0194ed12a61e53807987
|
[
"Apache-2.0"
] | 1
|
2019-10-02T13:25:03.000Z
|
2019-10-02T13:25:03.000Z
|
web2py/applications/rip/controllers/vcOperation.py
|
2spmohanty/vcenter-automation
|
1d10b765ef335087902b0194ed12a61e53807987
|
[
"Apache-2.0"
] | null | null | null |
web2py/applications/rip/controllers/vcOperation.py
|
2spmohanty/vcenter-automation
|
1d10b765ef335087902b0194ed12a61e53807987
|
[
"Apache-2.0"
] | 1
|
2021-11-05T09:51:02.000Z
|
2021-11-05T09:51:02.000Z
|
__author__ = 'smrutim'
# -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
#########################################################################
## This is a VC controller
#########################################################################
import os
import glob
import paramiko
import shutil
import gluon.contenttype as c
from ctypes import *
import sys
import commands
from gluon.tools import Crud
crud = Crud(db)
from pyVmomi import vim
from pyVim.connect import SmartConnect, Disconnect
import atexit
import getpass
import logging
import re
import ssl
import requests
import time
import json
import datetime
if False:
from gluon import *
request = current.request
response = current.response
session = current.session
cache = current.cache
T = current.T
def vcApi():
return dict()
########################## vcMemLeak Begins ###################################################
def insertIntoopstatusTable_vcMemLeak(form):
try:
vcops_launchid = form.vars.launchid
vcops_launchdate = form.vars.launchdate
vcops_launchby = form.vars.launchby
vcops_inputjson = form.vars.inputjson
vc_ops_json_string = json.dumps(form.vars.inputjson, indent=4)
vc_ops_json_data = json.loads(vc_ops_json_string)
if vc_ops_json_data['operation'] == "memoryleak" and "vc" in vc_ops_json_data:
operation_type = vc_ops_json_data['operation']
vc_dict = vc_ops_json_data['vc']
for vc_item in vc_dict:
host = vc_item['vcname']
service_name = vc_item['service']
runTime = datetime.datetime.now().strftime("%d-%m-%y:%H:%M:%S")
print ("THREAD - %s - Memory Leak - Form Validation - Host: %s , Service Name: %s Operation Type: %s"
%(runTime,host,str(service_name),operation_type))
db.opstatus.insert(launchid=vcops_launchid, launchdate=vcops_launchdate
, launchby=vcops_launchby, opstype=operation_type,
opsdata=vcops_inputjson)
db.commit()
else:
e = Exception("Invalid JSON input for VC Memory Leak Analysis Operation.")
session.flash = T(str(e))
except Exception,e:
runTime = datetime.datetime.now().strftime("%d-%m-%y:%H:%M:%S")
print ("THREAD - %s - Memory Leak Analysis - Form Validation Error: %s."%(runTime,str(e)))
print "Unable to initiate operation due to " + str(e)
print "Follow the following steps sequentially to debug the error."
print "1 : Check and validate JSON with sample JSON."
print "2 : If JSON is valid, Please click the below link to file a bug with description."
session.flash = T(str(e))
def vcMemLeak():
form = SQLFORM(db.vcmemleaktable)
form.custom.widget.inputjson.update(_placeholder="{'Refer Sample JSON':''})")
if form.process(onvalidation=insertIntoopstatusTable_vcMemLeak).accepted:
session.flash = 'Success!'
session.queryfield = form.vars.launchid
redirect(URL('opStatus', 'opstats', vars=dict(queryfield=session.queryfield)))
elif form.errors:
session.flash = 'Form has errors'
return dict(form=form)
########################## vcMemLeak Ends ###################################################
########################## vcHeapAnalysis Begins ###################################################
def insertIntoopstatusTable_vcHeap(form):
try:
vcops_launchid = form.vars.launchid
vcops_launchdate = form.vars.launchdate
vcops_launchby = form.vars.launchby
vcops_inputjson = form.vars.inputjson
vc_ops_json_string = json.dumps(form.vars.inputjson, indent=4)
vc_ops_json_data = json.loads(vc_ops_json_string)
if vc_ops_json_data['operation'] == "heapanalysis" and "vc" in vc_ops_json_data:
operation_type = vc_ops_json_data['operation']
vc_dict = vc_ops_json_data['vc']
for vc_item in vc_dict:
host = vc_item['vcname']
username = vc_item['username']
password = vc_item['password']
jmapPath = vc_item['jmapPath']
dumpDir = vc_item['dumpDir']
service_name = vc_item['service']
hprofname = vc_item.get('hprof', None)
runTime = datetime.datetime.now().strftime("%d-%m-%y:%H:%M:%S")
print ("THREAD - %s - Heap Analysis - Form Validation - Host: %s , Service Name: %s Operation Type: %s"
%(runTime,host,str(service_name),operation_type))
db.opstatus.insert(launchid=vcops_launchid, launchdate=vcops_launchdate
, launchby=vcops_launchby, opstype=operation_type,
opsdata=vcops_inputjson)
db.commit()
else:
e = Exception("Invalid JSON input for VC Heap Analysis Operation.")
session.flash = T(str(e))
except Exception,e:
runTime = datetime.datetime.now().strftime("%d-%m-%y:%H:%M:%S")
print ("THREAD - %s - Heap Analysis - Form Validation Error: %s."%(runTime,str(e)))
print "Unable to initiate operation due to " + str(e)
print "Follow the following steps sequentially to debug the error."
print "1 : Check and validate JSON with sample JSON."
print "2 : If JSON is valid, Please click the below link to file a bug with description."
session.flash = T(str(e))
def vcHeapAnalysis():
form = SQLFORM(db.vcheapanalyzetable)
form.custom.widget.inputjson.update(_placeholder="{'Refer Sample JSON':''})")
if form.process(onvalidation=insertIntoopstatusTable_vcHeap).accepted:
session.flash = 'Success!'
session.queryfield = form.vars.launchid
redirect(URL('opStatus', 'opstats', vars=dict(queryfield=session.queryfield)))
elif form.errors:
session.flash = 'Form has errors'
return dict(form=form)
########################## vcHeapAnalysis Ends ###################################################
def vcStats():
return dict()
#################################VC Memory Growth Begins###################################
def insertIntoopstatusTable_vcMemGrowth(form):
try:
vcops_launchid = form.vars.launchid
vcops_launchdate = form.vars.launchdate
vcops_launchby = form.vars.launchby
vcops_inputjson = form.vars.inputjson
vc_ops_json_string = json.dumps(form.vars.inputjson, indent=4)
vc_ops_json_data = json.loads(vc_ops_json_string)
if vc_ops_json_data['operation'] == "memorygrowth" and "vc" in vc_ops_json_data:
operation_type = vc_ops_json_data['operation']
vc_dict = vc_ops_json_data['vc']
for vc_item in vc_dict:
vcName = vc_item["vcname"]
vcUser = vc_item["ssh_user"]
vcPwd = vc_item["ssh_pass"]
vcLocalUser = vc_item["local_user"]
vcLocalPwd = vc_item["local_pass"]
vcBuild = vc_item["vc_build"]
vcVersion = vc_item["vc_version"]
runTime = datetime.datetime.now().strftime("%d-%m-%y:%H:%M:%S")
print ("THREAD - %s - Memory Growth Analysis - Form Validation - VC: %s , Operation Type: %s"
%(runTime,vcName,operation_type))
db.opstatus.insert(launchid=vcops_launchid, launchdate=vcops_launchdate
, launchby=vcops_launchby, opstype=operation_type,
opsdata=vcops_inputjson)
db.commit()
else:
e = Exception("Invalid JSON input for VC Memory Leak Analysis Operation.")
session.flash = T(str(e))
except Exception,e:
runTime = datetime.datetime.now().strftime("%d-%m-%y:%H:%M:%S")
print ("THREAD - %s - Memory Leak Analysis - Form Validation Error: %s."%(runTime,str(e)))
print "Unable to initiate operation due to " + str(e)
print "Follow the following steps sequentially to debug the error."
print "1 : Check and validate JSON with sample JSON."
print "2 : If JSON is valid, Please click the below link to file a bug with description."
session.flash = T(str(e))
def vcMemGrowth():
form = SQLFORM(db.vcmemgrowthtable)
form.custom.widget.inputjson.update(_placeholder="{'Refer Sample JSON':''})")
if form.process(onvalidation=insertIntoopstatusTable_vcMemGrowth).accepted:
session.flash = 'Success!'
session.queryfield = form.vars.launchid
redirect(URL('opStatus', 'opstats', vars=dict(queryfield=session.queryfield)))
elif form.errors:
session.flash = 'Form has errors'
return dict(form=form)
#################################VC Memory Growth Ends###################################
###############################VPXD Memory Leak Begins ###############################
def insertIntoopstatusTable_vpxdMemLeak(form):
try:
vcops_launchid = form.vars.launchid
vcops_launchdate = form.vars.launchdate
vcops_launchby = form.vars.launchby
vcops_inputjson = form.vars.inputjson
vc_ops_json_string = json.dumps(form.vars.inputjson, indent=4)
vc_ops_json_data = json.loads(vc_ops_json_string)
if vc_ops_json_data['operation'] == "vpxdmemleak" and "vc" in vc_ops_json_data:
operation_type = vc_ops_json_data['operation']
vc_dict = vc_ops_json_data['vc']
for vc_item in vc_dict:
host = vc_item['vcname']
username = vc_item['username']
password = vc_item['password']
runTime = datetime.datetime.now().strftime("%d-%m-%y:%H:%M:%S")
print ("THREAD - %s - VPXD Memory Leak Analysis - Form Validation - VC: %s , Operation Type: %s"
% (runTime, host, operation_type))
db.opstatus.insert(launchid=vcops_launchid, launchdate=vcops_launchdate
, launchby=vcops_launchby, opstype=operation_type,
opsdata=vcops_inputjson)
db.commit()
else:
e = Exception("Invalid JSON input for VPXD Memory Leak Analysis Operation.")
session.flash = T(str(e))
except Exception, e:
runTime = datetime.datetime.now().strftime("%d-%m-%y:%H:%M:%S")
print ("THREAD - %s - Memory Leak Analysis - Form Validation Error: %s." % (runTime, str(e)))
print "Unable to initiate operation due to " + str(e)
print "Follow the following steps sequentially to debug the error."
print "1 : Check and validate JSON with sample JSON."
print "2 : If JSON is valid, Please click the below link to file a bug with description."
session.flash = T(str(e))
def vpxdMemLeak():
form = SQLFORM(db.vpxdmemleaktable)
form.custom.widget.inputjson.update(_placeholder="{'Refer Sample JSON':''})")
if form.process(onvalidation=insertIntoopstatusTable_vpxdMemLeak).accepted:
session.flash = 'Success!'
session.queryfield = form.vars.launchid
redirect(URL('opStatus', 'opstats', vars=dict(queryfield=session.queryfield)))
elif form.errors:
session.flash = 'Form has errors'
return dict(form=form)
###############################VPXD Memory Leak Ends ###########################
| 44.931559
| 120
| 0.58162
| 1,318
| 11,817
| 5.074355
| 0.147951
| 0.020933
| 0.037679
| 0.038876
| 0.800239
| 0.791567
| 0.790969
| 0.790969
| 0.790969
| 0.777213
| 0
| 0.001492
| 0.262503
| 11,817
| 263
| 121
| 44.931559
| 0.765921
| 0.024879
| 0
| 0.669811
| 0
| 0.018868
| 0.225773
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.023585
| 0.099057
| null | null | 0.113208
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
742e2b91330d5d6a4077928f26ac6c808c65c22e
| 159,239
|
py
|
Python
|
spcl/models/hierarchy_gcn_label_propagation.py
|
tangshixiang/HCD
|
a843208bf749622d0fb118b9898c8103dd7208c5
|
[
"MIT"
] | 4
|
2021-11-28T07:49:13.000Z
|
2022-01-21T13:59:41.000Z
|
spcl/models/hierarchy_gcn_label_propagation.py
|
tangshixiang/HCD
|
a843208bf749622d0fb118b9898c8103dd7208c5
|
[
"MIT"
] | null | null | null |
spcl/models/hierarchy_gcn_label_propagation.py
|
tangshixiang/HCD
|
a843208bf749622d0fb118b9898c8103dd7208c5
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from spcl.models.utils import GraphConv, MeanAggregator
from spcl.utils.faiss_rerank import compute_jaccard_distance,compute_jaccard_distance_step1,compute_jaccard_distance_inital_rank,compute_knn
class Point_Level_LP(nn.Module):
def __init__(self,alpha,beta=1,method=1,connect_num=20,topk_num=0.45):
super(Point_Level_LP, self).__init__()
#self.loss=torch.nn.CrossEntropyLoss().cuda()
self.loss=torch.nn.BCEWithLogitsLoss()
self.eps = np.finfo(float).eps
self.w_topk=-1
self.alpha=alpha
self.beta=beta
self.once_forward=1
self.method=method
self.connect_num=connect_num
self.topk_num=topk_num
def forward(self,indexes,features,neighbor_num,ori_0,ori_knn_neighbor,gt_conf=None,f_s=None,train=0,two_hop=0):
bs=len(indexes)
if two_hop:
neighbor_num=400
Y=torch.zeros((bs,neighbor_num,neighbor_num)).cuda()
W0=torch.zeros((bs,neighbor_num,neighbor_num)).cuda()
all_neighbors=torch.zeros((bs,neighbor_num)).long().cuda()-1
ori_knn_neighbor=ori_knn_neighbor.cpu().numpy()
for i in range(bs):
unique_hop_2_neighbor=list(set(np.unique(ori_0[ori_knn_neighbor[i,1:]][:,1:]).tolist())-set(ori_knn_neighbor[i].tolist()))
all_neighbor=torch.from_numpy(np.concatenate((ori_knn_neighbor[i],np.array(unique_hop_2_neighbor))))
all_neighbor_feat=features[all_neighbor.long()]
W0[i,:len(all_neighbor_feat),:len(all_neighbor_feat)]=all_neighbor_feat.mm(all_neighbor_feat.t())
Y[i,:len(all_neighbor_feat),:len(all_neighbor_feat)]=torch.eye(len(all_neighbor_feat))
all_neighbors[i,:len(all_neighbor_feat)]=all_neighbor.clone()
Y[:, 0, 0] = 0 # wo self
else:
all_neighbors=ori_knn_neighbor
# cal Y
Y = torch.zeros(bs, neighbor_num, neighbor_num).cuda()
Y[:, :, :neighbor_num] = torch.eye(neighbor_num).unsqueeze(0).repeat(bs, 1, 1)
Y[:, 0, 0] = 0 # wo self
# cal W
index_feat = features[ori_knn_neighbor.view(-1)].view(bs, -1, 2048)
# index_feat=torch.cat((f_s,index_feat[:,1:]))
i_feat = torch.cat((f_s.unsqueeze(1), index_feat[:, 1:neighbor_num]), dim=1)
W0 = i_feat.bmm(i_feat.permute(0, 2, 1))
mask=(1-torch.eye(neighbor_num)).unsqueeze(0).cuda()
if self.method==2:
#import pdb;pdb.set_trace()
#step1-->k_reciprocal_index
topk_num=20
topk, indices = torch.topk(W0, topk_num, dim=2)
mask_top = torch.zeros_like(W0)
mask_top = mask_top.scatter(2, indices, 1)
mask_top = ((mask_top > 0) & (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
#step2-->softmax
W0 = torch.exp(-(2 - 2 * W0))
W0*=mask_top
W0/=torch.sum(W0+self.eps,dim=-1,keepdim=True)
#avg
k2=6
W=torch.zeros_like(W0)
tmp=torch.arange(bs).unsqueeze(1).expand_as(indices[:,:,0])
for kk in range(k2):
W+=W0[tmp,indices[:,:,kk]]
W/=k2
W0=W.clone()#keep for split part
#1-jarrcard distance-->indexes
W=torch.sum(torch.min(W[:,0,:].unsqueeze(1).expand_as(W),W),dim=-1)
preds=torch.zeros((bs,neighbor_num,neighbor_num)).cuda()
preds[:,0]=W/(2-W)
preds[:,0,0]=0
else:
#topk_num=10
topk, indices = torch.topk(W0, self.connect_num,dim=2)
mask_top = torch.zeros_like(W0)
mask_top = mask_top.scatter(-1, indices, 1)
mask_top = ((mask_top>0)&(mask_top.permute((0,2,1))>0)).type(torch.float32)
##for debug###
#print('W:',(W0[0][0][:topk_num]).tolist())
##############
#W0=torch.exp(W0)
# #change y to softmax
# with torch.no_grad():
# sim=W0[:,0,:]
# Y[:,:,-1]=F.softmax(sim,dim=1)
#W=(W0/(topk_num-1))*mask.expand_as(W0)
W = torch.exp(-(2 - 2 * W0))
W*=mask_top
mask_top = (W0 > self.topk_num).long() # thre
W *= mask_top
D= W.sum(1)
D_sqrt_inv = torch.sqrt(1.0/(D+self.eps))
D1 = torch.unsqueeze(D_sqrt_inv,2).repeat(1,1,neighbor_num)
D2 = torch.unsqueeze(D_sqrt_inv,1).repeat(1,neighbor_num,1)
W = D1*W*D2
W*=mask.expand_as(W)
#import pdb;pdb.set_trace()
preds = torch.matmul(torch.inverse(torch.eye(neighbor_num).unsqueeze(0).expand_as(W).cuda()-self.alpha*W+self.eps), Y)
##for debug###
#print('preds:',(preds[0][0][:topk_num]).tolist())
#import pdb;pdb.set_trace()
if train:
# loss=0
# for F0 in preds:
# #normalize
# F0[0]/=F0[0].max().item()
# loss+=self.loss(F0[0,1:neighbor_num],gt_conf[i,1:neighbor_num])
# loss/=len(indexes)
# return loss
with torch.no_grad():
max_num,_=preds.max(2)
preds/=max_num.unsqueeze(2).expand_as(preds)
loss=self.loss(preds[:,0,1:neighbor_num],gt_conf[:,1:neighbor_num])
if torch.isnan(loss):
print('nan')
import pdb;pdb.set_trace()
return loss
else:
return preds,W0,all_neighbors
class Sub_Cluster_Level_LP(nn.Module):
def __init__(self, alpha,topk_num=5,beta=1,method=1):
super(Sub_Cluster_Level_LP, self).__init__()
self.alpha=alpha
self.eps = np.finfo(float).eps
self.loss=torch.nn.BCEWithLogitsLoss()
self.once_forward=1
self.beta=beta
self.topk_num=topk_num
self.w_use_dist=1
self.method=method
def forward(self,indexes,features,neighbor_num,ori_0,ori_knn_neighbor,gt_conf_ori=None,f_s=None,train=0,sub_label=None,gt_sub_label=None,gt_label=None,debug_label=None,bias=0):
bs=len(indexes)
sub_sum = torch.zeros(sub_label.max()+1, 2048).float().cuda()
sub_sum.index_add_(0, sub_label, features)
nums = torch.zeros(sub_label.max()+1, 1).float().cuda()
nums.index_add_(0, sub_label, torch.ones(len(sub_label),1).float().cuda())
mask = (nums>0).float()
sub_sum /= (mask*nums+(1-mask)).clone().expand_as(sub_sum)
if not train:
print('sub max:',nums.max())
#cal Y
Y=torch.zeros(bs,neighbor_num,neighbor_num+1).cuda()
indices=[]
mapping=[]
if train:
gt_conf=torch.zeros_like(gt_conf_ori)
for i in range(bs):
output, inverse_indices,cnts=torch.unique(gt_sub_label[i],return_inverse=True,return_counts=True)
if train:
output2,inverse_indices2=torch.unique(gt_label[i],return_inverse=True)
#change gt
out_gt=torch.unique(inverse_indices[inverse_indices2==inverse_indices2[0]])
gt_conf[i,out_gt]=1
indices.append(inverse_indices)
mapping.append(output)
Y[i,torch.arange(neighbor_num),inverse_indices]=1
Y[i,:,:len(cnts)]/=cnts.unsqueeze(0)
Y[:,0]=0
# masks[:,0,0]=0
# masks[:,0,1:]=1
Y[:,1:,neighbor_num]=self.beta/(neighbor_num-1) #bias
#cal W
index_feat=sub_sum[sub_label[ori_knn_neighbor.view(-1)]].view(bs,-1,2048)
#index_feat=torch.cat((f_s,index_feat[:,1:]))
if self.method==4:
i_feat=torch.cat((f_s.unsqueeze(1),index_feat[:,1:neighbor_num]),dim=1)
i_feat/=torch.norm(i_feat,dim=2,keepdim=True)
W0=i_feat.bmm(i_feat.permute(0,2,1))
#
else:
if self.method!=3 and self.method!=2:
i_feat=torch.cat((f_s.unsqueeze(1),index_feat[:,1:neighbor_num]),dim=1)
W0=i_feat.bmm(i_feat.permute(0,2,1))
else:
i_feat=torch.cat((f_s.unsqueeze(1),index_feat[:,1:neighbor_num]),dim=1)
i_feat/=torch.norm(i_feat,dim=2,keepdim=True)
W0=i_feat.bmm(i_feat.permute(0,2,1))
topk_num=self.topk_num
topk, indices = torch.topk(W0, topk_num,dim=2)
mask_top = torch.zeros_like(W0)
mask_top = mask_top.scatter(2, indices, 1)
mask_top = ((mask_top>0)&(mask_top.permute((0,2,1))>0)).type(torch.float32)
#mask_top = ((mask_top>0)+mask_top.permute((0,2,1))>0).type(torch.float32) #union
#print('sub W:',(W0[0][0][:topk_num]).tolist())
#W0=torch.exp(W0)
masks=(1-torch.eye(neighbor_num)).unsqueeze(0).cuda()
if not self.w_use_dist:
W=(W0/4)*masks.expand_as(W0)
W*=mask_top
else:
if self.method==1 or self.method==3:#mask-->norm
W0=torch.exp(-(2-2*W0)) #dist
W=W0*mask_top
#normalize
D= W.sum(1)
D_sqrt_inv = torch.sqrt(1.0/(D+self.eps))
D1 = torch.unsqueeze(D_sqrt_inv,2).repeat(1,1,neighbor_num)
D2 = torch.unsqueeze(D_sqrt_inv,1).repeat(1,neighbor_num,1)
W = D1*W*D2
W*=masks.expand_as(W)
elif self.method==2:#norm-->mask
W=torch.exp(-(2-2*W0)) #dist
#normalize
D= W.sum(1)
D_sqrt_inv = torch.sqrt(1.0/(D+self.eps))
D1 = torch.unsqueeze(D_sqrt_inv,2).repeat(1,1,neighbor_num)
D2 = torch.unsqueeze(D_sqrt_inv,1).repeat(1,neighbor_num,1)
W = D1*W*D2
W*=masks.expand_as(W)
W*=mask_top
# D= W.sum(1)
# D_sqrt_inv = torch.sqrt(1.0/(D+self.eps))
# D1 = torch.unsqueeze(D_sqrt_inv,2).repeat(1,1,neighbor_num)
# D2 = torch.unsqueeze(D_sqrt_inv,1).repeat(1,neighbor_num,1)
# S = D1*W*D2
preds = torch.matmul(torch.inverse(torch.eye(neighbor_num).unsqueeze(0).expand_as(W).cuda()-self.alpha*W+self.eps), Y)
merge_match_id=torch.argmax(Y[0],dim=0)
merge_sim=W0[0][0][merge_match_id[preds[0][0]>bias]]
print('sub merge sim:',merge_sim)
#import pdb;pdb.set_trace()
#print('sub preds:',(preds[0][0][:topk_num]).tolist())
if train:
with torch.no_grad():
max_num,_=preds.max(2)
preds/=max_num.unsqueeze(2).expand_as(preds)
loss=self.loss(preds[:,0,1:neighbor_num],gt_conf[:,1:neighbor_num])
if torch.isnan(loss):
print('nan')
import pdb;pdb.set_trace()
return loss
else:
return preds,sub_sum,nums,mapping,indices
class Cluster_Level_LP(nn.Module):
    """Cluster-level label propagation (LP).

    For each anchor, builds an affinity matrix ``W`` over the clusters found
    in its k-NN neighborhood, then solves the closed-form propagation
    ``preds = (I - alpha * W)^{-1} Y`` where ``Y`` one-hot encodes the
    cluster id of every neighbor.  ``self.method`` selects one of many
    experimental variants for how ``W`` is built, masked and normalized.
    """
    def __init__(self, alpha,topk_num,beta=1,method=1,point_wei=0.6,connect_num=20):
        # alpha: propagation strength used in the (I - alpha*W)^-1 solve.
        # topk_num: overloaded by `method` -- a top-k count for sparsifying W
        #   (methods 6/7/8 and <4) OR a similarity threshold compared against
        #   W0 / point_W (methods 5/9/10/11/12/14/15).
        # beta: bias weight; only referenced from commented-out code below.
        # method: selects the W-construction variant in forward().
        # point_wei: blend factor between point features and cluster means
        #   (methods 11/12).
        # connect_num: k used for the mutual top-k connectivity mask.
        super(Cluster_Level_LP, self).__init__()
        self.alpha=alpha
        self.eps = np.finfo(float).eps
        self.loss=torch.nn.BCEWithLogitsLoss()
        self.beta=beta
        self.only_consider_once=1
        self.topk_num=topk_num
        self.w_use_dist=1
        self.method=method
        self.point_wei=point_wei
        self.connect_num=connect_num
    def forward(self, indexes, features,neighbor_num0,ori_0,ori_knn_neighbor,gt_conf_ori=None,f_s=None,train=0,labels=None,gt_label=None,debug_label=None,bias=0,step=2,point_W=None,two_hop=0,memory=None,point_pred=None):
        """Run cluster-level LP over each anchor's neighborhood.

        Returns ``(preds, clu_sum, nums, mapping, indices, indes)``; the
        method==13 branch returns early with a thresholded ``point_pred``.

        NOTE(review): ``indes`` is only assigned inside the branches for
        methods 10/11/12/13/14/15; with method 4/5/6/7/8/9/<4 the final
        return would raise NameError on ``indes`` -- confirm which method
        values are actually exercised by callers.
        NOTE(review): the training-loss code at the bottom is commented out,
        so ``train`` currently only affects ``gt_conf`` bookkeeping and
        logging; forward always returns the inference tuple.
        """
        bs=len(indexes)
        neighbor_num=ori_knn_neighbor.size(1)
        #masks[:,0,0]=0
        #masks[:,0,1:]=1
        # Mean feature per cluster: index_add_ accumulates per-label sums and
        # counts; empty clusters divide by 1 (via `mask`) to avoid 0/0.
        clu_sum = torch.zeros(labels.max() + 1, 2048).float().cuda()
        clu_sum.index_add_(0, labels, features)
        nums = torch.zeros(labels.max() + 1, 1).float().cuda()
        nums.index_add_(0, labels, torch.ones(len(labels), 1).float().cuda())
        mask = (nums > 0).float()
        clu_sum /= (mask * nums + (1 - mask)).clone().expand_as(clu_sum)
        if not train:
            print('step {} max:'.format(step), nums.max())
        #Y[:,1:,neighbor_num]=self.beta/(neighbor_num-1) #bias
        #cal W
        # --- method 15: mutual point-level top-k connectivity, then a
        # cluster-similarity threshold on W0 ---
        if self.method==15: #first point topk+cluster thre
            Y = torch.zeros(bs, neighbor_num, neighbor_num).cuda()
            indices = []
            mapping = []
            # for debug
            clu_cnts = []
            tmp = []
            for i in range(bs):
                # Collapse neighbors to unique cluster ids; Y one-hot encodes
                # each valid neighbor's cluster column.
                output, inverse_indices, cnts = torch.unique(gt_label[i][ori_knn_neighbor[i] > -1], return_inverse=True,
                                                             return_counts=True)
                indices.append(inverse_indices)
                mapping.append(output)
                tmp.extend(output.tolist())
                Y[i, torch.arange(len(inverse_indices)), inverse_indices] = 1
                clu_cnts.append(len(output))
            index_feat = clu_sum[labels[ori_knn_neighbor.view(-1)]].view(bs, -1, 2048)
            Y[:, 0] = 0
            # normalize
            i_feat = index_feat
            # i_feat /= torch.norm(i_feat, dim=2, keepdim=True)
            W0 = i_feat.bmm(i_feat.permute(0, 2, 1))
            mask_top = torch.ones_like(W0)  # only keep one
            indes = []
            for i in range(bs):
                # Keep only the first neighbor of each cluster; drop the rest
                # from both W and Y (de-duplication).
                filter_lab, inde = np.unique((gt_label[i, 1:][ori_knn_neighbor[i, 1:] > -1]).cpu().numpy(),
                                             return_index=True)
                inde += 1
                indes.append(inde)
                del_list = list(set(np.arange(1, neighbor_num).tolist()) - set(inde))
                mask_top[i, del_list] = 0
                mask_top[i, :, del_list] = 0
                Y[i, del_list] = 0
            masks = (1 - torch.eye(neighbor_num)).unsqueeze(0).cuda()
            # normalize
            W = torch.exp(-(2 - 2 * W0))  # dist
            W[torch.eye(neighbor_num).unsqueeze(0).expand_as(W).long() > 0] = 1  # self-->1
            W *= mask_top  # unique
            # import pdb;pdb.set_trace()
            # topk_num = self.topk_num
            # topk, indices = torch.topk(W, topk_num, dim=2)
            # mask_top = torch.zeros_like(W)
            # mask_top = mask_top.scatter(2, indices, 1)
            # mask_top = ((mask_top > 0) & (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
            # connect method####
            # Mutual top-k connectivity derived from point-level similarity.
            topk, indices = torch.topk(point_W, self.connect_num, dim=-1)
            mask_top = torch.zeros_like(W)
            mask_top = mask_top.scatter(2, indices, 1)
            mask_top = ((mask_top > 0) & (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
            W *= mask_top
            # mask_top = (W0 > self.topk_num).long() # thre
            # NOTE(review): unlike method 14, the threshold here uses the
            # cluster similarity W0 rather than point_W -- confirm intended.
            W *= ((W0 > self.topk_num).long())
            # normalize
            # Symmetric normalization D^-1/2 W D^-1/2, then zero the diagonal.
            D = W.sum(1)
            D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
            D1 = torch.unsqueeze(D_sqrt_inv, 2).repeat(1, 1, neighbor_num)
            D2 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, neighbor_num, 1)
            W = D1 * W * D2
            W *= masks.expand_as(W)
        # --- method 14: same as 15 but thresholds on point_W instead of W0 ---
        if self.method==14:
            Y = torch.zeros(bs, neighbor_num, neighbor_num).cuda()
            indices = []
            mapping = []
            # for debug
            clu_cnts = []
            tmp = []
            for i in range(bs):
                output, inverse_indices, cnts = torch.unique(gt_label[i][ori_knn_neighbor[i] > -1], return_inverse=True,
                                                             return_counts=True)
                indices.append(inverse_indices)
                mapping.append(output)
                tmp.extend(output.tolist())
                Y[i, torch.arange(len(inverse_indices)), inverse_indices] = 1
                clu_cnts.append(len(output))
            index_feat = clu_sum[labels[ori_knn_neighbor.view(-1)]].view(bs, -1, 2048)
            Y[:, 0] = 0
            # normalize
            i_feat = index_feat
            # i_feat /= torch.norm(i_feat, dim=2, keepdim=True)
            W0 = i_feat.bmm(i_feat.permute(0, 2, 1))
            mask_top = torch.ones_like(W0)  # only keep one
            indes = []
            for i in range(bs):
                filter_lab, inde = np.unique((gt_label[i, 1:][ori_knn_neighbor[i, 1:] > -1]).cpu().numpy(),
                                             return_index=True)
                inde += 1
                indes.append(inde)
                del_list = list(set(np.arange(1, neighbor_num).tolist()) - set(inde))
                mask_top[i, del_list] = 0
                mask_top[i, :, del_list] = 0
                Y[i, del_list] = 0
            masks = (1 - torch.eye(neighbor_num)).unsqueeze(0).cuda()
            # normalize
            W = torch.exp(-(2 - 2 * W0))  # dist
            W[torch.eye(neighbor_num).unsqueeze(0).expand_as(W).long() > 0] = 1  # self-->1
            W *= mask_top  # unique
            # import pdb;pdb.set_trace()
            # topk_num = self.topk_num
            # topk, indices = torch.topk(W, topk_num, dim=2)
            # mask_top = torch.zeros_like(W)
            # mask_top = mask_top.scatter(2, indices, 1)
            # mask_top = ((mask_top > 0) & (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
            #connect method####
            topk, indices = torch.topk(point_W, self.connect_num, dim=-1)
            mask_top = torch.zeros_like(W)
            mask_top = mask_top.scatter(2, indices, 1)
            mask_top = ((mask_top > 0) & (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
            W *= mask_top
            # mask_top = (W0 > self.topk_num).long() # thre
            # Threshold on the point-level affinity (contrast with method 15).
            W *= ((point_W > self.topk_num).long())
            # normalize
            D = W.sum(1)
            D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
            D1 = torch.unsqueeze(D_sqrt_inv, 2).repeat(1, 1, neighbor_num)
            D2 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, neighbor_num, 1)
            W = D1 * W * D2
            W *= masks.expand_as(W)
        # --- method 13: debug shortcut, no LP solve -- threshold point_pred
        # at 0.4 and return immediately ---
        if self.method==13:#jaccard debug
            indices = []
            mapping = []
            indes = []
            for i in range(bs):
                indices.append(torch.arange(neighbor_num))
                mapping.append(gt_label[i])
            preds=(point_pred>=0.4).int()
            # Zero the anchor row unless it keeps more than one positive.
            preds[:,0]*=(torch.sum(preds[:,0],dim=-1,keepdim=True)>1)
            return preds, clu_sum, nums, mapping, indices, indes
        # --- method 12: blend point features with cluster means, mutual
        # top-k on W0 plus a W0 threshold ---
        if self.method==12: #change weights
            point_wei = self.point_wei
            Y = torch.zeros(bs, neighbor_num, neighbor_num).cuda()
            indices = []
            mapping = []
            # for debug
            clu_cnts = []
            tmp = []
            for i in range(bs):
                output, inverse_indices, cnts = torch.unique(gt_label[i][ori_knn_neighbor[i] > -1], return_inverse=True,
                                                             return_counts=True)
                indices.append(inverse_indices)
                mapping.append(output)
                tmp.extend(output.tolist())
                Y[i, torch.arange(len(inverse_indices)), inverse_indices] = 1
                clu_cnts.append(len(output))
            #add weights
            index_feat = point_wei*features[ori_knn_neighbor.view(-1)].view(bs,-1,2048)+(1-point_wei)*clu_sum[labels[ori_knn_neighbor.view(-1)]].view(bs, -1, 2048)
            Y[:, 0] = 0
            # normalize
            i_feat = index_feat
            # i_feat /= torch.norm(i_feat, dim=2, keepdim=True)
            W0 = i_feat.bmm(i_feat.permute(0, 2, 1))
            mask_top = torch.ones_like(W0)  # only keep one
            indes = []
            for i in range(bs):
                filter_lab, inde = np.unique((gt_label[i, 1:][ori_knn_neighbor[i, 1:] > -1]).cpu().numpy(),
                                             return_index=True)
                inde += 1
                indes.append(inde)
                del_list = list(set(np.arange(1, neighbor_num).tolist()) - set(inde))
                mask_top[i, del_list] = 0
                mask_top[i, :, del_list] = 0
                Y[i, del_list] = 0
            masks = (1 - torch.eye(neighbor_num)).unsqueeze(0).cuda()
            # normalize
            W = torch.exp(-(2 - 2 * W0))  # dist
            W[torch.eye(neighbor_num).unsqueeze(0).expand_as(W).long() > 0] = 1  # self-->1
            W *= mask_top  # unique
            # import pdb;pdb.set_trace()
            # topk_num = self.topk_num
            # topk, indices = torch.topk(W, topk_num, dim=2)
            # mask_top = torch.zeros_like(W)
            # mask_top = mask_top.scatter(2, indices, 1)
            # mask_top = ((mask_top > 0) & (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
            topk, indices = torch.topk(W0, self.connect_num, dim=2)
            mask_top = torch.zeros_like(W)
            mask_top = mask_top.scatter(2, indices, 1)
            mask_top = ((mask_top > 0) & (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
            W *= mask_top
            #mask_top = (W0 > self.topk_num).long() # thre
            W *= ((W0 > self.topk_num).long())
            # normalize
            D = W.sum(1)
            D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
            D1 = torch.unsqueeze(D_sqrt_inv, 2).repeat(1, 1, neighbor_num)
            D2 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, neighbor_num, 1)
            W = D1 * W * D2
            W *= masks.expand_as(W)
        # --- method 11: blended features; two_hop pads the graph to a fixed
        # 400 nodes and builds W0 per-sample ---
        if self.method==11: #change weights
            point_wei = self.point_wei
            if two_hop:
                # Fixed-size padded graph for variable-length two-hop lists.
                neighbor_num=400
                W0=torch.zeros((bs,neighbor_num,neighbor_num)).cuda()
                Y=torch.zeros((bs,neighbor_num,neighbor_num)).cuda()
                indices = []
                mapping = []
                # for debug
                clu_cnts = []
                for i in range(bs):
                    output, inverse_indices, cnts = torch.unique(gt_label[i][ori_knn_neighbor[i]>-1], return_inverse=True, return_counts=True)
                    indices.append(inverse_indices)
                    mapping.append(output)
                    Y[i, torch.arange(len(inverse_indices)), inverse_indices] = 1
                    clu_cnts.append(len(output))
                    ind_feat=point_wei*features[ori_knn_neighbor[i][ori_knn_neighbor[i]>-1]]+(1-point_wei)*clu_sum[labels[ori_knn_neighbor[i][ori_knn_neighbor[i]>-1]]]
                    W0[i,:len(ind_feat),:len(ind_feat)]=ind_feat.mm(ind_feat.t())
                Y[:,0]=0
                mask_top = torch.ones_like(W0) # only keep one
                indes = []
                for i in range(bs):
                    filter_lab, inde = np.unique(gt_label[i, 1:][ori_knn_neighbor[i,1:]>-1].cpu().numpy(), return_index=True)
                    inde += 1
                    indes.append(inde)
                    del_list = list(set(np.arange(1, neighbor_num).tolist()) - set(inde))
                    mask_top[i, del_list] = 0
                    mask_top[i, :, del_list] = 0
                    Y[i, del_list] = 0
                masks = (1 - torch.eye(neighbor_num)).unsqueeze(0).cuda()
                # normalize
                W = torch.exp(-(2 - 2 * W0))  # dist
                W[torch.eye(neighbor_num).unsqueeze(0).expand_as(W).long() > 0] = 1  # self-->1
                W *= mask_top  # unique
                #bug-->cluster num
                # topk, indices = torch.topk(W, 15, dim=2)
                # mask_top = torch.zeros_like(W)
                # mask_top = mask_top.scatter(2, indices, 1)
                # mask_top = ((mask_top > 0) | (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
                # W *= mask_top
                #0217
                # point_feat=features[ori_knn_neighbor.view(-1)].view(bs,-1,2048)
                # W_point=point_feat.bmm(point_feat.permute((0,2,1)))
                #0218
                # NOTE(review): the mutual top-k size is hard-coded to 20 here
                # rather than using self.connect_num -- confirm intended.
                topk, indices = torch.topk(W0, 20, dim=2)
                mask_top = torch.zeros_like(W)
                mask_top = mask_top.scatter(2, indices, 1)
                mask_top = ((mask_top > 0) & (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
                W *= mask_top
                #0218-->move before
                mask_top = (W0 > self.topk_num).long()  # thre
                W *= mask_top
                # normalize
                D = W.sum(1)
                D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
                D1 = torch.unsqueeze(D_sqrt_inv, 2).repeat(1, 1, neighbor_num)
                D2 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, neighbor_num, 1)
                W = D1 * W * D2
                W *= masks.expand_as(W)
            else:
                Y = torch.zeros(bs, neighbor_num, neighbor_num).cuda()
                indices = []
                mapping = []
                # for debug
                clu_cnts = []
                tmp = []
                for i in range(bs):
                    output, inverse_indices, cnts = torch.unique(gt_label[i][ori_knn_neighbor[i] > -1], return_inverse=True,
                                                                 return_counts=True)
                    indices.append(inverse_indices)
                    mapping.append(output)
                    tmp.extend(output.tolist())
                    Y[i, torch.arange(len(inverse_indices)), inverse_indices] = 1
                    clu_cnts.append(len(output))
                #add weights
                index_feat = point_wei*features[ori_knn_neighbor.view(-1)].view(bs,-1,2048)+(1-point_wei)*clu_sum[labels[ori_knn_neighbor.view(-1)]].view(bs, -1, 2048)
                Y[:, 0] = 0
                # normalize
                i_feat = index_feat
                # i_feat /= torch.norm(i_feat, dim=2, keepdim=True)
                W0 = i_feat.bmm(i_feat.permute(0, 2, 1))
                mask_top = torch.ones_like(W0)  # only keep one
                indes = []
                for i in range(bs):
                    filter_lab, inde = np.unique((gt_label[i, 1:][ori_knn_neighbor[i, 1:] > -1]).cpu().numpy(),
                                                 return_index=True)
                    inde += 1
                    indes.append(inde)
                    del_list = list(set(np.arange(1, neighbor_num).tolist()) - set(inde))
                    mask_top[i, del_list] = 0
                    mask_top[i, :, del_list] = 0
                    Y[i, del_list] = 0
                masks = (1 - torch.eye(neighbor_num)).unsqueeze(0).cuda()
                # normalize
                W = torch.exp(-(2 - 2 * W0))  # dist
                W[torch.eye(neighbor_num).unsqueeze(0).expand_as(W).long() > 0] = 1  # self-->1
                W *= mask_top  # unique
                # import pdb;pdb.set_trace()
                # topk_num = self.topk_num
                # topk, indices = torch.topk(W, topk_num, dim=2)
                # mask_top = torch.zeros_like(W)
                # mask_top = mask_top.scatter(2, indices, 1)
                # mask_top = ((mask_top > 0) & (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
                mask_top = (W0 > self.topk_num).long()  # thre
                W *= mask_top
                # normalize
                D = W.sum(1)
                D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
                D1 = torch.unsqueeze(D_sqrt_inv, 2).repeat(1, 1, neighbor_num)
                D2 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, neighbor_num, 1)
                W = D1 * W * D2
                W *= masks.expand_as(W)
        # --- method 10: recompute each large cluster's representative from
        # its top-`candi_num` members before building W ---
        if self.method==10:#pick 3 for each cluster
            candi_num=3
            Y = torch.zeros(bs, neighbor_num, neighbor_num).cuda()
            indices = []
            mapping = []
            # for debug
            clu_cnts = []
            #all_output,all_inverse,all_cnts=torch.unique(labels.cpu().clone(),return_inverse=True,return_counts=True)
            #all_output_3=set(all_output[all_cnts>candi_num].tolist())
            all_output_3=set(torch.arange(nums.size(0))[nums.cpu().view(-1)>candi_num].tolist())
            tmp=[]
            for i in range(bs):
                output, inverse_indices, cnts = torch.unique(gt_label[i][ori_knn_neighbor[i] > -1], return_inverse=True,
                                                             return_counts=True)
                indices.append(inverse_indices)
                mapping.append(output)
                tmp.extend(output.tolist())
                Y[i, torch.arange(len(inverse_indices)), inverse_indices] = 1
                clu_cnts.append(len(output))
            # only pick 3
            #import pdb;pdb.set_trace()
            filter_out = list(set(tmp) & all_output_3)
            if len(filter_out)>0:
                print('len(filter_out):',len(filter_out))
                for cc in filter_out:
                    # re cal the index feat
                    # NOTE(review): this mutates clu_sum in place, so the
                    # returned clu_sum differs from the plain mean -- confirm.
                    cc_feat=features[labels==cc]
                    cc_sim=(cc_feat.mm(clu_sum[cc].unsqueeze(1))).view(-1)
                    topk, indices = torch.topk(cc_sim, candi_num)
                    clu_sum[cc]=torch.mean(cc_feat[indices],dim=0)
            index_feat = clu_sum[labels[ori_knn_neighbor.view(-1)]].view(bs, -1, 2048)
            Y[:, 0] = 0
            # normalize
            i_feat = index_feat
            # i_feat /= torch.norm(i_feat, dim=2, keepdim=True)
            W0 = i_feat.bmm(i_feat.permute(0, 2, 1))
            mask_top = torch.ones_like(W0)  # only keep one
            indes=[]
            for i in range(bs):
                filter_lab, inde = np.unique((gt_label[i, 1:][ori_knn_neighbor[i, 1:] > -1]).cpu().numpy(),
                                             return_index=True)
                inde += 1
                indes.append(inde)
                del_list = list(set(np.arange(1, neighbor_num).tolist()) - set(inde))
                mask_top[i, del_list] = 0
                mask_top[i, :, del_list] = 0
                Y[i, del_list] = 0
            masks = (1 - torch.eye(neighbor_num)).unsqueeze(0).cuda()
            # normalize
            W = torch.exp(-(2 - 2 * W0))  # dist
            W[torch.eye(neighbor_num).unsqueeze(0).expand_as(W).long() > 0] = 1  # self-->1
            W *= mask_top  # unique
            # import pdb;pdb.set_trace()
            # topk_num = self.topk_num
            # topk, indices = torch.topk(W, topk_num, dim=2)
            # mask_top = torch.zeros_like(W)
            # mask_top = mask_top.scatter(2, indices, 1)
            # mask_top = ((mask_top > 0) & (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
            mask_top = (W0 > self.topk_num).long()  # thre
            W *= mask_top
            # normalize
            D = W.sum(1)
            D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
            D1 = torch.unsqueeze(D_sqrt_inv, 2).repeat(1, 1, neighbor_num)
            D2 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, neighbor_num, 1)
            W = D1 * W * D2
            W *= masks.expand_as(W)
        # --- method 7: point features (un-normalized), top-k on W0 ---
        if self.method==7:
            index_feat = features[ori_knn_neighbor.view(-1)].view(bs,-1,2048)#clu_sum[labels[ori_knn_neighbor.view(-1)]].view(bs, -1, 2048)
            # cal Y
            Y = torch.zeros(bs, neighbor_num, neighbor_num + 1).cuda()
            indices = []
            mapping = []
            for i in range(bs):
                output, inverse_indices, cnts = torch.unique(gt_label[i], return_inverse=True, return_counts=True)
                indices.append(inverse_indices)
                mapping.append(output)
                Y[i, torch.arange(neighbor_num), inverse_indices] = 1
            Y[:, 0] = 0
            # Replace slot 0 with the anchor's own feature f_s.
            i_feat = torch.cat((f_s.unsqueeze(1), index_feat[:, 1:neighbor_num]), dim=1)
            #i_feat /= torch.norm(i_feat, dim=2, keepdim=True)
            W0 = i_feat.bmm(i_feat.permute(0, 2, 1))
            topk, indices = torch.topk(W0, self.topk_num, dim=2)  # use point level topk
            mask_top = torch.zeros_like(W0)
            mask_top = mask_top.scatter(2, indices, 1)
            mask_top = ((mask_top > 0) & (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
            for i in range(bs):  # unique
                filter_lab, inde = np.unique(gt_label[i, 1:].cpu().numpy(), return_index=True)
                inde += 1
                del_list = list(set(np.arange(1, neighbor_num).tolist()) - set(inde))
                mask_top[i, del_list] = 0
                mask_top[i, :, del_list] = 0
                Y[i, del_list] = 0
            W = torch.exp(-(2 - 2 * W0))  # dist
            W *= mask_top
            # normalize
            D = W.sum(1)
            D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
            D1 = torch.unsqueeze(D_sqrt_inv, 2).repeat(1, 1, neighbor_num)
            D2 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, neighbor_num, 1)
            W = D1 * W * D2
            masks = (1 - torch.eye(neighbor_num)).unsqueeze(0).cuda()  # wo self
            W *= masks.expand_as(W)
        # --- method 8: L2-normalized cluster means, top-k from point_W;
        # diagonal masked BEFORE degree normalization (contrast with 7) ---
        if self.method==8:
            index_feat = clu_sum[labels[ori_knn_neighbor.view(-1)]].view(bs, -1, 2048)
            # cal Y
            Y = torch.zeros(bs, neighbor_num, neighbor_num + 1).cuda()
            indices = []
            mapping = []
            for i in range(bs):
                output, inverse_indices, cnts = torch.unique(gt_label[i], return_inverse=True, return_counts=True)
                indices.append(inverse_indices)
                mapping.append(output)
                Y[i, torch.arange(neighbor_num), inverse_indices] = 1
            Y[:, 0] = 0
            i_feat = torch.cat((f_s.unsqueeze(1), index_feat[:, 1:neighbor_num]), dim=1)
            i_feat /= torch.norm(i_feat, dim=2, keepdim=True)
            W0 = i_feat.bmm(i_feat.permute(0, 2, 1))
            topk, indices = torch.topk(point_W, self.topk_num, dim=2)  # use point level topk
            mask_top = torch.zeros_like(W0)
            mask_top = mask_top.scatter(2, indices, 1)
            mask_top = ((mask_top > 0) & (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
            for i in range(bs):  # unique
                filter_lab, inde = np.unique(gt_label[i, 1:].cpu().numpy(), return_index=True)
                inde += 1
                del_list = list(set(np.arange(1, neighbor_num).tolist()) - set(inde))
                mask_top[i, del_list] = 0
                mask_top[i, :, del_list] = 0
                Y[i, del_list] = 0
            W = torch.exp(-(2 - 2 * W0))  # dist
            W *= mask_top
            masks = (1 - torch.eye(neighbor_num)).unsqueeze(0).cuda()  # wo self
            W *= masks.expand_as(W)
            # normalize
            D = W.sum(1)
            D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
            D1 = torch.unsqueeze(D_sqrt_inv, 2).repeat(1, 1, neighbor_num)
            D2 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, neighbor_num, 1)
            W = D1 * W * D2
        # --- method 6: like 8 but diagonal masked AFTER normalization ---
        if self.method==6:
            index_feat = clu_sum[labels[ori_knn_neighbor.view(-1)]].view(bs, -1, 2048)
            # cal Y
            Y = torch.zeros(bs, neighbor_num, neighbor_num + 1).cuda()
            indices = []
            mapping = []
            for i in range(bs):
                output, inverse_indices, cnts = torch.unique(gt_label[i], return_inverse=True, return_counts=True)
                indices.append(inverse_indices)
                mapping.append(output)
                Y[i, torch.arange(neighbor_num), inverse_indices] = 1
            Y[:, 0] = 0
            i_feat = torch.cat((f_s.unsqueeze(1), index_feat[:, 1:neighbor_num]), dim=1)
            i_feat /= torch.norm(i_feat, dim=2, keepdim=True)
            W0 = i_feat.bmm(i_feat.permute(0, 2, 1))
            topk, indices = torch.topk(point_W, int(self.topk_num), dim=2)#use point level topk
            mask_top = torch.zeros_like(W0)
            mask_top = mask_top.scatter(2, indices, 1)
            mask_top = ((mask_top > 0) & (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
            for i in range(bs):#unique
                filter_lab, inde = np.unique(gt_label[i, 1:].cpu().numpy(), return_index=True)
                inde += 1
                del_list = list(set(np.arange(1, neighbor_num).tolist()) - set(inde))
                mask_top[i, del_list] = 0
                mask_top[i, :, del_list] = 0
                Y[i, del_list] = 0
            W = torch.exp(-(2 - 2 * W0))  # dist
            W *= mask_top
            # normalize
            D = W.sum(1)
            D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
            D1 = torch.unsqueeze(D_sqrt_inv, 2).repeat(1, 1, neighbor_num)
            D2 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, neighbor_num, 1)
            W = D1 * W * D2
            masks = (1 - torch.eye(neighbor_num)).unsqueeze(0).cuda()#wo self
            W *= masks.expand_as(W)
        # --- method 5: cluster means with a W0 threshold; two_hop pads the
        # graph to 99 nodes ---
        if self.method==5:#0128
            if two_hop:
                neighbor_num=99
                W0=torch.zeros((bs,neighbor_num,neighbor_num)).cuda()
                Y=torch.zeros((bs,neighbor_num,neighbor_num)).cuda()
                indices = []
                mapping = []
                # for debug
                clu_cnts = []
                for i in range(bs):
                    output, inverse_indices, cnts = torch.unique(gt_label[i][ori_knn_neighbor[i]>-1], return_inverse=True, return_counts=True)
                    indices.append(inverse_indices)
                    mapping.append(output)
                    Y[i, torch.arange(len(inverse_indices)), inverse_indices] = 1
                    clu_cnts.append(len(output))
                    ind_feat=clu_sum[labels[ori_knn_neighbor[i][ori_knn_neighbor[i]>-1]]]
                    W0[i,:len(ind_feat),:len(ind_feat)]=ind_feat.mm(ind_feat.t())
                Y[:,0]=0
                mask_top = torch.ones_like(W0) # only keep one
                for i in range(bs):
                    filter_lab, inde = np.unique(gt_label[i, 1:][ori_knn_neighbor[i,1:]>-1].cpu().numpy(), return_index=True)
                    inde += 1
                    del_list = list(set(np.arange(1, neighbor_num).tolist()) - set(inde))
                    mask_top[i, del_list] = 0
                    mask_top[i, :, del_list] = 0
                    Y[i, del_list] = 0
                masks = (1 - torch.eye(neighbor_num)).unsqueeze(0).cuda()
            else:
                index_feat = clu_sum[labels[ori_knn_neighbor.view(-1)]].view(bs, -1, 2048)
                # cal Y
                Y = torch.zeros(bs, neighbor_num, neighbor_num).cuda()
                indices = []
                mapping = []
                #for debug
                clu_cnts=[]
                for i in range(bs):
                    output, inverse_indices, cnts = torch.unique(gt_label[i][ori_knn_neighbor[i]>-1], return_inverse=True, return_counts=True)
                    indices.append(inverse_indices)
                    mapping.append(output)
                    Y[i, torch.arange(len(inverse_indices)), inverse_indices] = 1
                    clu_cnts.append(len(output))
                print('clu cnts:',clu_cnts)
                Y[:, 0] = 0
                # normalize
                i_feat = index_feat
                #i_feat /= torch.norm(i_feat, dim=2, keepdim=True)
                W0 = i_feat.bmm(i_feat.permute(0, 2, 1))
                mask_top = torch.ones_like(W0) #only keep one
                for i in range(bs):
                    filter_lab, inde = np.unique((gt_label[i, 1:][ori_knn_neighbor[i,1:]>-1]).cpu().numpy(), return_index=True)
                    inde += 1
                    del_list = list(set(np.arange(1, neighbor_num).tolist()) - set(inde))
                    mask_top[i, del_list] = 0
                    mask_top[i, :, del_list] = 0
                    Y[i, del_list] = 0
                masks = (1 - torch.eye(neighbor_num)).unsqueeze(0).cuda()
            # normalize
            W = torch.exp(-(2 - 2 * W0))  # dist
            W[torch.eye(neighbor_num).unsqueeze(0).expand_as(W).long()>0] = 1  # self-->1
            W *= mask_top #unique
            #import pdb;pdb.set_trace()
            # topk_num = self.topk_num
            # topk, indices = torch.topk(W, topk_num, dim=2)
            # mask_top = torch.zeros_like(W)
            # mask_top = mask_top.scatter(2, indices, 1)
            # mask_top = ((mask_top > 0) & (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
            mask_top=(W0>self.topk_num).long()#thre
            W*=mask_top
            # normalize
            D = W.sum(1)
            D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
            D1 = torch.unsqueeze(D_sqrt_inv, 2).repeat(1, 1, neighbor_num)
            D2 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, neighbor_num, 1)
            W = D1 * W * D2
            W *= masks.expand_as(W)
        #method 4-->fix bug (based on method 3)
        # --- method 9: exp(W0) affinity with plain (row-sum) degree
        # normalization instead of the symmetric D^-1/2 form ---
        if self.method==9:#0210
            if two_hop:
                neighbor_num=100
                W0=torch.zeros((bs,neighbor_num,neighbor_num)).cuda()
                Y=torch.zeros((bs,neighbor_num,neighbor_num)).cuda()
                indices = []
                mapping = []
                # for debug
                clu_cnts = []
                for i in range(bs):
                    output, inverse_indices, cnts = torch.unique(gt_label[i][ori_knn_neighbor[i]>-1], return_inverse=True, return_counts=True)
                    indices.append(inverse_indices)
                    mapping.append(output)
                    Y[i, torch.arange(len(inverse_indices)), inverse_indices] = 1
                    clu_cnts.append(len(output))
                    ind_feat=clu_sum[labels[ori_knn_neighbor[i][ori_knn_neighbor[i]>-1]]]
                    W0[i,:len(ind_feat),:len(ind_feat)]=ind_feat.mm(ind_feat.t())
                Y[:,0]=0
                mask_top = torch.ones_like(W0) # only keep one
                for i in range(bs):
                    filter_lab, inde = np.unique(gt_label[i, 1:][ori_knn_neighbor[i,1:]>-1].cpu().numpy(), return_index=True)
                    inde += 1
                    del_list = list(set(np.arange(1, neighbor_num).tolist()) - set(inde))
                    mask_top[i, del_list] = 0
                    mask_top[i, :, del_list] = 0
                    Y[i, del_list] = 0
                masks = (1 - torch.eye(neighbor_num)).unsqueeze(0).cuda()
            else:
                index_feat = clu_sum[labels[ori_knn_neighbor.view(-1)]].view(bs, -1, 2048)
                # cal Y
                Y = torch.zeros(bs, neighbor_num, neighbor_num).cuda()
                indices = []
                mapping = []
                #for debug
                clu_cnts=[]
                for i in range(bs):
                    output, inverse_indices, cnts = torch.unique(gt_label[i], return_inverse=True, return_counts=True)
                    indices.append(inverse_indices)
                    mapping.append(output)
                    Y[i, torch.arange(neighbor_num), inverse_indices] = 1
                    clu_cnts.append(len(output))
                print('clu cnts:',clu_cnts)
                Y[:, 0] = 0
                # normalize
                i_feat = index_feat
                #i_feat /= torch.norm(i_feat, dim=2, keepdim=True)
                W0 = i_feat.bmm(i_feat.permute(0, 2, 1))
                mask_top = torch.ones_like(W0) #only keep one
                for i in range(bs):
                    filter_lab, inde = np.unique(gt_label[i, 1:].cpu().numpy(), return_index=True)
                    inde += 1
                    del_list = list(set(np.arange(1, neighbor_num).tolist()) - set(inde))
                    mask_top[i, del_list] = 0
                    mask_top[i, :, del_list] = 0
                    Y[i, del_list] = 0
                masks = (1 - torch.eye(neighbor_num)).unsqueeze(0).cuda()
            # normalize
            W = torch.exp(W0)  # dist
            W[torch.eye(neighbor_num).unsqueeze(0).expand_as(W).long()>0] = torch.exp(torch.tensor(1).float())  # self-->1
            W *= mask_top #unique
            #import pdb;pdb.set_trace()
            # topk_num = self.topk_num
            # topk, indices = torch.topk(W, topk_num, dim=2)
            # mask_top = torch.zeros_like(W)
            # mask_top = mask_top.scatter(2, indices, 1)
            # mask_top = ((mask_top > 0) & (mask_top.permute((0, 2, 1)) > 0)).type(torch.float32)
            mask_top=(W0>self.topk_num).long()#thre
            W*=mask_top
            # normalize
            #import pdb;pdb.set_trace()
            D = W.sum(1)
            W/=(D.unsqueeze(2).expand_as(W)+self.eps)
            W *= masks.expand_as(W)
        # --- method 4: iterative variant -- re-selects the neighborhood from
        # cluster-mean similarity under no_grad ---
        if self.method==4:#iterative
            #norm
            #i_feat=torch.cat((f_s.unsqueeze(1),index_feat[:,1:neighbor_num]),dim=1)
            # print('debug')
            # import pdb;pdb.set_trace()
            with torch.no_grad():
                i_sim=f_s.mm(clu_sum.t())
                #topk cluster
                topk, indices = torch.topk(i_sim, neighbor_num,dim=1)
                i_feat=clu_sum[indices.view(-1)].view(bs,neighbor_num,2048)
                i_feat/=torch.norm(i_feat,dim=2,keepdim=True)
                W0=i_feat.bmm(i_feat.permute(0,2,1))
                #cal Y
                Y=torch.eye(neighbor_num).unsqueeze(0).expand_as(W0).cuda()
                Y[:,0]=0
                mapping=indices.clone()
                topk, indices = torch.topk(W0, self.topk_num,dim=2)
                mask_top = torch.zeros_like(W0)
                mask_top = mask_top.scatter(2, indices, 1)
                mask_top = ((mask_top>0)&(mask_top.permute((0,2,1))>0)).type(torch.float32)
                W=torch.exp(-(2-2*W0)) #dist
                W*=mask_top
                #normalize
                D= W.sum(1)
                D_sqrt_inv = torch.sqrt(1.0/(D+self.eps))
                D1 = torch.unsqueeze(D_sqrt_inv,2).repeat(1,1,neighbor_num)
                D2 = torch.unsqueeze(D_sqrt_inv,1).repeat(1,neighbor_num,1)
                W = D1*W*D2
                masks=(1-torch.eye(neighbor_num)).unsqueeze(0).cuda()
                W*=masks.expand_as(W)
        # --- methods 1/2/3: original variants differing in where the top-k
        # mask is applied relative to normalization ---
        elif self.method<4:
            index_feat=clu_sum[labels[ori_knn_neighbor.view(-1)]].view(bs,-1,2048)
            #cal Y
            Y=torch.zeros(bs,neighbor_num,neighbor_num+1).cuda()
            masks=torch.ones((bs,neighbor_num,neighbor_num)).cuda()
            indices=[]
            mapping=[]
            if train:
                gt_conf=torch.zeros_like(gt_conf_ori)
            for i in range(bs):
                output, inverse_indices,cnts=torch.unique(gt_label[i],return_inverse=True,return_counts=True)
                if train:
                    #change gt
                    gt_conf[i,inverse_indices[0]]=1
                indices.append(inverse_indices)
                mapping.append(output)
                #masks[i,torch.arange(neighbor_num),inverse_indices]=0
                Y[i,torch.arange(neighbor_num),inverse_indices]=1
                if not self.only_consider_once:
                    # Spread each label's vote over its duplicate count.
                    Y[i,:,:len(cnts)]/=cnts.unsqueeze(0)
            Y[:,0]=0
            #index_feat=torch.cat((f_s,index_feat[:,1:]))
            if self.method!=3 and self.method!=2:
                i_feat=torch.cat((f_s.unsqueeze(1),index_feat[:,1:neighbor_num]),dim=1)
                W0=i_feat.bmm(i_feat.permute(0,2,1))
            else:
                #normalize
                i_feat=torch.cat((f_s.unsqueeze(1),index_feat[:,1:neighbor_num]),dim=1)
                i_feat/=torch.norm(i_feat,dim=2,keepdim=True)
                W0=i_feat.bmm(i_feat.permute(0,2,1))
            topk_num=self.topk_num
            topk, indices = torch.topk(W0, topk_num,dim=2)
            mask_top = torch.zeros_like(W0)
            mask_top = mask_top.scatter(2, indices, 1)
            mask_top = ((mask_top>0)&(mask_top.permute((0,2,1))>0)).type(torch.float32)
            #mask_union_top=((mask_top>0)+mask_top.permute((0,2,1))>0).type(torch.float32)
            #mask_top = ((mask_top>0)+mask_top.permute((0,2,1))>0).type(torch.float32) #union
            if self.only_consider_once:
                for i in range(bs):
                    filter_lab,inde=np.unique(gt_label[i,1:].cpu().numpy(),return_index=True)
                    inde+=1
                    del_list=list(set(np.arange(1,neighbor_num).tolist())-set(inde))
                    mask_top[i,del_list]=0
                    mask_top[i,:,del_list]=0
                    Y[i,del_list]=0
            # if not train:
            #     import pdb;pdb.set_trace()
            print('clu W:',(W0[0][0][:10]).tolist())
            #W0=torch.exp(W0)
            masks=(1-torch.eye(neighbor_num)).unsqueeze(0).cuda()
            if not self.w_use_dist:
                W=(W0/4)*masks.expand_as(W0)
                W*=mask_top
            else:
                if self.method==1:
                    # mask --> normalize
                    W=torch.exp(-(2-2*W0)) #dist
                    W*=mask_top
                    #normalize
                    D= W.sum(1)
                    D_sqrt_inv = torch.sqrt(1.0/(D+self.eps))
                    D1 = torch.unsqueeze(D_sqrt_inv,2).repeat(1,1,neighbor_num)
                    D2 = torch.unsqueeze(D_sqrt_inv,1).repeat(1,neighbor_num,1)
                    W = D1*W*D2
                    W*=masks.expand_as(W)
                elif self.method==2:
                    # normalize --> mask
                    W=torch.exp(-(2-2*W0)) #dist
                    #normalize
                    D= W.sum(1)
                    D_sqrt_inv = torch.sqrt(1.0/(D+self.eps))
                    D1 = torch.unsqueeze(D_sqrt_inv,2).repeat(1,1,neighbor_num)
                    D2 = torch.unsqueeze(D_sqrt_inv,1).repeat(1,neighbor_num,1)
                    W = D1*W*D2
                    W*=masks.expand_as(W)
                    W*=mask_top
                elif self.method==3:
                    #normalize
                    W=torch.exp(-(2-2*W0)) #dist
                    W*=mask_top
                    #normalize
                    D= W.sum(1)
                    D_sqrt_inv = torch.sqrt(1.0/(D+self.eps))
                    D1 = torch.unsqueeze(D_sqrt_inv,2).repeat(1,1,neighbor_num)
                    D2 = torch.unsqueeze(D_sqrt_inv,1).repeat(1,neighbor_num,1)
                    W = D1*W*D2
                    W*=masks.expand_as(W)
        #change y to softmax
        # with torch.no_grad():
        #     sim=W0[:,0,:]
        #     Y[:,:,-1]=F.softmax(sim,dim=1)
        # D= W.sum(1)
        # D_sqrt_inv = torch.sqrt(1.0/(D+self.eps))
        # D1 = torch.unsqueeze(D_sqrt_inv,2).repeat(1,1,neighbor_num)
        # D2 = torch.unsqueeze(D_sqrt_inv,1).repeat(1,neighbor_num,1)
        # S = D1*W*D2
        # Closed-form LP solve: preds = (I - alpha*W)^-1 Y.  The scalar eps is
        # added elementwise to the whole matrix before inversion.
        preds = torch.matmul(torch.inverse(torch.eye(neighbor_num).unsqueeze(0).expand_as(W).cuda()-self.alpha*W+self.eps), Y)
        # if self.method==4:
        #     preds[:,0,0]=bias+1#add self
        #import pdb;pdb.set_trace()
        #for debug
        # if self.method==4:
        #     Y[0,0,0]=1
        # merge_match_id=torch.argmax(Y[0],dim=0)
        # merge_sim=W0[0][0][merge_match_id[preds[0][0]>bias]]
        # print('step {} merge sim:'.format(step),merge_sim)
        #import pdb;pdb.set_trace()
        # if train:
        #     with torch.no_grad():
        #         max_num,_=preds.max(2)
        #         preds/=max_num.unsqueeze(2).expand_as(preds)
        #     loss=self.loss(preds[:,0,1:neighbor_num],gt_conf[:,1:neighbor_num])
        #     if torch.isnan(loss):
        #         print('nan')
        #         import pdb;pdb.set_trace()
        #     return loss
        # else:
        del W,Y
        return preds,clu_sum,nums,mapping,indices,indes
class Split_GCN(nn.Module):
    """One-layer GCN that decides, per point of a cluster, whether the point
    belongs with the query anchor or with the farthest member (a binary
    split), rewriting ``labels`` in place at inference time.
    """
    def __init__(self, feature_dim, nhid, feature_size,source_classes,nclass, momentum=0.2,dropout=0,cal_num=30):
        # NOTE(review): `feature_size`, `nclass`, `momentum` and `cal_num`
        # are accepted but never used; `self.nclass` is hard-coded to 2
        # (binary split head) regardless of the `nclass` argument -- confirm.
        super(Split_GCN, self).__init__()
        self.conv1 = GraphConv(feature_dim, nhid, MeanAggregator, dropout)
        self.nclass = 2
        self.classifier = nn.Sequential(nn.Linear(nhid, nhid), nn.PReLU(nhid),
                                        nn.Linear(nhid, self.nclass))
        self.loss=torch.nn.CrossEntropyLoss().cuda()
        self.source_classes=source_classes
    def forward(self,indexes,features,labels,train,sub_label=0,outliers_label=None,ori_knn_neighbor=None,gt=None,sub_labels=None):
        """Training: cross-entropy over k-NN neighborhoods.  Inference:
        splits each queried cluster in two by mutating ``labels`` in place
        and returns ``loss`` (which stays 0 on the inference path).
        """
        index_feat=features[indexes]
        all_idxs=torch.arange(len(labels)).cuda()
        loss=0
        if not train: # inference
            for n,idx in enumerate(indexes):
                split_idxs=all_idxs[labels==labels[idx]]
                if len(split_idxs)==1:
                    continue
                split_feat=features[labels==labels[idx]]
                split_sim=features[idx].unsqueeze(0).mm(split_feat.t())
                # Second anchor = cluster member least similar to the query.
                anchor_idx=split_idxs[torch.argmin(split_sim)]
                # Two graph copies: one centered on the query, one on the far
                # anchor; the GCN scores each point under both centerings.
                X=torch.cat([split_feat.unsqueeze(0),split_feat.unsqueeze(0)],dim=0)
                A=X.bmm(X.permute(0,2,1))
                A=F.softmax(A,dim=2)
                X[0]-=features[idx]
                X[1]-=features[anchor_idx]
                X=self.conv1(X, A)
                dout=X.size(-1)
                x_0=X.view(-1,dout)
                all_pred=F.softmax(self.classifier(x_0),dim=1)
                all_pred=all_pred.view(2,-1,2)
                # Assign each point to whichever centering is more confident.
                all_pred=torch.argmin(all_pred[:,:,1],dim=0)
                if sub_label:
                    labs=torch.tensor([idx.item(),anchor_idx.item()]).cuda()
                    labels[split_idxs]=labs[all_pred]
                else:
                    labs=torch.tensor([labels[idx].item(),outliers_label[n].item()]).cuda()
                    sub_idx=sub_labels[split_idxs]
                    sub_lab,cnts=torch.unique(sub_idx,return_counts=True)
                    # Majority vote per sub-cluster.
                    for sub_i,sub in enumerate(sub_lab):
                        if torch.sum(all_pred[sub_idx==sub])>cnts[sub_i]/2:
                            labels[sub_labels==sub]=labs[1]
                        else:
                            labels[sub_labels==sub]=labs[0]
                    # NOTE(review): this point-wise overwrite discards the
                    # per-sub-cluster majority vote above -- confirm intended.
                    labels[split_idxs]=labs[all_pred]
        else:
            X=features[ori_knn_neighbor.view(-1)].view(len(indexes),-1,2048)
            A=X.bmm(X.permute(0,2,1))
            A=F.softmax(A,dim=2)
            # Center every neighborhood on its own anchor (slot 0).
            X-=X[:,0].view(-1,1,2048)
            X=self.conv1(X, A)
            dout=X.size(-1)
            x_0=X.view(-1,dout)
            all_pred=self.classifier(x_0)
            gt=gt.view(-1)
            loss=self.loss(all_pred,gt)
        return loss
class Split_LP(nn.Module):
def __init__(self,alpha,split_num,anchor_thre,connect_num=20):
super(Split_LP, self).__init__()
self.alpha=alpha
self.eps = np.finfo(float).eps
self.method=9
self.split_num=split_num
self.anchor_thre=anchor_thre
self.connect_num=connect_num
def forward(self,indexes,features,labels,sub_level=0,sub_labels=None,outliers_label=None,ori_knn_neighbor=None,memory=None,two_hop=0,point_pred=None,point_W=None):
all_idxs=torch.arange(len(labels)).cuda()
split_nums=[]
if self.method==0:
if sub_level:
for n,idx in enumerate(indexes):
split_idxs=all_idxs[labels==labels[idx]]
if len(split_idxs)<=4:
continue
split_feat=features[labels==labels[idx]]
split_sim=features[idx].unsqueeze(0).mm(split_feat.t())
anchor_idx=split_idxs[torch.argmin(split_sim)]
split_sim_2=features[anchor_idx].unsqueeze(0).mm(split_feat.t())
anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
Y=torch.zeros((len(split_idxs),2)).cuda()
i_0,i_1=torch.argmax(split_sim_2),torch.argmin(split_sim_2)
Y[i_0,0]=1
Y[i_1,1]=1
W=torch.exp(split_feat.mm(split_feat.t()))
mask=torch.ones_like(W)
mask[i_0,i_0]=0
mask[i_1,i_1]=0
W*=mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0/(D+self.eps))
D1 = torch.unsqueeze(D_sqrt_inv,1).repeat(1,len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv,0).repeat(len(split_idxs),1)
S = D1*W*D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda()-self.alpha*S+self.eps), Y)
pred=torch.argmax(pred,dim=1)
lab=torch.tensor([labels[anchor_idx].item(),labels[anchor_idx_2].item()]).cuda()
labels[split_idxs]=lab[pred]
#for debug
split_nums.append([len(split_idxs)-torch.sum(pred).item(),torch.sum(pred).item()])
else:
for n,idx in enumerate(indexes):
batch_idx=all_idxs[labels==labels[idx]]
batch_sub_label=sub_labels[batch_idx]
split_idxs=list(set((batch_sub_label).tolist())) #sub label
if len(split_idxs)<3:
continue
split_feat=features[split_idxs]
split_sim=features[sub_labels[idx]].unsqueeze(0).mm(split_feat.t())
anchor_idx=split_idxs[torch.argmin(split_sim)]
split_sim_2=features[sub_labels[anchor_idx]].unsqueeze(0).mm(split_feat.t())
anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
Y=torch.zeros((len(split_idxs),2)).cuda()
i_0,i_1=torch.argmax(split_sim_2),torch.argmin(split_sim_2)
Y[i_0,0]=1
Y[i_1,1]=1
W=torch.exp(split_feat.mm(split_feat.t()))
mask=torch.ones_like(W)
mask[i_0,i_0]=0
mask[i_1,i_1]=0
W*=mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0/(D+self.eps))
D1 = torch.unsqueeze(D_sqrt_inv,1).repeat(1,len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv,0).repeat(len(split_idxs),1)
S = D1*W*D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda()-self.alpha*S+self.eps), Y)
pred=torch.argmax(pred,dim=1)
labs=torch.tensor([labels[idx].item(),outliers_label[n].item()]).cuda()
for sub,pre in zip(split_idxs,pred):
labels[batch_idx[batch_sub_label==sub]]=labs[pre]
split_nums.append([len(split_idxs)-torch.sum(pred).item(),torch.sum(pred).item()])
elif self.method==1:
split_num=0
if sub_level:
print_cnts=0
for n,idx in enumerate(indexes):
split_idxs=all_idxs[labels==labels[idx]]
if len(split_idxs)<=self.split_num:
continue
split_feat=features[labels==labels[idx]]
anchor_idxs=[]
anchor_indices=[]
#0
split_sim=features[idx].unsqueeze(0).mm(split_feat.t())
if (torch.sum(split_sim)-1.0)/(len(split_idxs)-1)>=0.7: #confident core
print('sub hei')
continue
split_num+=1
anchor_idx=split_idxs[torch.argmin(split_sim)]
anchor_idxs.append(anchor_idx.item())
anchor_indices.append(torch.argmin(split_sim).item())
for sp in range(1,self.split_num):
split_sim_2=features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2<split_sim]=split_sim[split_sim_2<split_sim]
anchor_idx=split_idxs[torch.argmin(split_sim_2)]
anchor_idxs.append(anchor_idx.item())
anchor_indices.append(torch.argmin(split_sim_2).item())
split_sim=split_sim_2.clone()
#anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==idx:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
#fix bug 104
Y=torch.zeros((len(split_idxs),self.split_num)).cuda()
Y[anchor_indices,torch.arange(self.split_num)]=1
# i_0,i_1=torch.argmin(split_sim),torch.argmin(split_sim_2)
# Y[i_0,0]=1
# Y[i_1,1]=1
#104-->fix bug
W=torch.exp(split_feat.mm(split_feat.t()))
mask=(1-torch.eye(len(split_feat))).cuda()
W*=mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0/(D+self.eps))
D1 = torch.unsqueeze(D_sqrt_inv,1).repeat(1,len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv,0).repeat(len(split_idxs),1)
S = D1*W*D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda()-self.alpha*S+self.eps), Y)
pred=torch.argmax(pred,dim=1)
#lab=torch.tensor([anchor_idx.item(),anchor_idx_2.item()]).cuda()
lab=torch.tensor(anchor_idxs).cuda()
labels[split_idxs]=lab[pred]
labels[idx]=idx
#for debug
# if print_cnts==0:
# print(pred)
# print_cnts=1
else:
print_cnts=0
for n,idx in enumerate(indexes):
batch_idx=all_idxs[labels==labels[idx]]
batch_sub_label=sub_labels[batch_idx]
split_idxs, split_ind, split_cnts = np.unique(batch_sub_label.cpu().numpy(), return_index=True,
return_counts=True)
split_idxs=split_idxs.tolist() #sub label
if len(split_idxs)<=self.split_num:
continue
anchor_idxs=[]
anchor_indices=[]
split_feat=features[split_idxs]
mean_cen=torch.from_numpy(split_cnts).cuda().unsqueeze(1)*split_feat
if (torch.sum(memory.features[idx]*mean_cen)-1.0)/(len(batch_idx)-1)>=0.6: #confident core
print('clu hei')
continue
split_num+=1
split_sim=features[sub_labels[idx]].unsqueeze(0).mm(split_feat.t())
anchor_idx=split_idxs[torch.argmin(split_sim)]
anchor_idxs.append(anchor_idx)
anchor_indices.append(torch.argmin(split_sim).item())
for sp in range(1,self.split_num):
#fix bug 20210116
split_sim_2=features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2<split_sim]=split_sim[split_sim_2<split_sim]
anchor_idx=split_idxs[torch.argmin(split_sim_2)]
anchor_idxs.append(anchor_idx)
anchor_indices.append(torch.argmin(split_sim_2).item())
split_sim=split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==sub_labels[idx]:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
Y=torch.zeros((len(split_idxs),self.split_num)).cuda()
Y[anchor_indices,torch.arange(self.split_num)]=1
#104-->fix bug
W=split_feat.mm(split_feat.t())
W = torch.exp(-(2 - 2 * W))
mask=(1-torch.eye(len(split_feat))).cuda()
W*=mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0/(D+self.eps))
D1 = torch.unsqueeze(D_sqrt_inv,1).repeat(1,len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv,0).repeat(len(split_idxs),1)
S = D1*W*D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda()-self.alpha*S+self.eps), Y)
pred=torch.argmax(pred,dim=1)
#labs=torch.tensor([labels[idx].item(),outliers_label[n].item()]).cuda()
labs=outliers_label[torch.arange(n,len(outliers_label),step=len(indexes))]
ori_label=labels[idx].item()
for sub,pre in zip(split_idxs,pred):
labels[batch_idx[batch_sub_label==sub]]=labs[pre]
labels[batch_idx[batch_sub_label==sub_labels[idx]]]=ori_label#outliers_label[(self.split_num-1)*len(indexes)+n]
# if print_cnts==0:
# print(pred)
# print_cnts=1
#split_nums.append([len(split_idxs)-torch.sum(pred).item(),torch.sum(pred).item()])
print('split num:',split_num)
elif self.method==2:
split_num=0
if sub_level:
print_cnts=0
for n,idx in enumerate(indexes):
split_idxs=all_idxs[labels==labels[idx]]
core_candidate = torch.tensor(
list(set(split_idxs.tolist()) & (set(ori_knn_neighbor[n].tolist())))).long().cuda()
if len(core_candidate)<=self.split_num:
continue
split_num+=1
tmp_map = {}
for tmp_id, x in enumerate(split_idxs):
tmp_map[x.item()] = tmp_id
anchor_idxs=[]
anchor_indices=[]
#0
split_feat = features[core_candidate]
split_sim=features[idx].unsqueeze(0).mm(split_feat.t())
anchor_idx=core_candidate[torch.argmin(split_sim)].item()
anchor_idxs.append(anchor_idx)
anchor_indices.append(tmp_map[anchor_idx])
for sp in range(1,self.split_num):
split_sim_2=features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2<split_sim]=split_sim[split_sim_2<split_sim]
anchor_idx=core_candidate[torch.argmin(split_sim_2)].item()
anchor_idxs.append(anchor_idx)
anchor_indices.append(tmp_map[anchor_idx])
split_sim=split_sim_2.clone()
#anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==idx:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
#fix bug 104
split_feat = features[split_idxs]
Y=torch.zeros((len(split_idxs),self.split_num)).cuda()
Y[anchor_indices,torch.arange(self.split_num)]=1
# i_0,i_1=torch.argmin(split_sim),torch.argmin(split_sim_2)
# Y[i_0,0]=1
# Y[i_1,1]=1
#104-->fix bug
W=torch.exp(split_feat.mm(split_feat.t()))
mask=(1-torch.eye(len(split_feat))).cuda()
W*=mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0/(D+self.eps))
D1 = torch.unsqueeze(D_sqrt_inv,1).repeat(1,len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv,0).repeat(len(split_idxs),1)
S = D1*W*D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda()-self.alpha*S+self.eps), Y)
pred=torch.argmax(pred,dim=1)
#lab=torch.tensor([anchor_idx.item(),anchor_idx_2.item()]).cuda()
lab=torch.tensor(anchor_idxs).cuda()
labels[split_idxs]=lab[pred]
labels[idx]=idx
#for debug
# if print_cnts==0:
# print(pred)
# print_cnts=1
else:
print_cnts=0
for n,idx in enumerate(indexes):
batch_idx=all_idxs[labels==labels[idx]]
batch_sub_label=sub_labels[batch_idx]
split_idxs=list(set((batch_sub_label).tolist())) #sub label
nei_sub_label = sub_labels[ori_knn_neighbor[n]]
core_candidate = torch.tensor(
list(set(split_idxs) & (set(nei_sub_label.tolist())))).long().cuda()
if len(core_candidate)<=self.split_num:
continue
split_num += 1
tmp_map = {}
for tmp_id, x in enumerate(split_idxs):
tmp_map[x] = tmp_id
anchor_idxs=[]
anchor_indices=[]
split_feat=features[core_candidate]
split_sim=features[sub_labels[idx]].unsqueeze(0).mm(split_feat.t())
anchor_idx=core_candidate[torch.argmin(split_sim)].item()
anchor_idxs.append(anchor_idx)
anchor_indices.append(tmp_map[anchor_idx])
for sp in range(1,self.split_num):
split_sim_2=features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2<split_sim]=split_sim[split_sim_2<split_sim]
anchor_idx=core_candidate[torch.argmin(split_sim_2)].item()
anchor_idxs.append(anchor_idx)
anchor_indices.append(tmp_map[anchor_idx])
split_sim=split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==sub_labels[idx]:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
split_feat = features[split_idxs]
Y=torch.zeros((len(split_idxs),self.split_num)).cuda()
Y[anchor_indices,torch.arange(self.split_num)]=1
#104-->fix bug
W=split_feat.mm(split_feat.t())
W = torch.exp(-(2 - 2 * W))
mask=(1-torch.eye(len(split_feat))).cuda()
W*=mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0/(D+self.eps))
D1 = torch.unsqueeze(D_sqrt_inv,1).repeat(1,len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv,0).repeat(len(split_idxs),1)
S = D1*W*D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda()-self.alpha*S+self.eps), Y)
pred=torch.argmax(pred,dim=1)
#labs=torch.tensor([labels[idx].item(),outliers_label[n].item()]).cuda()
labs=outliers_label[torch.arange(n,len(outliers_label),step=len(indexes))]
ori_label=labels[idx].item()
for sub,pre in zip(split_idxs,pred):
labels[batch_idx[batch_sub_label==sub]]=labs[pre]
labels[batch_idx[batch_sub_label==sub_labels[idx]]]=ori_label#outliers_label[(self.split_num-1)*len(indexes)+n]
print('split num:',split_num)
elif self.method==3: #method1+cluster self(<8)
split_num = 0
ori_labels=labels[indexes]
unique_label=set(labels[indexes].tolist())
unique_map={}
if sub_level:
print_cnts = 0
for n, idx in enumerate(indexes):
if ori_labels[n].item() in unique_label:
unique_label=unique_label-set([ori_labels[n].item()])
else:
ori_knn_neighbor[n,-self.split_num:]=unique_map[ori_labels[n].item()]
continue
split_idxs = all_idxs[labels == labels[idx]]
if len(split_idxs) <= self.split_num:
unique_map[ori_labels[n].item()] = ori_knn_neighbor[n, -self.split_num:]
continue
split_feat = features[labels == labels[idx]]
anchor_idxs = []
anchor_indices = []
# 0
split_sim = features[idx].unsqueeze(0).mm(split_feat.t())
if (torch.sum(split_sim) - 1.0) / (len(split_idxs) - 1) >= 0.7: # confident core
print('sub hei')
unique_map[ori_labels[n].item()] = ori_knn_neighbor[n, -self.split_num:]
continue
split_num += 1
anchor_idx = split_idxs[torch.argmin(split_sim)]
anchor_idxs.append(anchor_idx.item())
anchor_indices.append(torch.argmin(split_sim).item())
for sp in range(1, self.split_num):
split_sim_2 = features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2 < split_sim] = split_sim[split_sim_2 < split_sim]
anchor_idx = split_idxs[torch.argmin(split_sim_2)]
anchor_idxs.append(anchor_idx.item())
anchor_indices.append(torch.argmin(split_sim_2).item())
split_sim = split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==idx:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# fix bug 104
Y = torch.zeros((len(split_idxs), self.split_num)).cuda()
Y[anchor_indices, torch.arange(self.split_num)] = 1
# i_0,i_1=torch.argmin(split_sim),torch.argmin(split_sim_2)
# Y[i_0,0]=1
# Y[i_1,1]=1
# 104-->fix bug
W = torch.exp(split_feat.mm(split_feat.t()))
mask = (1 - torch.eye(len(split_feat))).cuda()
W *= mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
D1 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv, 0).repeat(len(split_idxs), 1)
S = D1 * W * D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda() - self.alpha * S + self.eps), Y)
pred = torch.argmax(pred, dim=1)
# lab=torch.tensor([anchor_idx.item(),anchor_idx_2.item()]).cuda()
lab = torch.tensor(anchor_idxs).cuda()
labels[split_idxs] = lab[pred]
labels[idx] = idx
#append anchor[for two hop]
ori_knn_neighbor[n,-self.split_num:]=torch.tensor(anchor_idxs)
unique_map[ori_labels[n].item()]=torch.tensor(anchor_idxs)
# for debug
# if print_cnts==0:
# print(pred)
# print_cnts=1
else:
print_cnts = 0
for n, idx in enumerate(indexes):
#reduce duplicate
if ori_labels[n].item() in unique_label:
unique_label=unique_label-set([ori_labels[n].item()])
else:
ori_knn_neighbor[n, -self.split_num:]=unique_map[ori_labels[n].item()]
continue
batch_idx = all_idxs[labels == labels[idx]]
batch_sub_label = sub_labels[batch_idx]
split_idxs, split_ind,split_cnts = np.unique(batch_sub_label.cpu().numpy(), return_index=True,return_counts=True)
split_idxs = split_idxs.tolist() # sub label
if len(split_idxs) <= self.split_num:
unique_map[ori_labels[n].item()]=ori_knn_neighbor[n, -self.split_num:]
continue
anchor_idxs = []
anchor_indices = []
split_feat = features[split_idxs]
mean_cen = torch.from_numpy(split_cnts).cuda().unsqueeze(1) * split_feat
if (torch.sum(memory.features[idx] * mean_cen) - 1.0) / (
len(batch_idx) - 1) >= 0.6: # confident core
print('clu hei')
unique_map[ori_labels[n].item()] = ori_knn_neighbor[n, -self.split_num:]
continue
split_num += 1
split_sim = features[sub_labels[idx]].unsqueeze(0).mm(split_feat.t())
anchor_idx = split_idxs[torch.argmin(split_sim)]
anchor_idxs.append(anchor_idx)
anchor_indices.append(torch.argmin(split_sim).item())
for sp in range(1, self.split_num):
# fix bug 20210116
split_sim_2 = features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2 < split_sim] = split_sim[split_sim_2 < split_sim]
anchor_idx = split_idxs[torch.argmin(split_sim_2)]
anchor_idxs.append(anchor_idx)
anchor_indices.append(torch.argmin(split_sim_2).item())
split_sim = split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==sub_labels[idx]:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
Y = torch.zeros((len(split_idxs), self.split_num)).cuda()
Y[anchor_indices, torch.arange(self.split_num)] = 1
# 104-->fix bug
W = split_feat.mm(split_feat.t())
W = torch.exp(-(2 - 2 * W))
mask = (1 - torch.eye(len(split_feat))).cuda()
W *= mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
D1 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv, 0).repeat(len(split_idxs), 1)
S = D1 * W * D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda() - self.alpha * S + self.eps), Y)
pred = torch.argmax(pred, dim=1)
# labs=torch.tensor([labels[idx].item(),outliers_label[n].item()]).cuda()
labs = outliers_label[torch.arange(n, len(outliers_label), step=len(indexes))]
ori_label = labels[idx].item()
for sub, pre in zip(split_idxs, pred):
labels[batch_idx[batch_sub_label == sub]] = labs[pre]
labels[batch_idx[batch_sub_label == sub_labels[
idx]]] = ori_label # outliers_label[(self.split_num-1)*len(indexes)+n]
#add split guys
split_ind=torch.from_numpy(split_ind).cuda()
ori_knn_neighbor[n, -self.split_num:] = batch_idx[split_ind[anchor_indices]]
unique_map[ori_labels[n].item()]=batch_idx[split_ind[anchor_indices]]
# if print_cnts==0:
# print(pred)
# print_cnts=1
# split_nums.append([len(split_idxs)-torch.sum(pred).item(),torch.sum(pred).item()])
print('split num:', split_num)
elif self.method == 4: # method1+anchor thre
split_num = 0
if sub_level:
print_cnts = 0
for n, idx in enumerate(indexes):
split_idxs = all_idxs[labels == labels[idx]]
if len(split_idxs) <= self.split_num:
continue
split_feat = features[labels == labels[idx]]
anchor_idxs = []
anchor_indices = []
# 0
split_sim = features[idx].unsqueeze(0).mm(split_feat.t())
if torch.min(split_sim)>=self.anchor_thre:
continue
split_num += 1
anchor_idx = split_idxs[torch.argmin(split_sim)]
anchor_idxs.append(anchor_idx.item())
anchor_indices.append(torch.argmin(split_sim).item())
for sp in range(1, self.split_num):
split_sim_2 = features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2 < split_sim] = split_sim[split_sim_2 < split_sim]
if torch.min(split_sim_2)>=self.anchor_thre:
continue
anchor_idx = split_idxs[torch.argmin(split_sim_2)]
anchor_idxs.append(anchor_idx.item())
anchor_indices.append(torch.argmin(split_sim_2).item())
split_sim = split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==idx:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# fix bug 104
Y = torch.zeros((len(split_idxs), len(anchor_idxs))).cuda()
Y[anchor_indices, torch.arange(len(anchor_idxs))] = 1
# i_0,i_1=torch.argmin(split_sim),torch.argmin(split_sim_2)
# Y[i_0,0]=1
# Y[i_1,1]=1
# 104-->fix bug
W = torch.exp(split_feat.mm(split_feat.t()))
mask = (1 - torch.eye(len(split_feat))).cuda()
W *= mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
D1 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv, 0).repeat(len(split_idxs), 1)
S = D1 * W * D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda() - self.alpha * S + self.eps), Y)
pred = torch.argmax(pred, dim=1)
# lab=torch.tensor([anchor_idx.item(),anchor_idx_2.item()]).cuda()
lab = torch.tensor(anchor_idxs).cuda()
labels[split_idxs] = lab[pred]
labels[idx] = idx
# append anchor[for two hop]
if ori_knn_neighbor[n,-1]<0:
start=min(len(ori_knn_neighbor[n])-self.split_num,torch.argmin(ori_knn_neighbor[n]).item())
else:
start=len(ori_knn_neighbor[n])-self.split_num
ori_knn_neighbor[n, start:start+len(anchor_idxs)]=torch.tensor(anchor_idxs)
print('{} | sub split idxs:'.format(len(split_idxs)),len(anchor_idxs))
# for debug
# if print_cnts==0:
# print(pred)
# print_cnts=1
else:
print_cnts = 0
for n, idx in enumerate(indexes):
# reduce duplicate
batch_idx = all_idxs[labels == labels[idx]]
batch_sub_label = sub_labels[batch_idx]
split_idxs, split_ind, split_cnts = np.unique(batch_sub_label.cpu().numpy(), return_index=True,
return_counts=True)
split_idxs = split_idxs.tolist() # sub label
if len(split_idxs) <= self.split_num:
continue
anchor_idxs = []
anchor_indices = []
split_feat = features[split_idxs]
split_sim = features[sub_labels[idx]].unsqueeze(0).mm(split_feat.t())
if torch.min(split_sim) >= self.anchor_thre:
continue
split_num += 1
anchor_idx = split_idxs[torch.argmin(split_sim)]
anchor_idxs.append(anchor_idx)
anchor_indices.append(torch.argmin(split_sim).item())
for sp in range(1, self.split_num):
# fix bug 20210116
split_sim_2 = features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2 < split_sim] = split_sim[split_sim_2 < split_sim]
if torch.min(split_sim_2)>=self.anchor_thre:
continue
anchor_idx = split_idxs[torch.argmin(split_sim_2)]
anchor_idxs.append(anchor_idx)
anchor_indices.append(torch.argmin(split_sim_2).item())
split_sim = split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==sub_labels[idx]:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
Y = torch.zeros((len(split_idxs), len(anchor_idxs))).cuda()
Y[anchor_indices, torch.arange(len(anchor_idxs))] = 1
# 104-->fix bug
W = split_feat.mm(split_feat.t())
W = torch.exp(-(2 - 2 * W))
mask = (1 - torch.eye(len(split_feat))).cuda()
W *= mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
D1 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv, 0).repeat(len(split_idxs), 1)
S = D1 * W * D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda() - self.alpha * S + self.eps), Y)
pred = torch.argmax(pred, dim=1)
if len(batch_idx)>3000:
print('pred:',pred)
# labs=torch.tensor([labels[idx].item(),outliers_label[n].item()]).cuda()
labs = outliers_label[torch.arange(n, len(outliers_label), step=len(indexes))]
ori_label = labels[idx].item()
for sub, pre in zip(split_idxs, pred):
labels[batch_idx[batch_sub_label == sub]] = labs[pre]
labels[batch_idx[batch_sub_label == sub_labels[
idx]]] = ori_label # outliers_label[(self.split_num-1)*len(indexes)+n]
# add split guys
split_ind = torch.from_numpy(split_ind).cuda()
if ori_knn_neighbor[n, -1] < 0:
start = min(len(ori_knn_neighbor[n]) - self.split_num, torch.argmin(ori_knn_neighbor[n]).item())
else:
start=len(ori_knn_neighbor[n]) - self.split_num
ori_knn_neighbor[n, start:start+len(anchor_idxs)] = batch_idx[split_ind[anchor_indices]]
print('{}| clu split idxs:'.format(len(batch_idx)), len(anchor_idxs))
# if print_cnts==0:
# print(pred)
# print_cnts=1
# split_nums.append([len(split_idxs)-torch.sum(pred).item(),torch.sum(pred).item()])
print('split num:', split_num)
elif self.method == 5: # method1+anchor thre+wo split self alone
split_num = 0
if sub_level:
print_cnts = 0
for n, idx in enumerate(indexes):
split_idxs = all_idxs[labels == labels[idx]]
if len(split_idxs) <= self.split_num:
continue
split_feat = features[labels == labels[idx]]
anchor_idxs = []
anchor_indices = []
# 0
split_sim = features[idx].unsqueeze(0).mm(split_feat.t())
if torch.min(split_sim)>=self.anchor_thre:
continue
split_num += 1
anchor_idxs.append(split_idxs[torch.argmax(split_sim)].item())#index self
anchor_indices.append(torch.argmax(split_sim).item())
anchor_idx = split_idxs[torch.argmin(split_sim)]
anchor_idxs.append(anchor_idx.item())
anchor_indices.append(torch.argmin(split_sim).item())
for sp in range(2, self.split_num):
split_sim_2 = features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2 < split_sim] = split_sim[split_sim_2 < split_sim]
if torch.min(split_sim_2)>=self.anchor_thre:
continue
anchor_idx = split_idxs[torch.argmin(split_sim_2)]
anchor_idxs.append(anchor_idx.item())
anchor_indices.append(torch.argmin(split_sim_2).item())
split_sim = split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==idx:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# fix bug 104
Y = torch.zeros((len(split_idxs), len(anchor_idxs))).cuda()
Y[anchor_indices, torch.arange(len(anchor_idxs))] = 1
# i_0,i_1=torch.argmin(split_sim),torch.argmin(split_sim_2)
# Y[i_0,0]=1
# Y[i_1,1]=1
# 104-->fix bug
W = torch.exp(split_feat.mm(split_feat.t()))
mask = (1 - torch.eye(len(split_feat))).cuda()
W *= mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
D1 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv, 0).repeat(len(split_idxs), 1)
S = D1 * W * D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda() - self.alpha * S + self.eps), Y)
pred = torch.argmax(pred, dim=1)
# lab=torch.tensor([anchor_idx.item(),anchor_idx_2.item()]).cuda()
lab = torch.tensor(anchor_idxs).cuda()
labels[split_idxs] = lab[pred]
#labels[idx] = idx
# append anchor[for two hop]
if len(anchor_idxs)==self.split_num:
ori_knn_neighbor[n, -self.split_num:] = torch.tensor(anchor_idxs)
else:
ori_knn_neighbor[n, -self.split_num:-self.split_num+len(anchor_idxs)]=torch.tensor(anchor_idxs)
print('{} | sub split idxs:'.format(len(split_idxs)),len(anchor_idxs))
# for debug
# if print_cnts==0:
# print(pred)
# print_cnts=1
else:
print_cnts = 0
for n, idx in enumerate(indexes):
# reduce duplicate
batch_idx = all_idxs[labels == labels[idx]]
# if len(batch_idx)>3000:
# print('------------>3000-----------')
# import pdb;pdb.set_trace()
batch_sub_label = sub_labels[batch_idx]
split_idxs, split_ind, split_cnts = np.unique(batch_sub_label.cpu().numpy(), return_index=True,
return_counts=True)
split_idxs = split_idxs.tolist() # sub label
if len(split_idxs) <= self.split_num:
continue
anchor_idxs = []
anchor_indices = []
split_feat = features[split_idxs]
split_sim = features[sub_labels[idx]].unsqueeze(0).mm(split_feat.t())
if torch.min(split_sim) >= self.anchor_thre:
continue
split_num += 1
anchor_idxs.append(split_idxs[torch.argmax(split_sim).item()]) # index self
anchor_indices.append(torch.argmax(split_sim).item())
anchor_idx = split_idxs[torch.argmin(split_sim)]
anchor_idxs.append(anchor_idx)
anchor_indices.append(torch.argmin(split_sim).item())
for sp in range(2, self.split_num):
# fix bug 20210116
split_sim_2 = features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2 < split_sim] = split_sim[split_sim_2 < split_sim]
if torch.min(split_sim_2)>=self.anchor_thre:
continue
anchor_idx = split_idxs[torch.argmin(split_sim_2)]
anchor_idxs.append(anchor_idx)
anchor_indices.append(torch.argmin(split_sim_2).item())
split_sim = split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==sub_labels[idx]:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
Y = torch.zeros((len(split_idxs), len(anchor_idxs))).cuda()
Y[anchor_indices, torch.arange(len(anchor_idxs))] = 1
# if len(batch_idx)>3000:
# print('------------>3000-----------')
# import pdb;pdb.set_trace()
# 104-->fix bug
W = split_feat.mm(split_feat.t())
W = torch.exp(-(2 - 2 * W))
mask = (1 - torch.eye(len(split_feat))).cuda()
W *= mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
D1 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv, 0).repeat(len(split_idxs), 1)
S = D1 * W * D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda() - self.alpha * S + self.eps), Y)
pred = torch.argmax(pred, dim=1)
if len(batch_idx)>3000:
print('pred:',pred)
# labs=torch.tensor([labels[idx].item(),outliers_label[n].item()]).cuda()
labs = outliers_label[torch.arange(n, len(outliers_label), step=len(indexes))]
#ori_label = labels[idx].item()
for sub, pre in zip(split_idxs, pred):
labels[batch_idx[batch_sub_label == sub]] = labs[pre]
# labels[batch_idx[batch_sub_label == sub_labels[
# idx]]] = ori_label # outliers_label[(self.split_num-1)*len(indexes)+n]
# add split guys
split_ind = torch.from_numpy(split_ind).cuda()
if len(anchor_idxs)==self.split_num:
ori_knn_neighbor[n, -self.split_num:] = batch_idx[split_ind[anchor_indices]]
else:
ori_knn_neighbor[n, -self.split_num:-self.split_num+len(anchor_idxs)] = batch_idx[split_ind[anchor_indices]]
print('{}| clu split idxs:{} | {}'.format(len(batch_idx),len(anchor_idxs),split_cnts[anchor_indices]))
# if print_cnts==0:
# print(pred)
# print_cnts=1
# split_nums.append([len(split_idxs)-torch.sum(pred).item(),torch.sum(pred).item()])
elif self.method == 6: # method1+anchor thre+wo split self alone
empty_label = set(torch.arange(labels.max() + 1).tolist()) - set(labels.tolist())
split_num = 0
if sub_level:
print_cnts = 0
for n, idx in enumerate(indexes):
split_idxs = all_idxs[labels == labels[idx]]
inter = list(set(ori_knn_neighbor[i].tolist()) & set(split_idxs.tolist()))
if len(inter)==0:
continue
# if len(split_idxs) <= self.split_num:
# continue
split_feat = features[labels == labels[idx]]
anchor_idxs = []
anchor_indices = []
# 0
split_sim = features[idx].unsqueeze(0).mm(split_feat.t())
if torch.min(split_sim)>=0.4:
continue
split_num += 1
anchor_idxs.append(split_idxs[torch.argmax(split_sim)].item())#index self
anchor_indices.append(torch.argmax(split_sim).item())
anchor_idx = split_idxs[torch.argmin(split_sim)]
anchor_idxs.append(anchor_idx.item())
anchor_indices.append(torch.argmin(split_sim).item())
for sp in range(2, self.split_num):
split_sim_2 = features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2 < split_sim] = split_sim[split_sim_2 < split_sim]
if torch.min(split_sim_2)>=self.anchor_thre:
continue
anchor_idx = split_idxs[torch.argmin(split_sim_2)]
anchor_idxs.append(anchor_idx.item())
anchor_indices.append(torch.argmin(split_sim_2).item())
split_sim = split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==idx:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# fix bug 104
Y = torch.zeros((len(split_idxs), len(anchor_idxs))).cuda()
Y[anchor_indices, torch.arange(len(anchor_idxs))] = 1
# i_0,i_1=torch.argmin(split_sim),torch.argmin(split_sim_2)
# Y[i_0,0]=1
# Y[i_1,1]=1
# 104-->fix bug
W = torch.exp(split_feat.mm(split_feat.t()))
mask = (1 - torch.eye(len(split_feat))).cuda()
W *= mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
D1 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv, 0).repeat(len(split_idxs), 1)
S = D1 * W * D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda() - self.alpha * S + self.eps), Y)
pred = torch.argmax(pred, dim=1)
# lab=torch.tensor([anchor_idx.item(),anchor_idx_2.item()]).cuda()
lab = torch.tensor(anchor_idxs).cuda()
labels[split_idxs] = lab[pred]
#labels[idx] = idx
# append anchor[for two hop]
if len(anchor_idxs)==self.split_num:
ori_knn_neighbor[n, -self.split_num:] = torch.tensor(anchor_idxs)
else:
ori_knn_neighbor[n, -self.split_num:-self.split_num+len(anchor_idxs)]=torch.tensor(anchor_idxs)
print('{} | sub split idxs:'.format(len(split_idxs)),len(anchor_idxs))
# for debug
# if print_cnts==0:
# print(pred)
# print_cnts=1
else:
print_cnts = 0
for n, idx in enumerate(indexes):
empty_label_list=list(empty_label)
# reduce duplicate
batch_idx = all_idxs[labels == labels[idx]]
if len(batch_idx)<=self.split_num:
continue
# if len(batch_idx)>3000:
# print('------------>3000-----------')
# import pdb;pdb.set_trace()
batch_sub_label = sub_labels[batch_idx]
# split_idxs, split_ind, split_cnts = np.unique(batch_sub_label.cpu().numpy(), return_index=True,
# return_counts=True)
split_idxs=batch_idx
split_idxs = split_idxs.tolist() # sub label
inter = list(set(ori_knn_neighbor[n].tolist()) & set(split_idxs))
if len(inter) <= 1:
continue
tmp_map = {}
for inter_n, inter_idx in enumerate(ori_knn_neighbor[n].tolist()):
tmp_map[inter_idx] = inter_n
inter_idxs = []
for aa in inter:
inter_idxs.append(tmp_map[aa])
#compute inter
for inter_idx in inter:
inter_n=tmp_map[inter_idx]
W_tmp = torch.sum(torch.min(point_W[n, inter_n, :].unsqueeze(1).expand_as(point_W[n,:,:]), point_W[n]), dim=-1)
point_pred[n,inter_n]=W_tmp / (2 - W_tmp)
point_pred[n,inter_n,inter_n]=0
#import pdb;pdb.set_trace()
anchor_idxs = []
anchor_indices = []
#split_feat = features[split_idxs]
#split_sim = features[sub_labels[idx]].unsqueeze(0).mm(split_feat.t())
split_sim=point_pred[n,tmp_map[idx.item()]][inter_idxs]
if torch.min(split_sim) >= 0.4:
continue
batch_map={}
for aa_n,aa in enumerate(batch_idx.tolist()):
batch_map[aa]=aa_n
split_num += 1
anchor_idxs.append(idx.item()) # index self
anchor_indices.append(batch_map[idx.item()])
anchor_idx = inter[torch.argmin(split_sim).item()]
anchor_idxs.append(anchor_idx)
anchor_indices.append(batch_map[anchor_idx])
for sp in range(2,len(inter)):
# fix bug 20210116
split_sim_2=point_pred[n,tmp_map[anchor_idx]][inter_idxs]
#split_sim_2 = #features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2 < split_sim] = split_sim[split_sim_2 < split_sim]
if torch.min(split_sim_2)>=0.4:
continue
#anchor_idx = split_idxs[torch.argmin(split_sim_2)]
anchor_idx=inter[torch.argmin(split_sim_2).item()]
anchor_idxs.append(anchor_idx)
anchor_indices.append(batch_map[anchor_idx])
split_sim = split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==sub_labels[idx]:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
Y = torch.zeros((len(split_idxs), len(anchor_idxs))).cuda()
Y[anchor_indices, torch.arange(len(anchor_idxs))] = 1
# if len(batch_idx)>3000:
# print('------------>3000-----------')
# import pdb;pdb.set_trace()
# 104-->fix bug
split_feat=features[labels == labels[idx]]
W = split_feat.mm(split_feat.t())
W = torch.exp(-(2 - 2 * W))
mask = (1 - torch.eye(len(split_feat))).cuda()
W *= mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
D1 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv, 0).repeat(len(split_idxs), 1)
S = D1 * W * D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda() - self.alpha * S + self.eps), Y)
pred = torch.argmax(pred, dim=1)
# labs=torch.tensor([labels[idx].item(),outliers_label[n].item()]).cuda()
#labs = outliers_label[torch.arange(n, len(outliers_label), step=len(indexes))]
if len(empty_label_list)>=len(anchor_idxs):
labs=empty_label_list[:len(anchor_idxs)]
empty_label=empty_label-set(labs)
else:
labs=torch.arange(labels.max() + 1,labels.max() + 1+len(anchor_idxs))
#ori_label = labels[idx].item()
for sub, pre in zip(split_idxs, pred):
labels[batch_idx[batch_sub_label == sub]] = labs[pre]
# labels[batch_idx[batch_sub_label == sub_labels[
# idx]]] = ori_label # outliers_label[(self.split_num-1)*len(indexes)+n]
print('{}| clu split idxs:{}'.format(len(batch_idx),len(anchor_idxs)))
# if print_cnts==0:
# print(pred)
# print_cnts=1
# split_nums.append([len(split_idxs)-torch.sum(pred).item(),torch.sum(pred).item()])
elif self.method == 7: # method1+anchor thre+anchor idx in nei
split_num = 0
if sub_level:
print_cnts = 0
for n, idx in enumerate(indexes):
split_idxs = all_idxs[labels == labels[idx]]
if len(split_idxs) <= self.split_num:
continue
anchor_idxs = []
anchor_indices = []
inter=list(set(split_idxs.tolist()) & set(ori_knn_neighbor[n].tolist()))
if len(inter)<=1:
continue
split_map={}
for sp_n,sp_idx in enumerate(split_idxs.tolist()):
split_map[sp_idx]=sp_n
# 0
split_feat=features[inter]
split_sim = features[idx].unsqueeze(0).mm(split_feat.t())
if torch.min(split_sim)>=self.anchor_thre:
continue
split_num += 1
anchor_idxs.append(idx.item())#index self
anchor_indices.append(split_map[idx.item()])
anchor_idx = inter[torch.argmin(split_sim).item()]
anchor_idxs.append(anchor_idx)
anchor_indices.append(split_map[anchor_idx])
for sp in range(2, self.split_num):
split_sim_2 = features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2 < split_sim] = split_sim[split_sim_2 < split_sim]
if torch.min(split_sim_2)>=self.anchor_thre:
continue
anchor_idx = inter[torch.argmin(split_sim_2).item()]
anchor_idxs.append(anchor_idx)
anchor_indices.append(split_map[anchor_idx])
split_sim = split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==idx:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# fix bug 104
split_feat = features[labels == labels[idx]]
Y = torch.zeros((len(split_idxs), len(anchor_idxs))).cuda()
Y[anchor_indices, torch.arange(len(anchor_idxs))] = 1
# i_0,i_1=torch.argmin(split_sim),torch.argmin(split_sim_2)
# Y[i_0,0]=1
# Y[i_1,1]=1
# 104-->fix bug
W = torch.exp(split_feat.mm(split_feat.t()))
mask = (1 - torch.eye(len(split_feat))).cuda()
W *= mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
D1 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv, 0).repeat(len(split_idxs), 1)
S = D1 * W * D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda() - self.alpha * S + self.eps), Y)
pred = torch.argmax(pred, dim=1)
# lab=torch.tensor([anchor_idx.item(),anchor_idx_2.item()]).cuda()
lab = torch.tensor(anchor_idxs).cuda()
labels[split_idxs] = lab[pred]
#labels[idx] = idx
# append anchor[for two hop]
if len(anchor_idxs)==self.split_num:
ori_knn_neighbor[n, -self.split_num:] = torch.tensor(anchor_idxs)
else:
ori_knn_neighbor[n, -self.split_num:-self.split_num+len(anchor_idxs)]=torch.tensor(anchor_idxs)
print('{} | sub split idxs:'.format(len(split_idxs)),len(anchor_idxs))
# for debug
# if print_cnts==0:
# print(pred)
# print_cnts=1
else:
print_cnts = 0
for n, idx in enumerate(indexes):
# reduce duplicate
batch_idx = all_idxs[labels == labels[idx]]
# if len(batch_idx)>3000:
# print('------------>3000-----------')
# import pdb;pdb.set_trace()
batch_sub_label = sub_labels[batch_idx]
split_idxs, split_ind, split_cnts = np.unique(batch_sub_label.cpu().numpy(), return_index=True,
return_counts=True)
split_idxs = split_idxs.tolist() # sub label
if len(split_idxs) <= self.split_num:
continue
anchor_idxs = []
anchor_indices = []
inter=list(set(split_idxs.tolist()) & set(sub_labels[ori_knn_neighbor[n]].tolist()))
if len(inter)<=1:
continue
split_feat=features[inter]
split_map = {}
for sp_n, sp_idx in enumerate(split_idxs.tolist()):
split_map[sp_idx] = sp_n
split_sim = features[sub_labels[idx]].unsqueeze(0).mm(split_feat.t())
if torch.min(split_sim) >= self.anchor_thre:
continue
split_num += 1
anchor_idxs.append(idx.item()) # index self
anchor_indices.append(split_map[idx.item()])
anchor_idx = inter[torch.argmin(split_sim).item()]
anchor_idxs.append(anchor_idx)
anchor_indices.append(split_map[anchor_idx])
for sp in range(2, self.split_num):
# fix bug 20210116
split_sim_2 = features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2 < split_sim] = split_sim[split_sim_2 < split_sim]
if torch.min(split_sim_2)>=self.anchor_thre:
continue
anchor_idx = inter[torch.argmin(split_sim_2).item()]
anchor_idxs.append(anchor_idx)
anchor_indices.append(split_map[anchor_idx])
split_sim = split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==sub_labels[idx]:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
split_feat = features[split_idxs]
Y = torch.zeros((len(split_idxs), len(anchor_idxs))).cuda()
Y[anchor_indices, torch.arange(len(anchor_idxs))] = 1
# if len(batch_idx)>3000:
# print('------------>3000-----------')
# import pdb;pdb.set_trace()
# 104-->fix bug
W = split_feat.mm(split_feat.t())
W = torch.exp(-(2 - 2 * W))
mask = (1 - torch.eye(len(split_feat))).cuda()
W *= mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
D1 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv, 0).repeat(len(split_idxs), 1)
S = D1 * W * D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda() - self.alpha * S + self.eps), Y)
pred = torch.argmax(pred, dim=1)
if len(batch_idx)>3000:
print('pred:',pred)
# labs=torch.tensor([labels[idx].item(),outliers_label[n].item()]).cuda()
labs = outliers_label[torch.arange(n, len(outliers_label), step=len(indexes))]
#ori_label = labels[idx].item()
for sub, pre in zip(split_idxs, pred):
labels[batch_idx[batch_sub_label == sub]] = labs[pre]
# labels[batch_idx[batch_sub_label == sub_labels[
# idx]]] = ori_label # outliers_label[(self.split_num-1)*len(indexes)+n]
print('{}| clu split idxs:{} | {}'.format(len(batch_idx),len(anchor_idxs),split_cnts[anchor_indices]))
# if print_cnts==0:
# print(pred)
# print_cnts=1
# split_nums.append([len(split_idxs)-torch.sum(pred).item(),torch.sum(pred).item()])
elif self.method == 8: # method1+anchor thre+anchor idx in nei+wo num restriction
split_num = 0
if sub_level:
print_cnts = 0
for n, idx in enumerate(indexes):
split_idxs = all_idxs[labels == labels[idx]]
# if len(split_idxs) <= self.split_num:
# continue
anchor_idxs = []
anchor_indices = []
inter=list(set(split_idxs.tolist()) & set(ori_knn_neighbor[n].tolist()))
if len(inter)<=1:
continue
split_map={}
for sp_n,sp_idx in enumerate(split_idxs.tolist()):
split_map[sp_idx]=sp_n
# 0
split_feat=features[inter]
split_sim = features[idx].unsqueeze(0).mm(split_feat.t())
if torch.min(split_sim)>=self.anchor_thre:
continue
split_num += 1
anchor_idxs.append(idx.item())#index self
anchor_indices.append(split_map[idx.item()])
anchor_idx = inter[torch.argmin(split_sim).item()]
anchor_idxs.append(anchor_idx)
anchor_indices.append(split_map[anchor_idx])
for sp in range(2, len(inter)):
split_sim_2 = features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2 < split_sim] = split_sim[split_sim_2 < split_sim]
if torch.min(split_sim_2)>=self.anchor_thre:
continue
anchor_idx = inter[torch.argmin(split_sim_2).item()]
anchor_idxs.append(anchor_idx)
anchor_indices.append(split_map[anchor_idx])
split_sim = split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==idx:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# fix bug 104
split_feat = features[labels == labels[idx]]
Y = torch.zeros((len(split_idxs), len(anchor_idxs))).cuda()
Y[anchor_indices, torch.arange(len(anchor_idxs))] = 1
# i_0,i_1=torch.argmin(split_sim),torch.argmin(split_sim_2)
# Y[i_0,0]=1
# Y[i_1,1]=1
# 104-->fix bug
W = torch.exp(split_feat.mm(split_feat.t()))
mask = (1 - torch.eye(len(split_feat))).cuda()
W *= mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
D1 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv, 0).repeat(len(split_idxs), 1)
S = D1 * W * D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda() - self.alpha * S + self.eps), Y)
pred = torch.argmax(pred, dim=1)
# lab=torch.tensor([anchor_idx.item(),anchor_idx_2.item()]).cuda()
lab = torch.tensor(anchor_idxs).cuda()
labels[split_idxs] = lab[pred]
#labels[idx] = idx
# append anchor[for two hop]
if len(anchor_idxs)==self.split_num:
ori_knn_neighbor[n, -self.split_num:] = torch.tensor(anchor_idxs)
else:
ori_knn_neighbor[n, -self.split_num:-self.split_num+len(anchor_idxs)]=torch.tensor(anchor_idxs)
print('{} | sub split idxs:'.format(len(split_idxs)),len(anchor_idxs))
# for debug
# if print_cnts==0:
# print(pred)
# print_cnts=1
else:
empty_label = set(torch.arange(labels.max() + 1).tolist()) - set(labels.tolist())
print_cnts = 0
for n, idx in enumerate(indexes):
empty_label_list = list(empty_label)
# reduce duplicate
batch_idx = all_idxs[labels == labels[idx]]
# if len(batch_idx)>3000:
# print('------------>3000-----------')
# import pdb;pdb.set_trace()
batch_sub_label = sub_labels[batch_idx]
split_idxs, split_ind, split_cnts = np.unique(batch_sub_label.cpu().numpy(), return_index=True,
return_counts=True)
split_idxs = split_idxs.tolist() # sub label
# if len(split_idxs) <= self.split_num:
# continue
anchor_idxs = []
anchor_indices = []
inter=list(set(split_idxs) & set(sub_labels[ori_knn_neighbor[n]].tolist()))
if len(inter)<=1:
continue
split_feat=features[inter]
split_map = {}
for sp_n, sp_idx in enumerate(split_idxs.tolist()):
split_map[sp_idx] = sp_n
split_sim = features[sub_labels[idx]].unsqueeze(0).mm(split_feat.t())
if torch.min(split_sim) >= self.anchor_thre:
continue
split_num += 1
anchor_idxs.append(idx.item()) # index self
anchor_indices.append(split_map[idx.item()])
anchor_idx = inter[torch.argmin(split_sim).item()]
anchor_idxs.append(anchor_idx)
anchor_indices.append(split_map[anchor_idx])
for sp in range(2, len(inter)):
# fix bug 20210116
split_sim_2 = features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2 < split_sim] = split_sim[split_sim_2 < split_sim]
if torch.min(split_sim_2)>=self.anchor_thre:
continue
anchor_idx = inter[torch.argmin(split_sim_2).item()]
anchor_idxs.append(anchor_idx)
anchor_indices.append(split_map[anchor_idx])
split_sim = split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==sub_labels[idx]:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
split_feat = features[split_idxs]
Y = torch.zeros((len(split_idxs), len(anchor_idxs))).cuda()
Y[anchor_indices, torch.arange(len(anchor_idxs))] = 1
# if len(batch_idx)>3000:
# print('------------>3000-----------')
# import pdb;pdb.set_trace()
# 104-->fix bug
W = split_feat.mm(split_feat.t())
W = torch.exp(-(2 - 2 * W))
mask = (1 - torch.eye(len(split_feat))).cuda()
W *= mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
D1 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv, 0).repeat(len(split_idxs), 1)
S = D1 * W * D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda() - self.alpha * S + self.eps), Y)
pred = torch.argmax(pred, dim=1)
if len(batch_idx)>3000:
print('pred:',pred)
# labs=torch.tensor([labels[idx].item(),outliers_label[n].item()]).cuda()
if len(empty_label_list)>=len(anchor_idxs):
labs=empty_label_list[:len(anchor_idxs)]
empty_label=empty_label-set(labs)
else:
labs=torch.arange(labels.max() + 1,labels.max() + 1+len(anchor_idxs))
#ori_label = labels[idx].item()
for sub, pre in zip(split_idxs, pred):
labels[batch_idx[batch_sub_label == sub]] = labs[pre]
# labels[batch_idx[batch_sub_label == sub_labels[
# idx]]] = ori_label # outliers_label[(self.split_num-1)*len(indexes)+n]
print('{}| clu split idxs:{} | {}'.format(len(batch_idx),len(anchor_idxs),split_cnts[anchor_indices]))
# if print_cnts==0:
# print(pred)
# print_cnts=1
# split_nums.append([len(split_idxs)-torch.sum(pred).item(),torch.sum(pred).item()])
elif self.method == 9: #final one
split_num = 0
if sub_level:
for n, idx in enumerate(indexes):
split_idxs = all_idxs[labels == labels[idx]]
if len(split_idxs) <= self.split_num:
continue
split_feat = features[labels == labels[idx]]
anchor_idxs = []
anchor_indices = []
# 0
split_sim = features[idx].unsqueeze(0).mm(split_feat.t())
if torch.min(split_sim) >= self.anchor_thre:
continue
split_num += 1
anchor_idxs.append(split_idxs[torch.argmax(split_sim)].item()) # index self
anchor_indices.append(torch.argmax(split_sim).item())
anchor_idx = split_idxs[torch.argmin(split_sim)]
anchor_idxs.append(anchor_idx.item())
anchor_indices.append(torch.argmin(split_sim).item())
for sp in range(2, self.split_num):
split_sim_2 = features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2 < split_sim] = split_sim[split_sim_2 < split_sim]
if torch.min(split_sim_2) >= self.anchor_thre:
continue
anchor_idx = split_idxs[torch.argmin(split_sim_2)]
anchor_idxs.append(anchor_idx.item())
anchor_indices.append(torch.argmin(split_sim_2).item())
split_sim = split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==idx:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# fix bug 104
Y = torch.zeros((len(split_idxs), len(anchor_idxs))).cuda()
Y[anchor_indices, torch.arange(len(anchor_idxs))] = 1
# i_0,i_1=torch.argmin(split_sim),torch.argmin(split_sim_2)
# Y[i_0,0]=1
# Y[i_1,1]=1
# 104-->fix bug
W = torch.exp(split_feat.mm(split_feat.t()))
#0227#####
if W.size(-1)>self.connect_num:
topk, indices = torch.topk(W, self.connect_num, dim=-1)
mask_top = torch.zeros_like(W)
mask_top = mask_top.scatter(-1, indices, 1)
mask_top = ((mask_top > 0) & (mask_top.t() > 0)).type(torch.float32)
W *= mask_top
############
W = torch.exp(-(2 - 2 * W))
mask = (1 - torch.eye(len(split_feat))).cuda()
W *= mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
D1 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv, 0).repeat(len(split_idxs), 1)
S = D1 * W * D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda() - self.alpha * S + self.eps), Y)
pred = torch.argmax(pred, dim=1)
# lab=torch.tensor([anchor_idx.item(),anchor_idx_2.item()]).cuda()
lab = torch.tensor(anchor_idxs).cuda()
labels[split_idxs] = lab[pred]
# labels[idx] = idx
# append anchor[for two hop]
# if len(anchor_idxs) == self.split_num:
# ori_knn_neighbor[n, -self.split_num:] = torch.tensor(anchor_idxs)
# else:
# ori_knn_neighbor[n, -self.split_num:-self.split_num + len(anchor_idxs)] = torch.tensor(
# anchor_idxs)
print('{} | sub split idxs:'.format(len(split_idxs)), len(anchor_idxs))
# for debug
# if print_cnts==0:
# print(pred)
# print_cnts=1
else:
print_cnts = 0
for n, idx in enumerate(indexes):
# reduce duplicate
batch_idx = all_idxs[labels == labels[idx]]
# if len(batch_idx)>3000:
# print('------------>3000-----------')
# import pdb;pdb.set_trace()
batch_sub_label = sub_labels[batch_idx]
split_idxs, split_ind, split_cnts = np.unique(batch_sub_label.cpu().numpy(), return_index=True,
return_counts=True)
split_idxs = split_idxs.tolist() # sub label
if len(split_idxs) <= self.split_num:
continue
anchor_idxs = []
anchor_indices = []
split_feat = features[split_idxs]
split_sim = features[sub_labels[idx]].unsqueeze(0).mm(split_feat.t())
if torch.min(split_sim) >= self.anchor_thre:
continue
split_num += 1
anchor_idxs.append(split_idxs[torch.argmax(split_sim).item()]) # index self
anchor_indices.append(torch.argmax(split_sim).item())
anchor_idx = split_idxs[torch.argmin(split_sim)]
anchor_idxs.append(anchor_idx)
anchor_indices.append(torch.argmin(split_sim).item())
for sp in range(2, self.split_num):
# fix bug 20210116
split_sim_2 = features[anchor_idx].unsqueeze(0).mm(split_feat.t())
split_sim_2[split_sim_2 < split_sim] = split_sim[split_sim_2 < split_sim]
if torch.min(split_sim_2) >= self.anchor_thre:
continue
anchor_idx = split_idxs[torch.argmin(split_sim_2)]
anchor_idxs.append(anchor_idx)
anchor_indices.append(torch.argmin(split_sim_2).item())
split_sim = split_sim_2.clone()
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
# if anchor_idx_2==sub_labels[idx]:
# split_sim_2[0,torch.argmin(split_sim_2)]=1
# anchor_idx_2=split_idxs[torch.argmin(split_sim_2)]
Y = torch.zeros((len(split_idxs), len(anchor_idxs))).cuda()
Y[anchor_indices, torch.arange(len(anchor_idxs))] = 1
# if len(batch_idx)>3000:
# print('------------>3000-----------')
# import pdb;pdb.set_trace()
# 104-->fix bug
W = split_feat.mm(split_feat.t())
# 0227#####
if W.size(-1) > self.connect_num:
topk, indices = torch.topk(W, self.connect_num, dim=-1)
mask_top = torch.zeros_like(W)
mask_top = mask_top.scatter(-1, indices, 1)
mask_top = ((mask_top > 0) & (mask_top.t() > 0)).type(torch.float32)
W *= mask_top
############
W = torch.exp(-(2 - 2 * W))
mask = (1 - torch.eye(len(split_feat))).cuda()
W *= mask
D = W.sum(0)
D_sqrt_inv = torch.sqrt(1.0 / (D + self.eps))
D1 = torch.unsqueeze(D_sqrt_inv, 1).repeat(1, len(split_idxs))
D2 = torch.unsqueeze(D_sqrt_inv, 0).repeat(len(split_idxs), 1)
S = D1 * W * D2
pred = torch.matmul(torch.inverse(torch.eye(len(split_idxs)).cuda() - self.alpha * S + self.eps), Y)
pred = torch.argmax(pred, dim=1)
# if len(batch_idx) > 3000:
# print('pred:', pred)
# labs=torch.tensor([labels[idx].item(),outliers_label[n].item()]).cuda()
labs = outliers_label[torch.arange(n, len(outliers_label), step=len(indexes))]
# ori_label = labels[idx].item()
for sub, pre in zip(split_idxs, pred):
labels[batch_idx[batch_sub_label == sub]] = labs[pre]
# labels[batch_idx[batch_sub_label == sub_labels[
# idx]]] = ori_label # outliers_label[(self.split_num-1)*len(indexes)+n]
# add split guys
# split_ind = torch.from_numpy(split_ind).cuda()
# if len(anchor_idxs) == self.split_num:
# ori_knn_neighbor[n, -self.split_num:] = batch_idx[split_ind[anchor_indices]]
# else:
# ori_knn_neighbor[n, -self.split_num:-self.split_num + len(anchor_idxs)] = batch_idx[
# split_ind[anchor_indices]]
print('{}| clu split idxs:{} | {}'.format(len(batch_idx), len(anchor_idxs),
split_cnts[anchor_indices]))
# if print_cnts==0:
# print(pred)
# print_cnts=1
# split_nums.append([len(split_idxs)-torch.sum(pred).item(),torch.sum(pred).item()])
return ori_knn_neighbor
class Hierarchy_GCN(object):
    """Hierarchical label-propagation driver.

    Coordinates three levels of label propagation (point -> sub-cluster ->
    cluster) plus a split module, mutating label banks stored on `memory`
    in place.  All tensors are moved to CUDA.
    """
    # NOTE(review): `thre` and `debug_label` use mutable default arguments
    # (shared across instances if ever mutated) — confirm and consider
    # None-sentinels.  Also `thre=[0,06.15,0.1]` yields thre[1]==6.15,
    # which as a similarity bias looks like a typo for 0.15 — confirm.
    def __init__(self,point_level_lp,sub_cluster_level_lp,cluster_level_lp,split_lp,utils,neighbor_num=64,thre=[0,06.15,0.1],
                 debug_label=[],merge_wo_outlier=0,jaccard_debug=0):
        self.point_level_lp=point_level_lp        # point-level LP module
        self.sub_cluster_level_lp=sub_cluster_level_lp  # sub-cluster-level LP module
        self.cluster_level_lp=cluster_level_lp    # cluster-level LP module
        self.split_lp=split_lp                    # split module (anchors + LP split)
        self.utils=utils #utils
        self.neighbor_num=neighbor_num #knn
        self.thre=thre                            # biases: [point, sub, cluster]
        self.debug_label=debug_label
        self.debug_label_num=None
        self.merge_wo_outlier=merge_wo_outlier    # restrict merging to confident (non-outlier) cores
        self.two_hop=0                            # 0 = one-hop KNN neighborhood
        self.jaccard_debug=jaccard_debug          # 1 = skip the sub-cluster stage

    def train(self,s_indexes,memory,train,f_s):
        """Source-domain training step.

        Computes the three LP losses plus the split-GCN loss on the batch
        `s_indexes`, and momentum-updates `memory.s_features` with `f_s`
        (the feature update happens even when `train` is falsy).
        Returns (loss_all, loss_split_gcn), or two zero tensors when not
        training.
        """
        if train:
            #for loss_s backward
            # momentum-blended features; normalized without tracking grads
            cal_feat=memory.momentum*f_s+(1. -memory.momentum)*memory.s_features[s_indexes]
            with torch.no_grad():
                norm=cal_feat.norm(dim=1).unsqueeze(1)
                cal_feat/=norm
            #cal knn neighbor
            ori_0=compute_knn(memory.s_features.clone(),k1=self.neighbor_num)
            ori_knn_neighbor=torch.from_numpy(ori_0[s_indexes.cpu().numpy(),:]).cuda()
            #compute gt: per-row agreement of neighbor labels with the anchor (column 0)
            all_gt_label=memory.s_label[ori_knn_neighbor.view(-1)].view(len(s_indexes),-1)
            all_gt_sub_label=memory.s_sub_label[ori_knn_neighbor.view(-1)].view(len(s_indexes),-1)
            #all_gt_sub_label=memory.s_sub_label[ori_knn_neighbor.view(-1)].view(len(s_indexes),-1)
            gt_conf=(all_gt_label==all_gt_label[:,0].unsqueeze(1).expand_as(all_gt_label)).float()
            loss_point_level=self.point_level_lp(s_indexes,memory.s_features,self.neighbor_num,ori_0,ori_knn_neighbor,gt_conf,f_s=cal_feat,train=1)
            loss_sub_level=self.sub_cluster_level_lp(s_indexes,memory.s_features,self.neighbor_num,ori_0,ori_knn_neighbor,gt_conf,f_s=cal_feat,sub_label=memory.s_sub_label,gt_sub_label=all_gt_sub_label,gt_label=all_gt_label,train=1)
            loss_cluster_level=self.cluster_level_lp(s_indexes,memory.s_features,self.neighbor_num,ori_0,ori_knn_neighbor,gt_conf,f_s=cal_feat,labels=memory.s_label,gt_label=all_gt_label,train=1)
            #update feat
        # momentum update of the source feature bank (runs regardless of `train`)
        with torch.no_grad():
            for x, y in zip(f_s, s_indexes):
                memory.s_features[y] = memory.momentum * memory.s_features[y] + (1. - memory.momentum) * x
                memory.s_features[y] /= memory.s_features[y].norm()
        if train:
            #train split
            # NOTE(review): `self.split_gcn` is not assigned in the visible
            # __init__ — presumably attached externally; confirm.
            loss_split_gcn=self.split_gcn(s_indexes,memory.s_features,memory.s_label,train=1,ori_knn_neighbor=ori_knn_neighbor,gt=gt_conf.long())
            loss_all=loss_point_level+loss_sub_level+loss_cluster_level
            #print('point:{} sub:{} clu: {}'.format(loss_point_level,loss_sub_level,loss_cluster_level))
            return loss_all,loss_split_gcn#[loss_point_level,loss_sub_level,loss_cluster_level]
        else:
            return torch.tensor(0),torch.tensor(0)

    def inference(self,t_indexes,memory,infer):
        """Target-domain inference step.

        Resets the batch to outlier labels, then runs point-level LP,
        sub-cluster-level split+LP (unless `jaccard_debug`), and
        cluster-level split+LP, merging/splitting labels in `memory`
        in place.  No return value.
        """
        torch.cuda.empty_cache()
        # (commented-out debug: per-cluster accuracy check before update)
        #cal knn
        cal_feat=memory.features[t_indexes]
        if self.two_hop:
            ori_0 = compute_knn(memory.features, k1=20)#20*20
        else:
            ori_0=compute_knn(memory.features,k1=self.neighbor_num)
        ori_knn_neighbor=torch.from_numpy(ori_0[t_indexes.cpu().numpy(),:]).cuda()
        ########label-->outlier
        #step1 change sub label
        # points whose sub-label is their own index anchor a sub-cluster;
        # re-anchor those sub-clusters onto another member before resetting
        ori_labels=memory.t_sub_label[t_indexes]
        change_sub_label=t_indexes[ori_labels==t_indexes]
        memory.t_sub_label[t_indexes]=0
        if len(change_sub_label)>0:
            all_label=torch.arange(len(memory.features)).cuda()
            for change_lab in change_sub_label:
                change_idx=all_label[memory.t_sub_label==change_lab]
                if len(change_idx)>0:
                    memory.t_sub_label[change_idx]=change_idx[0]
        # each batch point becomes its own sub-cluster again
        memory.t_sub_label[t_indexes]=t_indexes
        #step2 change label
        # recycle unused label ids where possible, otherwise extend the range
        empty_label=set(torch.arange(memory.labels.max()+1).tolist())-set(memory.labels.tolist())
        if len(empty_label)<2*len(t_indexes):
            outliers_label=torch.arange(memory.labels.max()+1,memory.labels.max()+1+2*len(t_indexes)).cuda()
        else:
            empty_label=list(empty_label)[-2*len(t_indexes):]
            outliers_label=torch.tensor(empty_label).cuda()
        memory.labels[t_indexes]=outliers_label[:len(t_indexes)]
        ###########################
        #point level pred
        all_pred,point_W,point_neighbor=self.point_level_lp(t_indexes,memory.features,self.neighbor_num,ori_0,ori_knn_neighbor,f_s=cal_feat,train=0,two_hop=self.two_hop)
        near_neigh,merge_idxs=self.point_level_merge_split(t_indexes,all_pred,point_neighbor,memory)
        # (commented-out: pad point_neighbor for the split slots when not two-hop)
        if self.jaccard_debug !=1:
            # sub
            # split sub cluster 1-->2 split gcn
            point_neighbor = self.split_lp(t_indexes, memory.features, memory.t_sub_label, sub_level=1,
                                           ori_knn_neighbor=point_neighbor, two_hop=self.two_hop, point_pred=all_pred)
            all_gt_sub_label=memory.t_sub_label[point_neighbor.view(-1)].view(len(t_indexes),-1)
            #self.split_gcn(t_indexes,memory.features,memory.t_sub_label,0,sub_label=1)
            # sub-cluster-level LP reuses the cluster-level module with step=1
            all_pred_sub,sub_sum,_,sub_mapping_0,sub_mapping_1,sub_mapping_2=self.cluster_level_lp(t_indexes,memory.features,self.neighbor_num,ori_0,point_neighbor,f_s=cal_feat,labels=memory.t_sub_label,gt_label=all_gt_sub_label,
                                                                         debug_label=self.debug_label,bias=self.thre[1],step=1,point_W=point_W,two_hop=self.two_hop,memory=memory,point_pred=all_pred)
            self.sub_cluster_level_merge_split(t_indexes,memory,all_pred_sub,point_neighbor,sub_mapping_0,sub_mapping_1,sub_mapping_2,near_neigh,merge_idxs)
        else:
            sub_sum=memory.features
        #clu level pred
        #outliers
        # reserve split_num*batch + 1 spare labels for the cluster-level split
        empty_label=set(torch.arange(memory.labels.max()+1).tolist())-set(memory.labels.tolist())
        if len(empty_label)<self.split_lp.split_num*len(t_indexes)+1:
            outliers_label=torch.arange(memory.labels.max()+1,memory.labels.max()+2+self.split_lp.split_num*len(t_indexes)).cuda()
        else:
            empty_label=list(empty_label)[-(1+self.split_lp.split_num*len(t_indexes)):]
            outliers_label=torch.tensor(empty_label).cuda()
        point_neighbor=self.split_lp(t_indexes,sub_sum,memory.labels,sub_level=0,sub_labels=memory.t_sub_label,outliers_label=outliers_label,ori_knn_neighbor=point_neighbor,memory=memory,two_hop=self.two_hop,point_pred=all_pred,point_W=point_W)
        all_gt_label=memory.labels[point_neighbor.view(-1)].view(len(t_indexes),-1)
        #self.split_gcn(t_indexes,memory.features,memory.labels,0,sub_label=0,outliers_label=outliers_label[-len(t_indexes):],sub_labels=memory.t_sub_label)
        all_pred_clu,_,_,clu_mapping_0,clu_mapping_1,clu_mapping_2=self.cluster_level_lp(t_indexes,memory.features,self.neighbor_num,ori_0,point_neighbor,f_s=cal_feat, labels=memory.labels,gt_label=all_gt_label,
                                                                         debug_label=self.debug_label,bias=self.thre[2],step=2,point_W=point_W,two_hop=self.two_hop,memory=memory,point_pred=all_pred)
        self.cluster_level_merge_split(t_indexes,memory,all_pred_clu,point_neighbor,clu_mapping_0,clu_mapping_1,clu_mapping_2,near_neigh,merge_idxs)
        #import pdb;pdb.set_trace()
        #cluster acc
        # (commented-out debug: per-cluster accuracy check after update)
        # free the large intermediates explicitly
        if self.jaccard_debug !=1:
            del sub_mapping_0,sub_mapping_1,all_pred_sub
        del clu_mapping_0,clu_mapping_1,all_pred_clu,sub_sum,point_neighbor,ori_knn_neighbor

    def point_level_merge_split(self,indexes,all_pred,ori_knn_neighbor,memory):
        """Merge each batch point into its most confident near neighbor.

        Returns (near_neig, merge_idx): the chosen neighbor per point and
        the subset of `indexes` that was actually merged.
        """
        topk=10 # indicate the chaos
        bias=self.thre[0]
        conf,near_nei=torch.max(all_pred[:,0],dim=1)
        near_neig=ori_knn_neighbor[torch.arange(len(indexes)),near_nei]
        #bias=all_pred[:,0,-1]
        #merge
        merge_idx=indexes[(near_nei<topk) & (conf>bias) & (near_neig>=memory.source_classes)] #wo consider source domain
        merge_nei=near_neig[(near_nei<topk) & (conf>bias) & (near_neig>=memory.source_classes)].long()
        # repeat the assignment so that chains of in-batch merges propagate
        itera = len(set(near_neig.tolist()) & set(indexes.tolist()))#fix bug
        # if itera>1:
        #     print('--------itera:{}-------'.format(itera))
        for i in range(itera + 1):
            memory.t_sub_label[merge_idx] = memory.t_sub_label[merge_nei]
            memory.labels[merge_idx] = memory.labels[merge_nei]
        if self.merge_wo_outlier:
            # clusters of size < 2 are treated as outliers downstream
            unq_lab,unq_cnt=np.unique(memory.labels.cpu().numpy(),return_counts=True)
            self.outlier_clu=set(unq_lab[unq_cnt<2].tolist())
        #outlier-->keep ori label
        #print('outliers num:',len(indexes)-len(merge_idx))
        #print('outliers:',list(set(indexes.tolist())-set(merge_idx.tolist())))
        return near_neig,merge_idx

    def sub_cluster_level_merge_split(self,indexes,memory,all_pred_sub,ori_knn_neighbor,sub_mapping_0,sub_mapping_1,sub_mapping_2,near_neighbor,merge_idxs):
        """Merge sub-clusters whose LP score exceeds thre[1] via a union map.

        `merge_map` maps old sub-labels to the representative label; it is
        built per batch point and applied to `memory.t_sub_label` once at
        the end.
        """
        #bias=all_pred_sub[:,0,-1]
        bias=self.thre[1]
        sub_lab=sub_mapping_0
        #lab=memory.labels[sub_lab.view(-1)].view(len(indexes),-1)
        #lab=memory.labels[ori_knn_neighbor.view(-1)].view(len(indexes),-1)
        #####merge
        merge_map={}
        for i in range(len(indexes)):
            if self.merge_wo_outlier and indexes[i] not in merge_idxs:
                continue
            #import pdb;pdb.set_trace()
            # (commented-out alternative: index candidates via ori_knn_neighbor)
            lab=memory.labels[sub_lab[i]]
            # candidate sub-clusters: high LP score AND same cluster label
            merge_idx=set(sub_lab[i][(all_pred_sub[i,0,:len(sub_lab[i])]>bias) & (lab==memory.labels[indexes[i]])].tolist())
            merge_idx.add(memory.t_sub_label[indexes[i]].item())
            merge_idx=list(merge_idx)
            if memory.t_sub_label[near_neighbor[i]].item() not in merge_idx: #reliable neighbor
                continue
            if len(merge_idx)>1:
                merge_label_0=-1
                inter=set(merge_idx) & set(merge_map.keys())
                if len(inter)>0: #
                    # reuse an existing representative and collapse any
                    # previously-distinct representatives onto it
                    inter_label=list(inter)
                    merge_label_0=merge_map[inter_label[0]]
                    if len(inter_label)>1:
                        change_guys=[]
                        for label in inter_label:
                            change_guys.append(merge_map[label])
                        for change_label,update_label in merge_map.items():
                            if update_label in change_guys:
                                merge_map[change_label]=merge_label_0
                merge_label_0=merge_idx[0] if merge_label_0==-1 else merge_label_0
                for label in merge_idx:
                    merge_map[label]=merge_label_0
        # apply the accumulated relabeling in one pass
        for change_label,update_label in merge_map.items():
            memory.t_sub_label[memory.t_sub_label==int(change_label)]=int(update_label)
        print('sub merge:',len(merge_map))
        #split cluster 1-->2 split gcn

    def cluster_level_merge_split(self,indexes,memory,all_pred_clu,ori_knn_neighbor,clu_mapping_0,clu_mapping_1,clu_mapping_2,near_neighbor,merge_idxs):
        """Merge clusters whose LP score exceeds thre[2], same union-map
        scheme as the sub-cluster level but applied to `memory.labels`.
        """
        #bias=all_pred_clu[:,0,-1]
        #lab=memory.labels[ori_knn_neighbor.view(-1)].view(len(indexes),-1)
        bias=self.thre[2]
        lab=clu_mapping_0
        #####merge
        merge_map={}
        for i in range(len(indexes)):
            if self.merge_wo_outlier and indexes[i] not in merge_idxs: #only consider merge idx as core
                continue
            # candidate clusters: high LP score AND target-domain label
            merge_idx=set(lab[i][(all_pred_clu[i,0,:len(lab[i])]>bias) & (lab[i]>=memory.source_classes)].tolist())
            merge_idx.add(memory.labels[indexes[i]].item())
            if self.merge_wo_outlier:
                merge_idx=(merge_idx-self.outlier_clu)
            merge_idx=list(merge_idx)
            if memory.labels[near_neighbor[i]].item() not in merge_idx:
                continue
            # if len(merge_idx)>10:
            #     print('---->10-------')
            #     import pdb;pdb.set_trace()
            if len(merge_idx)>1:
                merge_label_0=-1
                inter=set(merge_idx) & set(merge_map.keys())
                if len(inter)>0: #
                    inter_label=list(inter)
                    merge_label_0=merge_map[inter_label[0]]
                    if len(inter_label)>1:
                        change_guys=[]
                        for label in inter_label:
                            change_guys.append(merge_map[label])
                        for change_label,update_label in merge_map.items():
                            if update_label in change_guys:
                                merge_map[change_label]=merge_label_0
                merge_label_0=merge_idx[0] if merge_label_0==-1 else merge_label_0
                for label in merge_idx:
                    merge_map[label]=merge_label_0
        # apply the accumulated relabeling in one pass
        for change_label,update_label in merge_map.items():
            memory.labels[memory.labels==int(change_label)]=int(update_label)
        print('clu merge:',len(merge_map))

    def postprocess(self,s_indexes,memory):
        """Refresh the source-domain sub-cluster labels after a step."""
        self.utils.update_sub_cluster_label(s_indexes,memory)
        #step1 merge&split
        #step2 update sub cluster label-->src
# others: module-level factory helpers follow
def p_lp(alpha, method, **kwargs):
    """Factory: build a Point_Level_LP module and move it to the GPU."""
    lp_model = Point_Level_LP(alpha=alpha, method=method)
    lp_model.cuda()
    return lp_model
def s_lp(alpha, topk_num, method, **kwargs):
    """Factory: build a Sub_Cluster_Level_LP module and move it to the GPU."""
    lp_model = Sub_Cluster_Level_LP(alpha=alpha, topk_num=topk_num, method=method)
    lp_model.cuda()
    return lp_model
def c_lp(alpha, topk_num, method, point_wei, **kwargs):
    """Factory: build a Cluster_Level_LP module and move it to the GPU."""
    lp_model = Cluster_Level_LP(alpha=alpha, topk_num=topk_num,
                                method=method, point_wei=point_wei)
    lp_model.cuda()
    return lp_model
def split_gcn(feature_dim, nhid, feature_size, source_classes, nclass=1, dropout=0., cal_num=30, **kwargs):
    """Factory: build a Split_GCN and move it to the GPU."""
    gcn = Split_GCN(feature_dim=feature_dim, nhid=nhid,
                    feature_size=feature_size, source_classes=source_classes,
                    nclass=nclass, dropout=dropout, cal_num=cal_num)
    gcn.cuda()
    return gcn
def split_lp(alpha, split_num, anchor_thre, **kwargs):
    """Factory: build a Split_LP and move it to the GPU."""
    lp_model = Split_LP(alpha=alpha, split_num=split_num, anchor_thre=anchor_thre)
    lp_model.cuda()
    return lp_model
class Utils(object): #in order to update * in memory
    """Label-maintenance helpers for the clustering feature memory.

    A sub-cluster is identified by the global index of one of its member
    samples (optionally shifted by ``start``).  The "density" of a sample is
    the number of samples whose similarity to it exceeds ``density_sim``;
    pairs whose similarity exceeds ``density_core_thre`` are treated as
    belonging to the same core.
    """
    def __init__(self,k1,k2,thre):
        # k1/k2: neighbourhood sizes forwarded to the jaccard re-ranking
        # (used only by initialize_sub_cluster_label_ori).
        self.k1=k1
        self.k2=k2
        # Minimum density for a sample to count as a high-density core point.
        self.thre=thre
        self.density_sim=0.5
        self.density_core_thre=0.7 #point sim>thre-->the same core
    def initialize_sub_cluster_label(self,label,sub_cluster_label,features,start=0):
        """Assign an initial sub-cluster label to every sample, in place.

        Args:
            label: 1-D tensor of cluster ids, one per sample.
            sub_cluster_label: 1-D tensor updated in place with sub-cluster ids.
            features: (N, D) feature matrix; similarity is a plain dot product,
                so rows are presumably L2-normalised — TODO confirm at caller.
            start: offset added to every produced sub-cluster id.
        """
        print('initialize sub cluster bank...')
        # For very large banks, compute the N x N similarity matrix on the CPU
        # to avoid the CUDA memory cost; only the reductions go back to GPU.
        if len(features)>20000:
            tmp=features.cpu()
            sim=tmp.mm(tmp.t())
            density=torch.sum(torch.gt(sim,self.density_sim),dim=1).cuda()
            density_core=torch.gt(sim,self.density_core_thre).cuda()
        else:
            sim=features.mm(features.t())
            density=torch.sum(torch.gt(sim,self.density_sim),dim=1)
            density_core=torch.gt(sim,self.density_core_thre)
        all_idx=torch.arange(len(label)).cuda()
        unique_label=list(set(label.tolist()))
        for un_idx,i in enumerate(unique_label):
            if un_idx%100==0:
                print('[{}/{}]'.format(un_idx,len(unique_label)))
            i_idx=(all_idx[label==i])
            i_density=density[i_idx]
            if torch.sum(torch.ge(i_density,self.thre))>0:
                #find connection
                high_density_idx=i_idx[i_density>=self.thre]
                i_density_core=density_core[high_density_idx][:,high_density_idx]
                sub_cluster_label[i_idx]=-1 #clean
                #core
                # Greedy single pass: each high-density point joins the first
                # already-labelled core neighbour; otherwise it starts a new
                # core labelled with its own global index.
                for left_id in range(len(high_density_idx)):
                    neighbor=high_density_idx[i_density_core[left_id]>0]
                    if len(neighbor)>1:
                        neighbor_label=sub_cluster_label[neighbor]
                        neighbor_label=neighbor_label[neighbor_label>-1]
                        if len(neighbor_label)>0:
                            sub_cluster_label[high_density_idx[left_id].item()]=neighbor_label[0].item()
                        else:
                            sub_cluster_label[high_density_idx[left_id].item()]=high_density_idx[left_id].item()
                    else:
                        sub_cluster_label[high_density_idx[left_id].item()]=high_density_idx[left_id].item()
                #others
                # Low-density members adopt the label of their most similar
                # high-density neighbour: low-density columns are masked to -1
                # so argmax can only select a core.  The +start offset is
                # applied to every member of the cluster here.
                # NOTE(review): in the >20000 branch `sim` stays on the CPU
                # while `i_idx` is CUDA — confirm this indexing is valid.
                i_sim=sim[i_idx][:,i_idx]
                i_sim[:,i_density<self.thre]=-1
                match=torch.argmax(i_sim,dim=1)
                sub_cluster_label[i_idx]=sub_cluster_label[i_idx[match]]+start
            else:
                # No high-density point at all: the whole cluster becomes one
                # sub-cluster anchored at its densest member.
                match=torch.argmax(i_density)
                sub_cluster_label[i_idx]=i_idx[match]+start
        assert torch.min(sub_cluster_label)>=0
    def initialize_sub_cluster_label_ori(self,label,sub_cluster_label,features,start=0): #initialize
        """Original initialisation variant driven by jaccard re-ranked distances.

        Same contract as ``initialize_sub_cluster_label``, but each sample is
        matched to its nearest high-density point under the re-ranked distance
        instead of raw similarity.
        """
        print('initialize sub cluster bank...')
        #compute density
        sim=features.mm(features.t())
        density=torch.sum(torch.gt(sim,self.density_sim),dim=1)
        print('high density num:',torch.sum(density>self.thre))
        #combine_density=torch.gt(sim,self.density_combine_thre)
        rerank_dist = torch.from_numpy(compute_jaccard_distance_inital_rank(features, k1=self.k1, k2=self.k2)).cuda()
        all_idx=torch.arange(len(label))
        unique_label=list(set(label.tolist()))
        for i in unique_label:
            #import pdb;pdb.set_trace()
            i_rerank_dist=rerank_dist[label==i][:,label==i]
            #i_combine=combine_density[label==i][:,label==i]
            i_density=density[label==i]
            i_features=features[label==i]
            i_idx=(all_idx[label==i])
            if torch.sum(torch.ge(i_density,self.thre))>0: #have high density guys
                # Push low-density columns to maximum distance (1) so argmin
                # can only match against high-density points.
                i_rerank_dist[:,torch.lt(i_density,self.thre)]=1
                match=torch.argmin(i_rerank_dist,dim=1)
                try:
                    sub_cluster_label[label==i]=(i_idx[match]).cuda()+start
                except:
                    # NOTE(review): bare except + pdb left in from debugging;
                    # consider narrowing the exception and removing the trace.
                    print('sub_cluster_labe error')
                    import pdb;pdb.set_trace()
                #sub_cluster_featurebank[label==i]=features[label==i][match]
            else: #all low density-->one sub cluster
                match=torch.argmax(i_density)
                try:
                    sub_cluster_label[label==i]=(i_idx[match]).cuda()+start
                except:
                    # NOTE(review): same debugging remnant as above.
                    print('sub_cluster_label single error')
                    import pdb;pdb.set_trace()
                #sub_cluster_featurebank[label==i]=i_idx[match]
        assert torch.max(sub_cluster_label)-start<len(sub_cluster_label)
        del sim,rerank_dist
        print('Done')
    def update_sub_cluster_label(self,indexes,memory): #update online
        """Recompute sub-cluster labels for every cluster touched by ``indexes``.

        Reads ``memory.s_label`` / ``memory.s_features`` and rewrites
        ``memory.s_sub_label`` in place, using the same density/core logic as
        ``initialize_sub_cluster_label`` (without the ``start`` offset).
        """
        index_label=list(set(memory.s_label[indexes].tolist()))
        all_idx=torch.arange(len(memory.s_label)).cuda()
        #update sub label for these labels
        core_nums=[]
        for label in index_label:
            i_idx=all_idx[(memory.s_label==label)]
            feat=memory.s_features[i_idx]
            sim=feat.mm(feat.t())
            #density
            i_density=torch.sum(torch.gt(sim,self.density_sim),dim=1)
            if torch.sum(torch.ge(i_density,self.thre))>0:
                i_density_core=torch.gt(sim,self.density_core_thre)
                high_density_idx=i_idx[i_density>=self.thre]
                i_density_core=i_density_core[i_density>=self.thre][:,i_density>=self.thre]
                memory.s_sub_label[i_idx]=-1
                # Same greedy core-merging pass as initialize_sub_cluster_label.
                for left_id in range(len(high_density_idx)):
                    neighbor=high_density_idx[i_density_core[left_id]>0]
                    if len(neighbor)>1:
                        neighbor_label=memory.s_sub_label[neighbor]
                        neighbor_label=neighbor_label[neighbor_label>-1]
                        if len(neighbor_label)>0:
                            memory.s_sub_label[high_density_idx[left_id].item()]=neighbor_label[0].item()
                        else:
                            memory.s_sub_label[high_density_idx[left_id].item()]=high_density_idx[left_id].item()
                    else:
                        memory.s_sub_label[high_density_idx[left_id].item()]=high_density_idx[left_id].item()
                #others
                # Attach low-density members to their most similar core.
                sim[:,i_density<self.thre]=-1
                match=torch.argmax(sim,dim=1)
                memory.s_sub_label[i_idx]=memory.s_sub_label[i_idx[match]]
                core_nums.append(len(set(memory.s_sub_label[i_idx].tolist())))
            else:
                match=torch.argmax(i_density)
                memory.s_sub_label[i_idx]=i_idx[match]
                core_nums.append(1)
        #print('core_nums:',core_nums)
| 49.793308
| 292
| 0.514748
| 20,052
| 159,239
| 3.837622
| 0.017903
| 0.039609
| 0.025964
| 0.034073
| 0.886332
| 0.858028
| 0.837314
| 0.820187
| 0.806919
| 0.790506
| 0
| 0.02814
| 0.361074
| 159,239
| 3,197
| 293
| 49.808883
| 0.728205
| 0.13859
| 0
| 0.813859
| 0
| 0
| 0.004344
| 0
| 0
| 0
| 0
| 0
| 0.000906
| 1
| 0.011775
| false
| 0
| 0.004529
| 0
| 0.026721
| 0.026721
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
779a576a1ce16956977928f7269f514bac0bb605
| 85
|
py
|
Python
|
spacenav_wrapper/src/spacenav_wrapper/__init__.py
|
carlosvquezada/lg_ros_nodes
|
7560e99272d06ef5c80a5444131dad72c078a718
|
[
"Apache-2.0"
] | null | null | null |
spacenav_wrapper/src/spacenav_wrapper/__init__.py
|
carlosvquezada/lg_ros_nodes
|
7560e99272d06ef5c80a5444131dad72c078a718
|
[
"Apache-2.0"
] | null | null | null |
spacenav_wrapper/src/spacenav_wrapper/__init__.py
|
carlosvquezada/lg_ros_nodes
|
7560e99272d06ef5c80a5444131dad72c078a718
|
[
"Apache-2.0"
] | null | null | null |
from space_wrapper import SpacenavWrapper
from space_wrapper import SpacenavRezeroer
| 28.333333
| 42
| 0.905882
| 10
| 85
| 7.5
| 0.6
| 0.24
| 0.426667
| 0.586667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094118
| 85
| 2
| 43
| 42.5
| 0.974026
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
77df997014c3dff06a1f27de3fef2ca8455ae427
| 11,990
|
py
|
Python
|
dev/Report/generate_all_report_plots.py
|
aakash30jan/Couette-Poiseuille_FlowCode
|
3110d5d818cb8fdfb4959e58d9dcbc48db325122
|
[
"CC-BY-4.0"
] | 9
|
2019-01-05T09:05:05.000Z
|
2021-11-22T19:04:14.000Z
|
dev/Report/generate_all_report_plots.py
|
aakash30jan/Couette-Poiseuille_FlowCode
|
3110d5d818cb8fdfb4959e58d9dcbc48db325122
|
[
"CC-BY-4.0"
] | null | null | null |
dev/Report/generate_all_report_plots.py
|
aakash30jan/Couette-Poiseuille_FlowCode
|
3110d5d818cb8fdfb4959e58d9dcbc48db325122
|
[
"CC-BY-4.0"
] | 3
|
2020-02-28T03:44:34.000Z
|
2020-09-10T05:32:54.000Z
|
# import libraries
import numpy as np
import matplotlib.pyplot as plt

# ALPHA=np.arange(1,16)
plt.rcParams.update({'font.size': 11})  # Font 11

simDataDir = '../Simulated_Data/DA/'


def plot(ALPHA):
    """Overlay the simulated velocity profile for one grid-stretching alpha."""
    sim_path = simDataDir + 'Case_01_A' + str(ALPHA) + '_sim.dat'
    sim = np.loadtxt(sim_path)
    plt.plot(sim[:, 0], sim[:, 1], '-', label='Alpha= 10E-' + str(ALPHA))


expDataDir = '../Experimental_Data/'
expDataFile = expDataDir + 'Case_01_exp.dat'
case_exp = np.loadtxt(expDataFile)

for ALPHA in range(1, 16, 7):
    plot(ALPHA)

plt.plot(case_exp[:, 0], case_exp[:, 1], 'ks', label='Experimental Data')
plt.xlabel('y/2h')
plt.ylabel('U/Uq')
plt.title('Variation of Alpha (Grid Stretching)')
plt.legend()
plt.savefig('VariationOfAlpha.eps')
plt.show()
##########
##########
# import libraries
import numpy as np
import matplotlib.pyplot as plt

plt.rcParams.update({'font.size': 11})  # Font 11


def plot(CASE):
    """Compare simulated profiles at three grid resolutions against experiment."""
    caseChar = '0' + str(CASE) if CASE < 10 else str(CASE)
    # One curve per grid resolution, plotted in G1, G2, G3 order.
    for data_dir, fmt, lbl in (('../Simulated_Data/G1/', 'r-', 'N= 10001'),
                               ('../Simulated_Data/G2/', 'b-', 'N= 1001'),
                               ('../Simulated_Data/G3/', 'g-', 'N= 101')):
        sim = np.loadtxt(data_dir + 'Case_' + caseChar + '_sim.dat')
        plt.plot(sim[:, 0], sim[:, 1], fmt, label=lbl)
    exp = np.loadtxt('../Experimental_Data/' + 'Case_' + caseChar + '_exp.dat')
    plt.plot(exp[:, 0], exp[:, 1], 'kx', label='Experimental Data')
    plt.xlabel('y/2h')
    plt.ylabel('U/Uq')
    plt.title('Variation Grid Points for Case ' + caseChar)
    plt.legend()
    plt.savefig('Compare_N_Case' + caseChar + '.eps')
    plt.show()


for CASE in range(1, 19, 1):
    plot(CASE)
#############
############
import numpy as np
import matplotlib.pyplot as plt

plt.rcParams.update({'font.size': 11})  # Font 11


def plot(CASE):
    """Compare simulated profiles at two alpha values against experiment."""
    caseChar = '0' + str(CASE) if CASE < 10 else str(CASE)
    # One curve per alpha, plotted in G4, G5 order; only points 1..99 are shown.
    for data_dir, fmt, lbl in (('../Simulated_Data/G4/', 'r-', 'Alpha= 1E-01'),
                               ('../Simulated_Data/G5/', 'b-', 'Alpha= 1E-15')):
        sim = np.loadtxt(data_dir + 'Case_' + caseChar + '_sim.dat')
        plt.plot(sim[1:100, 0], sim[1:100, 1], fmt, label=lbl)
    exp = np.loadtxt('../Experimental_Data/' + 'Case_' + caseChar + '_exp.dat')
    plt.plot(exp[:, 0], exp[:, 1], 'kx', label='Experimental Data')
    plt.xlabel('y/2h')
    plt.ylabel('U/Uq')
    plt.title('Variation Alpha for Case ' + caseChar)
    plt.legend()
    plt.savefig('Compare_Alpha_Case' + caseChar + '.eps')
    plt.show()


for CASE in range(1, 19, 1):
    plot(CASE)
###################
# CASE(x) vs iterations(y) vs color(N)
# Header line 4 of each *_sim.dat holds: ALPHA,N,ITERATIONS,VW,UMAX,UAVG,UTAU1,UTAU2
import numpy as np
import matplotlib.pyplot as plt

plt.rcParams.update({'font.size': 11})  # Font 11


def _read_header(sim_path):
    """Parse the numeric header (line 4) of a simulation data file.

    Returns a float array of ALPHA, N, ITERATIONS, VW, UMAX, UAVG, UTAU1, UTAU2.
    """
    # BUG FIX: the original opened the file without ever closing it; use a
    # context manager so the handle is released immediately.
    with open(sim_path) as f:
        lines = f.readlines()
    s = lines[3].replace('#', '')
    return np.array([float(i) for i in s.split()])


def plot(CASE):
    """Scatter the iteration count for one case at three grid resolutions."""
    caseChar = '0' + str(CASE) if CASE < 10 else str(CASE)
    for data_dir, marker in (('../Simulated_Data/G1/', 'ro'),
                             ('../Simulated_Data/G2/', 'bs'),
                             ('../Simulated_Data/G3/', 'gv')):
        heads = _read_header(data_dir + 'Case_' + caseChar + '_sim.dat')
        plt.plot(CASE, heads[2], marker)  # heads[2] == ITERATIONS


for CASE in range(1, 19, 1):
    plot(CASE)

ax = plt.subplot(111)
# Dummy points at the origin exist only to populate the legend.
ax.plot(0, 0, 'ro', label='N= 10001')
ax.plot(0, 0, 'bs', label='N= 1001')
ax.plot(0, 0, 'gv', label='N= 101')
ax.plot([0, 19], [10000, 10000], '-k', label='Convergence\nCriteria')
# Shrink the axes so the legend fits outside on the right.
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.xlabel('Case Number')
plt.ylabel('Number of Iterations')
plt.title('Comparison of Convergence and Grid Points(N) ')
# plt.ylim(0,10050)
plt.xlim(0, 19)
x_label = np.arange(0, 20, 1)
plt.xticks(x_label)
# plt.grid(axis='x')
plt.savefig('Compare_Convergence_N.eps')
plt.show()
####################
####################
###################
# CASE(x) vs iterations(y) vs color(ALPHA)
# Header line 4 of each *_sim.dat holds: ALPHA,N,ITERATIONS,VW,UMAX,UAVG,UTAU1,UTAU2
import numpy as np
import matplotlib.pyplot as plt

plt.rcParams.update({'font.size': 11})  # Font 11


def _read_header(sim_path):
    """Parse the numeric header (line 4) of a simulation data file."""
    # BUG FIX: the original opened the file without ever closing it.
    with open(sim_path) as f:
        lines = f.readlines()
    s = lines[3].replace('#', '')
    return np.array([float(i) for i in s.split()])


def plot(CASE):
    """Scatter the iteration count for one case at two alpha settings."""
    caseChar = '0' + str(CASE) if CASE < 10 else str(CASE)
    heads = _read_header('../Simulated_Data/G4/' + 'Case_' + caseChar + '_sim.dat')
    plt.plot(CASE, heads[2], 'ro')  # heads[2] == ITERATIONS
    # NOTE: the original also read the G5 file, but its plot call was
    # commented out, so that dead (and leaking) read is dropped here.
    # plt.plot(CASE, heads[2], 'bs')  # G5 variant, disabled in the original
    heads = _read_header('../Simulated_Data/G6/' + 'Case_' + caseChar + '_sim.dat')
    plt.plot(CASE, heads[2], 'gv')


for CASE in range(1, 19, 1):
    plot(CASE)

ax = plt.subplot(111)
# Dummy points at the origin exist only to populate the legend.
ax.plot(0, 0, 'ro', label='a= 1E-01')
ax.plot(0, 0, 'gv', label='a= 1E-06')
# ax.plot(0,0,'bs',label='a= 1E-15')
ax.plot([0, 19], [10000, 10000], '-k', label='Convergence\nCriteria')
# Shrink the axes so the legend fits outside on the right.
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.xlabel('Case Number')
plt.ylabel('Number of Iterations')
plt.title('Comparison of Convergence and Alpha(a)') #for fixed N=101
# plt.ylim(0,10050)
plt.xlim(0, 19)
x_label = np.arange(0, 20, 1)
plt.xticks(x_label)
# plt.grid(axis='x')
plt.savefig('Compare_Convergence_Alpha.eps')
plt.show()
####################
####################
import numpy as np
import matplotlib.pyplot as plt

plt.rcParams.update({'font.size': 11})  # Font 11


def plot(CASE):
    """Return the header values of one G6 case.

    The returned float array holds ALPHA, N, ITERATIONS, VW, UMAX, UAVG,
    UTAU1, UTAU2 (header line 4 of the data file).
    """
    caseChar = '0' + str(CASE) if CASE < 10 else str(CASE)
    # BUG FIX: the original opened the file without ever closing it.
    with open('../Simulated_Data/G6/' + 'Case_' + caseChar + '_sim.dat') as f:
        lines = f.readlines()
    s = lines[3].replace('#', '')
    return np.array([float(i) for i in s.split()])


sim_Umax = np.zeros(18)
sim_Uavg = np.zeros(18)
for CASE in range(1, 19, 1):
    # PERF FIX: the original called plot() twice per case, reading the same
    # file twice; read once and take both columns.
    heads = plot(CASE)
    sim_Umax[CASE - 1] = heads[4]  # Umax
    sim_Uavg[CASE - 1] = heads[5]  # Uavg

##IMP##
# from El Telbany's paper first 15 values and 16-18 from Gilliot's thesis
exp_Umax = np.array([12.84,12.84,12.84,12.84,12.84,8.50,17.08,12.84,8.59,13.25,16.33,21.57,24.01,23.62,16.00,2.90,3.10,3.70])
exp_Uavg = np.array([6.42,7.28,8.06,8.14,8.81,0.71,14.55,11.38,7.70,12.40,15.10,20.11,22.40,21.90,14.55,2.50,2.50,2.55])
CASE = np.arange(1, 19, 1)

# Combined figure: both velocities, simulated vs experimental.
plt.plot(CASE, sim_Umax, '-ro', label='Umax Simulated')
plt.plot(CASE, exp_Umax, '-go', label='Umax Experimental')
plt.plot(CASE, sim_Uavg, '-bs', label='Uavg Simulated')
plt.plot(CASE, exp_Uavg, '-ys', label='Uavg Experimental')
plt.xlabel('Case Number')
plt.ylabel('Velocity (m/s)')
plt.title('Comparison of Umax and Uavg') #for fixed N=101 and Alpha=1E-06 #G6
# plt.ylim(0,10050)
plt.xlim(0, 19)
x_label = np.arange(0, 20, 1)
plt.xticks(x_label)
# plt.grid(axis='x')
plt.legend()
plt.savefig('Compare_Umax_Uavg.eps')
plt.show()

# Umax only.
plt.plot(CASE, sim_Umax, '-ro', label='Simulated')
plt.plot(CASE, exp_Umax, '-go', label='Experimental')
plt.xlabel('Case Number')
plt.ylabel('Velocity (m/s)')
plt.title('Comparison of Umax') #for fixed N=101 and Alpha=1E-06 #G6
plt.xlim(0, 19)
plt.xticks(np.arange(0, 20, 1))
plt.legend()
plt.savefig('Compare_Umax.eps')
plt.show()

# Uavg only.
plt.plot(CASE, sim_Uavg, '-bs', label='Simulated')
plt.plot(CASE, exp_Uavg, '-ys', label='Experimental')
plt.xlabel('Case Number')
plt.ylabel('Velocity (m/s)')
plt.title('Comparison of Uavg') #for fixed N=101 and Alpha=1E-06 #G6
plt.xlim(0, 19)
plt.xticks(np.arange(0, 20, 1))
plt.legend()
plt.savefig('Compare_Uavg.eps')
plt.show()
####################
####################
import numpy as np
import matplotlib.pyplot as plt

plt.rcParams.update({'font.size': 11})  # Font 11


def plot(CASE):
    """Return the header values of one G6 case.

    The returned float array holds ALPHA, N, ITERATIONS, VW, UMAX, UAVG,
    UTAU1, UTAU2 (header line 4 of the data file).
    """
    caseChar = '0' + str(CASE) if CASE < 10 else str(CASE)
    # BUG FIX: the original opened the file without ever closing it.
    with open('../Simulated_Data/G6/' + 'Case_' + caseChar + '_sim.dat') as f:
        lines = f.readlines()
    s = lines[3].replace('#', '')
    return np.array([float(i) for i in s.split()])


sim_Utau1 = np.zeros(18)
sim_Utau2 = np.zeros(18)
for CASE in range(1, 19, 1):
    # PERF FIX: the original called plot() twice per case, reading the same
    # file twice; read once and take both columns.
    heads = plot(CASE)
    sim_Utau1[CASE - 1] = heads[6]  # Utau1
    sim_Utau2[CASE - 1] = heads[7]  # Utau2

##IMP##
# from El Telbany's paper first 15 values and 16-18 from Gilliot's thesis
exp_Utau1 = np.array([0.282,0.328,0.362,0.357,0.383,0.313,0.600,0.485,0.350,0.564,0.679,0.880,0.978,0.961,0.659,0.150,0.140,0.150])
exp_Utau2 = np.array([0.282,0.233,0.1809,0.1669,0.1305,0.0615,0.0400,0.0229,0.0084,0.0300,0.1860,0.4142,0.518,0.670,0.659,0.09,0.04,0.05])
CASE = np.arange(1, 19, 1)

# Combined figure: both wall-stress parameters, simulated vs experimental.
plt.plot(CASE, sim_Utau1, '-ro', label='Utau1 Simulated')
plt.plot(CASE, exp_Utau1, '-go', label='Utau1 Experimental')
plt.plot(CASE, sim_Utau2, '-bs', label='Utau2 Simulated')
plt.plot(CASE, exp_Utau2, '-ys', label='Utau2 Experimental')
plt.xlabel('Case Number')
plt.ylabel('Parameter for Wall Stress (m/s)')
plt.title('Comparison of Utau1 and Utau2') #for fixed N=101 and Alpha=1E-06 #G6
# plt.ylim(0,10050)
plt.xlim(0, 19)
x_label = np.arange(0, 20, 1)
plt.xticks(x_label)
# plt.grid(axis='x')
plt.legend()
plt.savefig('Compare_Utau1_Utau2.eps')
plt.show()

# Utau1 only.
plt.plot(CASE, sim_Utau1, '-ro', label='Simulated')
plt.plot(CASE, exp_Utau1, '-go', label='Experimental')
plt.xlabel('Case Number')
plt.ylabel('Parameter for High-Stress Wall (m/s)')
plt.title('Comparison of Utau1') #for fixed N=101 and Alpha=1E-06 #G6
plt.xlim(0, 19)
plt.xticks(np.arange(0, 20, 1))
plt.legend()
plt.savefig('Compare_Utau1.eps')
plt.show()

# Utau2 only.
plt.plot(CASE, sim_Utau2, '-bs', label='Simulated')
plt.plot(CASE, exp_Utau2, '-ys', label='Experimental')
plt.xlabel('Case Number')
plt.ylabel('Parameter for Low-Stress Wall (m/s)')
plt.title('Comparison of Utau2') #for fixed N=101 and Alpha=1E-06 #G6
plt.xlim(0, 19)
plt.xticks(np.arange(0, 20, 1))
plt.legend()
plt.savefig('Compare_Utau2.eps')
plt.show()
| 25.784946
| 136
| 0.680484
| 2,020
| 11,990
| 3.955446
| 0.116832
| 0.047059
| 0.045432
| 0.022778
| 0.880851
| 0.871589
| 0.857697
| 0.81627
| 0.754693
| 0.747184
| 0
| 0.072145
| 0.101751
| 11,990
| 464
| 137
| 25.840517
| 0.669731
| 0.13211
| 0
| 0.755932
| 0
| 0
| 0.201524
| 0.053324
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023729
| false
| 0
| 0.047458
| 0
| 0.077966
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
77e76160d2fc6332a6c0b6e665a7d32f5a4eb5a1
| 8,978
|
py
|
Python
|
tests/contrib/celery/test_integration.py
|
sharov/dd-trace-py
|
d0995b49cf7147ab463d0a67a38779fad3f539b4
|
[
"BSD-3-Clause"
] | 1
|
2019-11-24T23:09:29.000Z
|
2019-11-24T23:09:29.000Z
|
tests/contrib/celery/test_integration.py
|
sharov/dd-trace-py
|
d0995b49cf7147ab463d0a67a38779fad3f539b4
|
[
"BSD-3-Clause"
] | null | null | null |
tests/contrib/celery/test_integration.py
|
sharov/dd-trace-py
|
d0995b49cf7147ab463d0a67a38779fad3f539b4
|
[
"BSD-3-Clause"
] | 1
|
2021-01-24T13:44:57.000Z
|
2021-01-24T13:44:57.000Z
|
from nose.tools import eq_, ok_
from .utils import CeleryTestCase
class CeleryIntegrationTask(CeleryTestCase):
    """
    Ensures that the tracer works properly with a real Celery application
    without breaking the Application or Task APIs.
    """
    # The repeated trace-structure assertions are factored into the private
    # _assert_* helpers below; test method names and behavior are unchanged.

    def _assert_apply_run_trace(self, traces, resource, state='SUCCESS'):
        # Shared checks for a synchronous execution: exactly one trace holding
        # an `apply` span and a `run` span, both tagged with the task resource.
        eq_(1, len(traces))
        eq_(2, len(traces[0]))
        eq_('celery.task.apply', traces[0][0].name)
        eq_('celery.task.run', traces[0][1].name)
        eq_(resource, traces[0][0].resource)
        eq_(resource, traces[0][1].resource)
        eq_('celery', traces[0][0].service)
        eq_('celery', traces[0][1].service)
        eq_(state, traces[0][0].get_tag('state'))

    def _assert_apply_async_trace(self, traces, resource):
        # Shared checks for an async dispatch: a single `apply_async` span
        # carrying the task id tag.
        eq_(1, len(traces))
        eq_(1, len(traces[0]))
        eq_('celery.task.apply_async', traces[0][0].name)
        eq_(resource, traces[0][0].resource)
        eq_('celery', traces[0][0].service)
        ok_(traces[0][0].get_tag('id') is not None)

    def _assert_error_details(self, span):
        # Shared checks for the error metadata set on a failed `run` span.
        eq_(1, span.error)
        eq_('Task class is failing', span.get_tag('error.msg'))
        ok_('Traceback (most recent call last)' in span.get_tag('error.stack'))
        ok_('Task class is failing' in span.get_tag('error.stack'))

    def test_concurrent_delays(self):
        # it should create one trace for each delayed execution
        @self.app.task
        def fn_task():
            return 42

        for x in range(100):
            fn_task.delay()

        traces = self.tracer.writer.pop_traces()
        eq_(100, len(traces))

    def test_fn_task(self):
        # it should execute a traced task with a returning value
        @self.app.task
        def fn_task():
            return 42

        t = fn_task.apply()
        ok_(t.successful())
        eq_(42, t.result)

        traces = self.tracer.writer.pop_traces()
        self._assert_apply_run_trace(
            traces, 'tests.contrib.celery.test_integration.fn_task')

    def test_fn_task_bind(self):
        # it should execute a traced task with a returning value
        @self.app.task(bind=True)
        def fn_task(self):
            return self

        t = fn_task.apply()
        ok_(t.successful())
        ok_('fn_task' in t.result.name)

        traces = self.tracer.writer.pop_traces()
        self._assert_apply_run_trace(
            traces, 'tests.contrib.celery.test_integration.fn_task')

    def test_fn_task_parameters(self):
        # it should execute a traced task that has parameters
        @self.app.task
        def fn_task_parameters(user, force_logout=False):
            return (user, force_logout)

        t = fn_task_parameters.apply(args=['user'], kwargs={'force_logout': True})
        ok_(t.successful())
        eq_('user', t.result[0])
        ok_(t.result[1] is True)

        traces = self.tracer.writer.pop_traces()
        self._assert_apply_run_trace(
            traces, 'tests.contrib.celery.test_integration.fn_task_parameters')

    def test_fn_task_parameters_bind(self):
        # it should execute a traced task that has parameters
        @self.app.task(bind=True)
        def fn_task_parameters(self, user, force_logout=False):
            return (self, user, force_logout)

        t = fn_task_parameters.apply(args=['user'], kwargs={'force_logout': True})
        ok_(t.successful())
        ok_('fn_task_parameters' in t.result[0].name)
        eq_('user', t.result[1])
        ok_(t.result[2] is True)

        traces = self.tracer.writer.pop_traces()
        self._assert_apply_run_trace(
            traces, 'tests.contrib.celery.test_integration.fn_task_parameters')

    def test_fn_task_parameters_async(self):
        # it should execute a traced async task that has parameters
        @self.app.task
        def fn_task_parameters(user, force_logout=False):
            return (user, force_logout)

        t = fn_task_parameters.apply_async(args=['user'], kwargs={'force_logout': True})
        eq_('PENDING', t.status)

        traces = self.tracer.writer.pop_traces()
        self._assert_apply_async_trace(
            traces, 'tests.contrib.celery.test_integration.fn_task_parameters')

    def test_fn_task_parameters_delay(self):
        # using delay shorthand must preserve arguments
        @self.app.task
        def fn_task_parameters(user, force_logout=False):
            return (user, force_logout)

        t = fn_task_parameters.delay('user', force_logout=True)
        eq_('PENDING', t.status)

        traces = self.tracer.writer.pop_traces()
        self._assert_apply_async_trace(
            traces, 'tests.contrib.celery.test_integration.fn_task_parameters')

    def test_fn_exception(self):
        # it should catch exceptions in task functions
        @self.app.task
        def fn_exception():
            raise Exception('Task class is failing')

        r = fn_exception.apply()
        ok_(r.failed())
        ok_('Task class is failing' in r.traceback)

        traces = self.tracer.writer.pop_traces()
        self._assert_apply_run_trace(
            traces, 'tests.contrib.celery.test_integration.fn_exception', 'FAILURE')
        self._assert_error_details(traces[0][1])

    def test_class_task(self):
        # it should execute class based tasks with a returning value
        class BaseTask(self.app.Task):
            def run(self):
                return 42

        t = BaseTask()
        # register the Task class if it's available (required in Celery 4.0+)
        register_task = getattr(self.app, 'register_task', None)
        if register_task is not None:
            register_task(t)

        r = t.apply()
        ok_(r.successful())
        eq_(42, r.result)

        traces = self.tracer.writer.pop_traces()
        self._assert_apply_run_trace(
            traces, 'tests.contrib.celery.test_integration.BaseTask')

    def test_class_task_exception(self):
        # it should catch exceptions in class based tasks
        class BaseTask(self.app.Task):
            def run(self):
                raise Exception('Task class is failing')

        t = BaseTask()
        # register the Task class if it's available (required in Celery 4.0+)
        register_task = getattr(self.app, 'register_task', None)
        if register_task is not None:
            register_task(t)

        r = t.apply()
        ok_(r.failed())
        ok_('Task class is failing' in r.traceback)

        traces = self.tracer.writer.pop_traces()
        self._assert_apply_run_trace(
            traces, 'tests.contrib.celery.test_integration.BaseTask', 'FAILURE')
        self._assert_error_details(traces[0][1])
| 39.725664
| 94
| 0.612163
| 1,260
| 8,978
| 4.169048
| 0.098413
| 0.09861
| 0.054826
| 0.060918
| 0.880069
| 0.864268
| 0.841424
| 0.811156
| 0.781649
| 0.781649
| 0
| 0.026857
| 0.236912
| 8,978
| 225
| 95
| 39.902222
| 0.739892
| 0.086545
| 0
| 0.8
| 0
| 0
| 0.205018
| 0.10355
| 0
| 0
| 0
| 0
| 0
| 1
| 0.114286
| false
| 0
| 0.011429
| 0.045714
| 0.188571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7acead34ac5852e4a83ad09195c4e826ba811c68
| 13,130
|
py
|
Python
|
venv/lib/python3.8/site-packages/spaceone/api/inventory/v1/server_pb2_grpc.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/spaceone/api/inventory/v1/server_pb2_grpc.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
venv/lib/python3.8/site-packages/spaceone/api/inventory/v1/server_pb2_grpc.py
|
choonho/plugin-prometheus-mon-webhook
|
afa7d65d12715fd0480fb4f92a9c62da2d6128e0
|
[
"Apache-2.0"
] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2
from spaceone.api.inventory.v1 import server_pb2 as spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2
class ServerStub(object):
    """Client-side stub for the ``spaceone.api.inventory.v1.Server`` service.

    NOTE(review): this module is emitted by the gRPC Python protocol compiler
    ("DO NOT EDIT" header) — any manual change is lost on the next .proto
    recompilation.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # One unary-unary callable per RPC defined on the Server service.
        self.create = channel.unary_unary(
                '/spaceone.api.inventory.v1.Server/create',
                request_serializer=spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2.CreateServerRequest.SerializeToString,
                response_deserializer=spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2.ServerInfo.FromString,
                )
        self.update = channel.unary_unary(
                '/spaceone.api.inventory.v1.Server/update',
                request_serializer=spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2.UpdateServerRequest.SerializeToString,
                response_deserializer=spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2.ServerInfo.FromString,
                )
        self.pin_data = channel.unary_unary(
                '/spaceone.api.inventory.v1.Server/pin_data',
                request_serializer=spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2.PinServerDataRequest.SerializeToString,
                response_deserializer=spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2.ServerInfo.FromString,
                )
        self.delete = channel.unary_unary(
                '/spaceone.api.inventory.v1.Server/delete',
                request_serializer=spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2.ServerRequest.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
                )
        self.get = channel.unary_unary(
                '/spaceone.api.inventory.v1.Server/get',
                request_serializer=spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2.GetServerRequest.SerializeToString,
                response_deserializer=spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2.ServerInfo.FromString,
                )
        self.list = channel.unary_unary(
                '/spaceone.api.inventory.v1.Server/list',
                request_serializer=spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2.ServerQuery.SerializeToString,
                response_deserializer=spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2.ServersInfo.FromString,
                )
        self.stat = channel.unary_unary(
                '/spaceone.api.inventory.v1.Server/stat',
                request_serializer=spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2.ServerStatQuery.SerializeToString,
                response_deserializer=google_dot_protobuf_dot_struct__pb2.Struct.FromString,
                )
class ServerServicer(object):
    """Server-side base class for the ``spaceone.api.inventory.v1.Server``
    service.

    Every handler is generated as UNIMPLEMENTED; a real service subclasses
    this and overrides the methods it supports.

    NOTE(review): generated by the gRPC protocol compiler ("DO NOT EDIT"
    header) — manual edits are lost on .proto recompilation.
    """

    def create(self, request, context):
        """Unimplemented stub; override in a subclass."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def update(self, request, context):
        """Unimplemented stub; override in a subclass."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def pin_data(self, request, context):
        """Unimplemented stub; override in a subclass."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def delete(self, request, context):
        """Unimplemented stub; override in a subclass."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def get(self, request, context):
        """Unimplemented stub; override in a subclass."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def list(self, request, context):
        """Unimplemented stub; override in a subclass."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def stat(self, request, context):
        """Unimplemented stub; override in a subclass."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_ServerServicer_to_server(servicer, server):
    """Register the servicer's unary-unary handlers for
    spaceone.api.inventory.v1.Server on the given gRPC server."""
    pb2 = spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2
    # (rpc name, bound handler, request deserializer, response serializer)
    method_specs = (
        ('create', servicer.create,
         pb2.CreateServerRequest.FromString, pb2.ServerInfo.SerializeToString),
        ('update', servicer.update,
         pb2.UpdateServerRequest.FromString, pb2.ServerInfo.SerializeToString),
        ('pin_data', servicer.pin_data,
         pb2.PinServerDataRequest.FromString, pb2.ServerInfo.SerializeToString),
        ('delete', servicer.delete,
         pb2.ServerRequest.FromString,
         google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString),
        ('get', servicer.get,
         pb2.GetServerRequest.FromString, pb2.ServerInfo.SerializeToString),
        ('list', servicer.list,
         pb2.ServerQuery.FromString, pb2.ServersInfo.SerializeToString),
        ('stat', servicer.stat,
         pb2.ServerStatQuery.FromString,
         google_dot_protobuf_dot_struct__pb2.Struct.SerializeToString),
    )
    rpc_method_handlers = {
        name: grpc.unary_unary_rpc_method_handler(
            behavior,
            request_deserializer=deserialize,
            response_serializer=serialize,
        )
        for name, behavior, deserialize, serialize in method_specs
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'spaceone.api.inventory.v1.Server', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Server(object):
    """One-shot client helpers for the spaceone.api.inventory.v1.Server
    service, built on grpc.experimental.unary_unary."""

    @staticmethod
    def _invoke(request, target, method, request_serializer,
                response_deserializer, options, channel_credentials,
                call_credentials, insecure, compression, wait_for_ready,
                timeout, metadata):
        # Single funnel for every unary-unary RPC of this service; the
        # argument order matches grpc.experimental.unary_unary exactly.
        return grpc.experimental.unary_unary(
            request, target, method,
            request_serializer,
            response_deserializer,
            options, channel_credentials,
            insecure, call_credentials, compression, wait_for_ready,
            timeout, metadata)

    @staticmethod
    def create(request,
               target,
               options=(),
               channel_credentials=None,
               call_credentials=None,
               insecure=False,
               compression=None,
               wait_for_ready=None,
               timeout=None,
               metadata=None):
        """Invoke /spaceone.api.inventory.v1.Server/create."""
        pb2 = spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2
        return Server._invoke(
            request, target, '/spaceone.api.inventory.v1.Server/create',
            pb2.CreateServerRequest.SerializeToString,
            pb2.ServerInfo.FromString,
            options, channel_credentials, call_credentials, insecure,
            compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def update(request,
               target,
               options=(),
               channel_credentials=None,
               call_credentials=None,
               insecure=False,
               compression=None,
               wait_for_ready=None,
               timeout=None,
               metadata=None):
        """Invoke /spaceone.api.inventory.v1.Server/update."""
        pb2 = spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2
        return Server._invoke(
            request, target, '/spaceone.api.inventory.v1.Server/update',
            pb2.UpdateServerRequest.SerializeToString,
            pb2.ServerInfo.FromString,
            options, channel_credentials, call_credentials, insecure,
            compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def pin_data(request,
                 target,
                 options=(),
                 channel_credentials=None,
                 call_credentials=None,
                 insecure=False,
                 compression=None,
                 wait_for_ready=None,
                 timeout=None,
                 metadata=None):
        """Invoke /spaceone.api.inventory.v1.Server/pin_data."""
        pb2 = spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2
        return Server._invoke(
            request, target, '/spaceone.api.inventory.v1.Server/pin_data',
            pb2.PinServerDataRequest.SerializeToString,
            pb2.ServerInfo.FromString,
            options, channel_credentials, call_credentials, insecure,
            compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def delete(request,
               target,
               options=(),
               channel_credentials=None,
               call_credentials=None,
               insecure=False,
               compression=None,
               wait_for_ready=None,
               timeout=None,
               metadata=None):
        """Invoke /spaceone.api.inventory.v1.Server/delete."""
        pb2 = spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2
        return Server._invoke(
            request, target, '/spaceone.api.inventory.v1.Server/delete',
            pb2.ServerRequest.SerializeToString,
            google_dot_protobuf_dot_empty__pb2.Empty.FromString,
            options, channel_credentials, call_credentials, insecure,
            compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def get(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke /spaceone.api.inventory.v1.Server/get."""
        pb2 = spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2
        return Server._invoke(
            request, target, '/spaceone.api.inventory.v1.Server/get',
            pb2.GetServerRequest.SerializeToString,
            pb2.ServerInfo.FromString,
            options, channel_credentials, call_credentials, insecure,
            compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def list(request,
             target,
             options=(),
             channel_credentials=None,
             call_credentials=None,
             insecure=False,
             compression=None,
             wait_for_ready=None,
             timeout=None,
             metadata=None):
        """Invoke /spaceone.api.inventory.v1.Server/list."""
        pb2 = spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2
        return Server._invoke(
            request, target, '/spaceone.api.inventory.v1.Server/list',
            pb2.ServerQuery.SerializeToString,
            pb2.ServersInfo.FromString,
            options, channel_credentials, call_credentials, insecure,
            compression, wait_for_ready, timeout, metadata)

    @staticmethod
    def stat(request,
             target,
             options=(),
             channel_credentials=None,
             call_credentials=None,
             insecure=False,
             compression=None,
             wait_for_ready=None,
             timeout=None,
             metadata=None):
        """Invoke /spaceone.api.inventory.v1.Server/stat."""
        pb2 = spaceone_dot_api_dot_inventory_dot_v1_dot_server__pb2
        return Server._invoke(
            request, target, '/spaceone.api.inventory.v1.Server/stat',
            pb2.ServerStatQuery.SerializeToString,
            google_dot_protobuf_dot_struct__pb2.Struct.FromString,
            options, channel_credentials, call_credentials, insecure,
            compression, wait_for_ready, timeout, metadata)
| 49.17603
| 128
| 0.689185
| 1,376
| 13,130
| 6.149709
| 0.080669
| 0.040416
| 0.061215
| 0.074332
| 0.90865
| 0.905342
| 0.89057
| 0.8381
| 0.776058
| 0.762586
| 0
| 0.010128
| 0.240518
| 13,130
| 266
| 129
| 49.360902
| 0.838448
| 0.060929
| 0
| 0.553571
| 1
| 0
| 0.07686
| 0.047537
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.017857
| 0.03125
| 0.133929
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ad62519e2a3f38adccb4cb96ef0d7dff18d6fd6
| 603
|
py
|
Python
|
tests/test_train.py
|
ckm3/Deep-Transit
|
52f2e81b2beb0d73974741ea78ba84ea52e8fc2e
|
[
"MIT"
] | 3
|
2021-08-03T01:21:22.000Z
|
2021-09-21T15:23:32.000Z
|
tests/test_train.py
|
ckm3/Deep-Transit
|
52f2e81b2beb0d73974741ea78ba84ea52e8fc2e
|
[
"MIT"
] | 1
|
2021-08-21T00:24:08.000Z
|
2021-08-23T03:41:26.000Z
|
tests/test_train.py
|
ckm3/Deep-Transit
|
52f2e81b2beb0d73974741ea78ba84ea52e8fc2e
|
[
"MIT"
] | null | null | null |
import pytest
import deep_transit as dt
def _configure_test_dataset():
    """Point deep_transit at the bundled test dataset with a tiny budget.

    Both training smoke tests previously duplicated these five config
    assignments verbatim; keeping them in one helper means the dataset
    layout is defined in exactly one place.
    """
    dt.config.DATASET = 'tests/Data'
    dt.config.IMG_DIR = dt.config.DATASET + "/transit-images/"
    dt.config.LABEL_DIR = dt.config.DATASET + "/transit-labels/"
    dt.config.BATCH_SIZE = 2   # tiny batch: keep the smoke test cheap
    dt.config.NUM_EPOCHS = 1   # one epoch is enough to exercise the loop


def test_train():
    """Smoke-test the default training entry point end to end."""
    _configure_test_dataset()
    dt.train()


def test_mge_train():
    """Smoke-test the `mge` backend training entry point end to end."""
    _configure_test_dataset()
    # Imported lazily so the mge backend is only required by this test.
    from deep_transit.mge.train import train
    train()
| 26.217391
| 64
| 0.681592
| 90
| 603
| 4.422222
| 0.288889
| 0.281407
| 0.226131
| 0.180905
| 0.758794
| 0.758794
| 0.758794
| 0.758794
| 0.758794
| 0.758794
| 0
| 0.00813
| 0.18408
| 603
| 23
| 65
| 26.217391
| 0.800813
| 0
| 0
| 0.588235
| 0
| 0
| 0.139073
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.117647
| true
| 0
| 0.176471
| 0
| 0.294118
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
bb361b32a73b7308681c73e4ba6bab4f6178154c
| 212
|
py
|
Python
|
Tests/TestEnvironment/__init__.py
|
dev-11/eigen-technical-task
|
c0b041fc2bd27d2706ccdab94f6eb618f17098bd
|
[
"MIT"
] | null | null | null |
Tests/TestEnvironment/__init__.py
|
dev-11/eigen-technical-task
|
c0b041fc2bd27d2706ccdab94f6eb618f17098bd
|
[
"MIT"
] | null | null | null |
Tests/TestEnvironment/__init__.py
|
dev-11/eigen-technical-task
|
c0b041fc2bd27d2706ccdab94f6eb618f17098bd
|
[
"MIT"
] | null | null | null |
from .mocks import get_test_single_sentence, get_test_three_sentences, mocked_document_service,\
mocked_interesting_service, mocked_interesting_service_with_low_interesting_rate, get_test_duplicated_sentence
| 70.666667
| 114
| 0.90566
| 28
| 212
| 6.178571
| 0.607143
| 0.121387
| 0.277457
| 0.358382
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061321
| 212
| 2
| 115
| 106
| 0.869347
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
bb52203898c7af6d5fb2acda88ad22bb2286dd90
| 36,764
|
py
|
Python
|
trulioo_sdk/api/verifications_api.py
|
Trulioo/sdk-python
|
3bf0530e2ba1a3ec93d89b967b2e257e7401d5c2
|
[
"RSA-MD"
] | 1
|
2022-01-11T12:08:45.000Z
|
2022-01-11T12:08:45.000Z
|
trulioo_sdk/api/verifications_api.py
|
Trulioo/sdk-python
|
3bf0530e2ba1a3ec93d89b967b2e257e7401d5c2
|
[
"RSA-MD"
] | null | null | null |
trulioo_sdk/api/verifications_api.py
|
Trulioo/sdk-python
|
3bf0530e2ba1a3ec93d89b967b2e257e7401d5c2
|
[
"RSA-MD"
] | 1
|
2021-05-17T08:33:15.000Z
|
2021-05-17T08:33:15.000Z
|
"""
Trulioo Python SDK
Package version: 1.0.4
Trulioo OpenAPI version: v1
Generated by OpenAPI Generator version: 5.0.1
"""
import re # noqa: F401
import sys # noqa: F401
from trulioo_sdk.api_client import ApiClient, Endpoint as _Endpoint
from trulioo_sdk.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from trulioo_sdk.model.transaction_record_result import TransactionRecordResult
from trulioo_sdk.model.transaction_status import TransactionStatus
from trulioo_sdk.model.verify_request import VerifyRequest
from trulioo_sdk.model.verify_result import VerifyResult
class VerificationsApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __document_download(
self,
transaction_record_id,
field_name,
mode="trial",
**kwargs
):
"""Document Download # noqa: E501
Download Document # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.document_download(transaction_record_id, field_name, mode="trial", async_req=True)
>>> result = thread.get()
Args:
transaction_record_id (str): id of the transactionrecord, this will be a GUID
field_name (str): document field name
mode (str): trial or live. defaults to "trial", must be one of ["trial"]
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
{str: (bool, date, datetime, dict, float, int, list, str, none_type)}
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['mode'] = \
mode
kwargs['transaction_record_id'] = \
transaction_record_id
kwargs['field_name'] = \
field_name
return self.call_with_http_info(**kwargs)
self.document_download = _Endpoint(
settings={
'response_type': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),
'auth': [
'ApiKeyAuth'
],
'endpoint_path': '/{mode}/verifications/v1/documentdownload/{transactionRecordId}/{fieldName}',
'operation_id': 'document_download',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'mode',
'transaction_record_id',
'field_name',
],
'required': [
'mode',
'transaction_record_id',
'field_name',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'mode':
(str,),
'transaction_record_id':
(str,),
'field_name':
(str,),
},
'attribute_map': {
'mode': 'mode',
'transaction_record_id': 'transactionRecordId',
'field_name': 'fieldName',
},
'location_map': {
'mode': 'path',
'transaction_record_id': 'path',
'field_name': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/json'
],
'content_type': [],
},
api_client=api_client,
callable=__document_download
)
def __get_transaction_record(
self,
id,
mode="trial",
**kwargs
):
"""Get Transaction Record # noqa: E501
This method is used to retrieve the request and results of a verification performed using the verify method. The response for this method includes the same information as verify method's response, along with data present in the input fields of the verify request. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_transaction_record(id, mode="trial", async_req=True)
>>> result = thread.get()
Args:
id (str): id of the transactionrecord, this will be a GUID
mode (str): trial or live. defaults to "trial", must be one of ["trial"]
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
TransactionRecordResult
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['mode'] = \
mode
kwargs['id'] = \
id
return self.call_with_http_info(**kwargs)
self.get_transaction_record = _Endpoint(
settings={
'response_type': (TransactionRecordResult,),
'auth': [
'ApiKeyAuth'
],
'endpoint_path': '/{mode}/verifications/v1/transactionrecord/{id}',
'operation_id': 'get_transaction_record',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'mode',
'id',
],
'required': [
'mode',
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'mode':
(str,),
'id':
(str,),
},
'attribute_map': {
'mode': 'mode',
'id': 'id',
},
'location_map': {
'mode': 'path',
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_transaction_record
)
def __get_transaction_record_address(
self,
id,
mode="trial",
**kwargs
):
"""Get Transaction Record Address # noqa: E501
Fetch the results of a verification with the TransactionRecordId for the transaction this will include additional information if your account includes address cleansing. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_transaction_record_address(id, mode="trial", async_req=True)
>>> result = thread.get()
Args:
id (str): id of the transactionrecord, this will be a GUID
mode (str): trial or live. defaults to "trial", must be one of ["trial"]
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
TransactionRecordResult
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['mode'] = \
mode
kwargs['id'] = \
id
return self.call_with_http_info(**kwargs)
self.get_transaction_record_address = _Endpoint(
settings={
'response_type': (TransactionRecordResult,),
'auth': [
'ApiKeyAuth'
],
'endpoint_path': '/{mode}/verifications/v1/transactionrecord/{id}/withaddress',
'operation_id': 'get_transaction_record_address',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'mode',
'id',
],
'required': [
'mode',
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'mode':
(str,),
'id':
(str,),
},
'attribute_map': {
'mode': 'mode',
'id': 'id',
},
'location_map': {
'mode': 'path',
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_transaction_record_address
)
def __get_transaction_record_document(
self,
transaction_record_id,
document_field,
mode="trial",
**kwargs
):
"""Get Transaction Record Document # noqa: E501
This method is used to retrieve the document of a verification performed using the verify method. The response for this method includes the processed base64 JPEG formatted string # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_transaction_record_document(transaction_record_id, document_field, mode="trial", async_req=True)
>>> result = thread.get()
Args:
transaction_record_id (str): id of the transactionrecord, this will be a GUID
document_field (str): FieldName of the Document, this will be a string
mode (str): trial or live. defaults to "trial", must be one of ["trial"]
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
str
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['mode'] = \
mode
kwargs['transaction_record_id'] = \
transaction_record_id
kwargs['document_field'] = \
document_field
return self.call_with_http_info(**kwargs)
self.get_transaction_record_document = _Endpoint(
settings={
'response_type': (str,),
'auth': [
'ApiKeyAuth'
],
'endpoint_path': '/{mode}/verifications/v1/transactionrecord/{transactionRecordID}/{documentField}',
'operation_id': 'get_transaction_record_document',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'mode',
'transaction_record_id',
'document_field',
],
'required': [
'mode',
'transaction_record_id',
'document_field',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'mode':
(str,),
'transaction_record_id':
(str,),
'document_field':
(str,),
},
'attribute_map': {
'mode': 'mode',
'transaction_record_id': 'transactionRecordID',
'document_field': 'documentField',
},
'location_map': {
'mode': 'path',
'transaction_record_id': 'path',
'document_field': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_transaction_record_document
)
def __get_transaction_record_verbose(
self,
id,
mode="trial",
**kwargs
):
"""Get Transaction Record Verbose # noqa: E501
Fetch the results of a verification with the TransactionRecordId for the transaction this will include additional information if your account includes address cleansing and watchlist details. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_transaction_record_verbose(id, mode="trial", async_req=True)
>>> result = thread.get()
Args:
id (str): id of the transactionrecord, this will be a GUID
mode (str): trial or live. defaults to "trial", must be one of ["trial"]
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
TransactionRecordResult
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['mode'] = \
mode
kwargs['id'] = \
id
return self.call_with_http_info(**kwargs)
self.get_transaction_record_verbose = _Endpoint(
settings={
'response_type': (TransactionRecordResult,),
'auth': [
'ApiKeyAuth'
],
'endpoint_path': '/{mode}/verifications/v1/transactionrecord/{id}/verbose',
'operation_id': 'get_transaction_record_verbose',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'mode',
'id',
],
'required': [
'mode',
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'mode':
(str,),
'id':
(str,),
},
'attribute_map': {
'mode': 'mode',
'id': 'id',
},
'location_map': {
'mode': 'path',
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_transaction_record_verbose
)
def __get_transaction_status(
self,
id,
mode="trial",
**kwargs
):
"""Get Transaction Status # noqa: E501
This method is used to retrieve the processing status of an asynchronous transaction. The response for this method includes the processing status of the verification, the TransactionID, the TransactionRecordID as well as whether the verification request has timed out. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_transaction_status(id, mode="trial", async_req=True)
>>> result = thread.get()
Args:
id (str): id of the asynchronous transaction, this will be a GUID
mode (str): trial or live. defaults to "trial", must be one of ["trial"]
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
TransactionStatus
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['mode'] = \
mode
kwargs['id'] = \
id
return self.call_with_http_info(**kwargs)
self.get_transaction_status = _Endpoint(
settings={
'response_type': (TransactionStatus,),
'auth': [
'ApiKeyAuth'
],
'endpoint_path': '/{mode}/verifications/v1/transaction/{id}/status',
'operation_id': 'get_transaction_status',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'mode',
'id',
],
'required': [
'mode',
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'mode':
(str,),
'id':
(str,),
},
'attribute_map': {
'mode': 'mode',
'id': 'id',
},
'location_map': {
'mode': 'path',
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json',
'text/json'
],
'content_type': [],
},
api_client=api_client,
callable=__get_transaction_status
)
def __verify(
    self,
    verify_request,
    mode="trial",
    **kwargs
):
    """Verify  # noqa: E501

    Calling this method will perform a verification. If your account includes address cleansing set the CleansedAddress flag to get additional address information in the result. You can query configuration to get what fields are available to you in each country. It is also possible to get sample requests from the customer portal.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.verify(verify_request, mode="trial", async_req=True)
    >>> result = thread.get()

    Args:
        verify_request (VerifyRequest):
        mode (str): trial or live. defaults to "trial", must be one of ["trial"]

    Keyword Args:
        _return_http_data_only (bool): response data without head status
            code and headers. Default is True.
        _preload_content (bool): if False, the urllib3.HTTPResponse object
            will be returned without reading/decoding response data.
            Default is True.
        _request_timeout (float/tuple): timeout setting for this request. If one
            number provided, it will be total request timeout. It can also
            be a pair (tuple) of (connection, read) timeouts.
            Default is None.
        _check_input_type (bool): specifies if type checking
            should be done on the data sent to the server.
            Default is True.
        _check_return_type (bool): specifies if type checking
            should be done on the data received from the server.
            Default is True.
        _host_index (int/None): specifies the index of the server
            that we want to use.
            Default is read from the configuration.
        async_req (bool): execute request asynchronously

    Returns:
        VerifyResult
            If the method is called asynchronously, returns the request
            thread.
    """
    # Fill in the generated client's standard control kwargs with their
    # defaults so call_with_http_info always receives a complete set.
    kwargs['async_req'] = kwargs.get(
        'async_req', False
    )
    kwargs['_return_http_data_only'] = kwargs.get(
        '_return_http_data_only', True
    )
    kwargs['_preload_content'] = kwargs.get(
        '_preload_content', True
    )
    kwargs['_request_timeout'] = kwargs.get(
        '_request_timeout', None
    )
    kwargs['_check_input_type'] = kwargs.get(
        '_check_input_type', True
    )
    kwargs['_check_return_type'] = kwargs.get(
        '_check_return_type', True
    )
    # No default here: None means "use the host from the configuration".
    kwargs['_host_index'] = kwargs.get('_host_index')
    # Positional parameters are forwarded through kwargs to the endpoint.
    kwargs['mode'] = \
        mode
    kwargs['verify_request'] = \
        verify_request
    return self.call_with_http_info(**kwargs)
# _Endpoint descriptor wiring POST /{mode}/verifications/v1/verify to the
# __verify closure above (auto-generated OpenAPI client code; regenerate
# rather than hand-edit).
self.verify = _Endpoint(
    settings={
        # Response body is deserialized into VerifyResult; API-key auth.
        'response_type': (VerifyResult,),
        'auth': [
            'ApiKeyAuth'
        ],
        'endpoint_path': '/{mode}/verifications/v1/verify',
        'operation_id': 'verify',
        'http_method': 'POST',
        'servers': None,
    },
    params_map={
        'all': [
            'mode',
            'verify_request',
        ],
        'required': [
            'mode',
            'verify_request',
        ],
        'nullable': [
        ],
        'enum': [
        ],
        'validation': [
        ]
    },
    root_map={
        'validations': {
        },
        'allowed_values': {
        },
        'openapi_types': {
            'mode':
                (str,),
            'verify_request':
                (VerifyRequest,),
        },
        'attribute_map': {
            # verify_request has no entry: body parameters are not renamed.
            'mode': 'mode',
        },
        'location_map': {
            # mode goes into the URL path; verify_request is the JSON body.
            'mode': 'path',
            'verify_request': 'body',
        },
        'collection_format_map': {
        }
    },
    headers_map={
        'accept': [
            'application/json',
            'text/json'
        ],
        'content_type': [
            'application/json',
            'text/json'
        ]
    },
    api_client=api_client,
    callable=__verify
)
| 37.861998
| 361
| 0.466326
| 3,175
| 36,764
| 5.175748
| 0.078425
| 0.029575
| 0.022151
| 0.023002
| 0.852675
| 0.831315
| 0.820361
| 0.799489
| 0.778738
| 0.762125
| 0
| 0.00367
| 0.451529
| 36,764
| 970
| 362
| 37.901031
| 0.811297
| 0.345528
| 0
| 0.685241
| 1
| 0
| 0.217646
| 0.058363
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012048
| false
| 0
| 0.012048
| 0
| 0.036145
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bb5ad9a436f974ee473c3debd4208cbff609709d
| 177
|
py
|
Python
|
integration_tests/test-packages/python/pythonspecific/pythonspecific/__init__.py
|
franklinen/doppel-cli
|
959041ceec578b63fa507b0d71e2ce9e752fb5b7
|
[
"BSD-3-Clause"
] | 5
|
2019-03-11T12:44:59.000Z
|
2021-02-01T08:10:41.000Z
|
integration_tests/test-packages/python/pythonspecific/pythonspecific/__init__.py
|
franklinen/doppel-cli
|
959041ceec578b63fa507b0d71e2ce9e752fb5b7
|
[
"BSD-3-Clause"
] | 174
|
2019-01-20T03:08:44.000Z
|
2021-11-03T04:25:56.000Z
|
integration_tests/test-packages/python/pythonspecific/pythonspecific/__init__.py
|
franklinen/doppel-cli
|
959041ceec578b63fa507b0d71e2ce9e752fb5b7
|
[
"BSD-3-Clause"
] | 17
|
2019-04-16T18:23:53.000Z
|
2021-10-01T15:01:40.000Z
|
# flake8: noqa
from pythonspecific.SomeException import SomeException
# sub-modules
import pythonspecific.mod_one
import pythonspecific.mod_two
import pythonspecific.mod_three
| 22.125
| 54
| 0.864407
| 21
| 177
| 7.142857
| 0.571429
| 0.4
| 0.46
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006211
| 0.090395
| 177
| 7
| 55
| 25.285714
| 0.925466
| 0.135593
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
24ca881a322fee77dfbfdc53c7f5f3d3a57db8a1
| 67,052
|
py
|
Python
|
test/subjective_model_test.py
|
sghill/sureal
|
df4bc7a9cfd380569ecf2252be014977c68c792b
|
[
"Apache-2.0"
] | 88
|
2018-02-03T08:28:58.000Z
|
2022-03-22T09:28:52.000Z
|
test/subjective_model_test.py
|
sghill/sureal
|
df4bc7a9cfd380569ecf2252be014977c68c792b
|
[
"Apache-2.0"
] | 15
|
2018-06-19T14:42:39.000Z
|
2022-02-21T07:19:03.000Z
|
test/subjective_model_test.py
|
sghill/sureal
|
df4bc7a9cfd380569ecf2252be014977c68c792b
|
[
"Apache-2.0"
] | 29
|
2018-03-01T16:01:00.000Z
|
2022-01-23T09:57:26.000Z
|
import os
import unittest
import numpy as np
from sureal.config import SurealConfig
from sureal.dataset_reader import RawDatasetReader, MissingDataRawDatasetReader, \
SyntheticRawDatasetReader, CorruptSubjectRawDatasetReader
from sureal.subjective_model import MosModel, DmosModel, \
LegacyMaximumLikelihoodEstimationModel, MaximumLikelihoodEstimationModel, \
LiveDmosModel, MaximumLikelihoodEstimationDmosModel, LeastSquaresModel, \
SubjrejMosModel, ZscoringSubjrejMosModel, SubjrejDmosModel, \
ZscoringSubjrejDmosModel, PerSubjectModel, \
MaximumLikelihoodEstimationModelContentOblivious, \
MaximumLikelihoodEstimationModelSubjectOblivious, ZscoringMosModel, BiasremvMosModel, BiasremvSubjrejMosModel, SubjectMLEModelProjectionSolver, SubjectMLEModelProjectionSolver2
from sureal.tools.misc import import_python_file
__copyright__ = "Copyright 2016-2018, Netflix, Inc."
__license__ = "Apache, Version 2.0"
class SubjectiveModelTest(unittest.TestCase):
def setUp(self):
    """Resolve the input dataset path and the scratch output file paths."""
    workdir = SurealConfig.workdir_path
    self.dataset_filepath = SurealConfig.test_resource_path('NFLX_dataset_public_raw.py')
    self.output_dataset_filepath = workdir('NFLX_dataset_public_test.py')
    self.output_dataset_pyc_filepath = workdir('NFLX_dataset_public_test.pyc')
def tearDown(self):
if os.path.exists(self.output_dataset_filepath):
os.remove(self.output_dataset_filepath)
if os.path.exists(self.output_dataset_pyc_filepath):
os.remove(self.output_dataset_pyc_filepath)
def test_mos_subjective_model(self):
    """MOS model on the raw NFLX dataset: spot-check scores and fit stats."""
    reader = RawDatasetReader(import_python_file(self.dataset_filepath))
    result = MosModel(reader).run_modeling()
    scores = result['quality_scores']
    self.assertAlmostEqual(scores[0], 4.884615384615385, places=4)
    self.assertAlmostEqual(scores[10], 2.0769230769230771, places=4)
    self.assertAlmostEqual(float(np.mean(scores)), 3.544790652385589, places=4)
    self.assertAlmostEqual(float(np.mean(result['quality_scores_std'])), 0.12986637295658307, places=4)
    self.assertAlmostEqual(float(np.mean(result['quality_ambiguity'])), 0.6621911698651353, places=4)
    self.assertAlmostEqual(result['dof'], 0.07692307692307693, places=6)
    self.assertAlmostEqual(result['loglikelihood'], -0.9384709649191117, places=6)
    self.assertAlmostEqual(float(np.std(result['raw_scores'])), 1.3654128030298962, places=6)
    self.assertAlmostEqual(float(np.std(result['reconstructions'])), 1.183732241710059, places=6)
    self.assertAlmostEqual(result['aic'], 2.0307880836843775, places=6)
    self.assertAlmostEqual(result['bic'], 2.4636761137219545, places=6)
def test_mos_subjective_model_output(self):
    """File export: groundtruth fields are written and raw 'os' is dropped."""
    reader = RawDatasetReader(import_python_file(self.dataset_filepath))
    model = MosModel(reader)
    model.run_modeling()
    model.to_aggregated_dataset_file(self.output_dataset_filepath)
    self.assertTrue(os.path.exists(self.output_dataset_filepath))
    dis_video = import_python_file(self.output_dataset_filepath).dis_videos[0]
    self.assertIn('groundtruth', dis_video)
    self.assertIn('groundtruth_std', dis_video)
    self.assertNotIn('os', dis_video)
    self.assertAlmostEqual(dis_video['groundtruth'], 4.884615384615385, places=4)
    self.assertAlmostEqual(dis_video['groundtruth_std'], 0.08461538461538462, places=4)
def test_mos_subjective_model_output_aggregate_content_ids(self):
    """File export restricted to a subset of content ids still round-trips."""
    reader = RawDatasetReader(import_python_file(self.dataset_filepath))
    model = MosModel(reader)
    model.run_modeling()
    model.to_aggregated_dataset_file(self.output_dataset_filepath, aggregate_content_ids=[0, 2])
    self.assertTrue(os.path.exists(self.output_dataset_filepath))
    dis_video = import_python_file(self.output_dataset_filepath).dis_videos[0]
    self.assertIn('groundtruth', dis_video)
    self.assertIn('groundtruth_std', dis_video)
    self.assertNotIn('os', dis_video)
    self.assertAlmostEqual(dis_video['groundtruth'], 4.884615384615385, places=4)
    self.assertAlmostEqual(dis_video['groundtruth_std'], 0.08461538461538462, places=4)
def test_mos_subjective_model_output_aggregate_asset_ids(self):
    """File export restricted to a subset of asset ids still round-trips."""
    reader = RawDatasetReader(import_python_file(self.dataset_filepath))
    model = MosModel(reader)
    model.run_modeling()
    model.to_aggregated_dataset_file(self.output_dataset_filepath, aggregate_asset_ids=[0, 2])
    self.assertTrue(os.path.exists(self.output_dataset_filepath))
    dis_video = import_python_file(self.output_dataset_filepath).dis_videos[0]
    self.assertIn('groundtruth', dis_video)
    self.assertIn('groundtruth_std', dis_video)
    self.assertNotIn('os', dis_video)
    self.assertAlmostEqual(dis_video['groundtruth'], 4.884615384615385, places=4)
    self.assertAlmostEqual(dis_video['groundtruth_std'], 0.08461538461538462, places=4)
def test_mos_subjective_model_output_os_is_dict_style(self):
    """Export works when raw opinion scores are given dict-style per subject.

    Fix: removed a leftover debug print of dataset2.dis_videos that spammed
    the test output.
    """
    dataset = import_python_file(SurealConfig.test_resource_path('test_dataset_os_as_dict.py'))
    model = MosModel(RawDatasetReader(dataset))
    model.run_modeling()
    model.to_aggregated_dataset_file(self.output_dataset_filepath)
    self.assertTrue(os.path.exists(self.output_dataset_filepath))
    dis_video = import_python_file(self.output_dataset_filepath).dis_videos[0]
    self.assertIn('groundtruth', dis_video)
    self.assertIn('groundtruth_std', dis_video)
    self.assertNotIn('os', dis_video)
    self.assertAlmostEqual(dis_video['groundtruth'], 2.6666666666666665, places=4)
    self.assertAlmostEqual(dis_video['groundtruth_std'], 0.881917103688197, places=4)
def test_mos_subjective_model_output_custom_resampling(self):
    """Export honors resampling_type and omits quality_width/height attrs."""
    reader = RawDatasetReader(import_python_file(self.dataset_filepath))
    model = MosModel(reader)
    model.run_modeling()
    model.to_aggregated_dataset_file(self.output_dataset_filepath, resampling_type='lanczos')
    self.assertTrue(os.path.exists(self.output_dataset_filepath))
    dataset2 = import_python_file(self.output_dataset_filepath)
    self.assertFalse(hasattr(dataset2, 'quality_height'))
    self.assertFalse(hasattr(dataset2, 'quality_width'))
    self.assertEqual(dataset2.resampling_type, 'lanczos')
    dis_video = dataset2.dis_videos[0]
    self.assertIn('groundtruth', dis_video)
    self.assertIn('groundtruth_std', dis_video)
    self.assertNotIn('os', dis_video)
    self.assertAlmostEqual(dis_video['groundtruth'], 4.884615384615385, places=4)
    self.assertAlmostEqual(dis_video['groundtruth_std'], 0.08461538461538462, places=4)
def test_mos_subjective_model_output2(self):
    """In-memory aggregation via to_aggregated_dataset (no file round-trip)."""
    reader = RawDatasetReader(import_python_file(self.dataset_filepath))
    model = MosModel(reader)
    model.run_modeling()
    dis_video = model.to_aggregated_dataset().dis_videos[0]
    self.assertIn('groundtruth', dis_video)
    self.assertIn('groundtruth_std', dis_video)
    self.assertNotIn('os', dis_video)
    self.assertAlmostEqual(dis_video['groundtruth'], 4.884615384615385, places=4)
    self.assertAlmostEqual(dis_video['groundtruth_std'], 0.08461538461538462, places=4)
def test_mos_subjective_model_normalize_final(self):
    """normalize_final=True z-scores the MOS output (zero mean)."""
    reader = RawDatasetReader(import_python_file(self.dataset_filepath))
    scores = MosModel(reader).run_modeling(normalize_final=True)['quality_scores']
    self.assertAlmostEqual(scores[0], 1.1318646945818083, places=4)
    self.assertAlmostEqual(scores[10], -1.2400334499143002, places=4)
    self.assertAlmostEqual(float(np.mean(scores)), 0.0, places=4)
def test_mos_subjective_model_transform_final(self):
    """transform_final applies the linear map p1 * score + p0 to the output."""
    reader = RawDatasetReader(import_python_file(self.dataset_filepath))
    scores = MosModel(reader).run_modeling(transform_final={'p1': 10, 'p0': 1})['quality_scores']
    self.assertAlmostEqual(scores[0], 49.84615384615385, places=4)
    self.assertAlmostEqual(scores[10], 21.769230769230771, places=4)
    self.assertAlmostEqual(float(np.mean(scores)), 36.44790652385589, places=4)
def test_from_dataset_file(self):
    """The from_dataset_file constructor matches the reader-based path."""
    model = MosModel.from_dataset_file(self.dataset_filepath)
    scores = model.run_modeling()['quality_scores']
    self.assertAlmostEqual(scores[0], 4.884615384615385, places=4)
    self.assertAlmostEqual(scores[10], 2.0769230769230771, places=4)
    self.assertAlmostEqual(float(np.mean(scores)), 3.544790652385589, places=4)
def test_dmos_subjective_model(self):
    """DMOS model: differential scores and their standard errors."""
    model = DmosModel.from_dataset_file(self.dataset_filepath)
    result = model.run_modeling()
    scores = result['quality_scores']
    self.assertAlmostEqual(scores[0], 5.0, places=4)
    self.assertAlmostEqual(scores[10], 2.1923076923076921, places=4)
    self.assertAlmostEqual(float(np.mean(scores)), 3.7731256085686473, places=4)
    self.assertAlmostEqual(float(np.mean(result['quality_scores_std'])), 0.12986637295658307, places=4)
def test_dmos_subjective_model_normalize_final(self):
    """DMOS with normalize_final=True: zero-mean normalized scores."""
    model = DmosModel.from_dataset_file(self.dataset_filepath)
    scores = model.run_modeling(normalize_final=True)['quality_scores']
    self.assertAlmostEqual(scores[0], 1.0440613892053001, places=4)
    self.assertAlmostEqual(scores[10], -1.3452648137895296, places=4)
    self.assertAlmostEqual(float(np.mean(scores)), 0.0, places=4)
def test_dmos_subjective_model_dscore_mode_same(self):
    """dscore_mode=True must not change the normalized DMOS output.

    Bug fix: the test name promises dscore_mode coverage, but the body was a
    verbatim copy of test_dmos_subjective_model_normalize_final and never
    passed dscore_mode, so the flag went untested. Expected values are
    unchanged — the point of the test ("..._same") is that dscore_mode
    yields the same normalized result.
    """
    model = DmosModel.from_dataset_file(self.dataset_filepath)
    scores = model.run_modeling(dscore_mode=True, normalize_final=True)['quality_scores']
    self.assertAlmostEqual(scores[0], 1.0440613892053001, places=4)
    self.assertAlmostEqual(scores[10], -1.3452648137895296, places=4)
    self.assertAlmostEqual(float(np.mean(scores)), 0.0, places=4)
def test_observer_aware_subjective_model_with_dscoring(self):
    """Legacy MLE with dscore_mode: bias/inconsistency/score aggregates."""
    model = LegacyMaximumLikelihoodEstimationModel.from_dataset_file(self.dataset_filepath)
    result = model.run_modeling(dscore_mode=True, force_subjbias_zeromean=False)
    for key, stat, expected in [
        ('observer_bias', np.sum, -0.090840910829083799),
        ('observer_bias', np.var, 0.089032585621095089),
        ('observer_inconsistency', np.sum, 15.681766163430936),
        ('observer_inconsistency', np.var, 0.012565584832977776),
        ('quality_scores', np.sum, 298.35293969059796),
        ('quality_scores', np.var, 1.4163670233392607),
    ]:
        self.assertAlmostEqual(float(stat(result[key])), expected, places=4)
def test_observer_aware_subjective_model_with_zscoring(self):
    """Legacy MLE with zscore_mode: bias and scores are zero-sum."""
    model = LegacyMaximumLikelihoodEstimationModel.from_dataset_file(self.dataset_filepath)
    result = model.run_modeling(zscore_mode=True, force_subjbias_zeromean=False)
    for key, stat, expected in [
        ('observer_bias', np.sum, 0.0),
        ('observer_bias', np.var, 0.0),
        ('observer_inconsistency', np.sum, 11.568205661696393),
        ('observer_inconsistency', np.var, 0.0079989301785523791),
        ('quality_scores', np.sum, 0.0),
        ('quality_scores', np.var, 0.80942484781493518),
    ]:
        self.assertAlmostEqual(float(stat(result[key])), expected, places=4)
def test_observer_aware_subjective_model_with_dscoring_and_zscoring(self):
    """Legacy MLE with dscore_mode and zscore_mode enabled together."""
    model = LegacyMaximumLikelihoodEstimationModel.from_dataset_file(self.dataset_filepath)
    result = model.run_modeling(dscore_mode=True, zscore_mode=True, force_subjbias_zeromean=False)
    for key, stat, expected in [
        ('observer_bias', np.sum, 0.0),
        ('observer_bias', np.var, 0.0),
        ('observer_inconsistency', np.sum, 11.628499078069273),
        ('observer_inconsistency', np.var, 0.0082089371266301642),
        ('quality_scores', np.sum, 0.0),
        ('quality_scores', np.var, 0.80806512456121071),
    ]:
        self.assertAlmostEqual(float(stat(result[key])), expected, places=4)
def test_observer_aware_subjective_model_use_log(self):
    """Legacy MLE with use_log=True: log-domain optimization variant."""
    model = LegacyMaximumLikelihoodEstimationModel.from_dataset_file(self.dataset_filepath)
    result = model.run_modeling(use_log=True, force_subjbias_zeromean=False)
    for key, stat, expected in [
        ('observer_bias', np.sum, -0.082429594509296211),
        ('observer_bias', np.var, 0.089032585621095089),
        ('observer_inconsistency', np.sum, 15.681766163430936),
        ('observer_inconsistency', np.var, 0.012565584832977776),
        ('quality_scores', np.sum, 280.2889206910113),
        ('quality_scores', np.var, 1.4355485462027884),
    ]:
        self.assertAlmostEqual(float(stat(result[key])), expected, places=4)
def test_observer_content_aware_subjective_model(self):
    """Full MLE model: content ambiguity, observer params, and fit stats."""
    model = MaximumLikelihoodEstimationModel.from_dataset_file(self.dataset_filepath)
    result = model.run_modeling(force_subjbias_zeromean=False)
    for key, stat, expected in [
        ('content_ambiguity', np.sum, 3.8972884776604402),
        ('content_ambiguity', np.var, 0.0041122094732031289),
        ('observer_bias', np.sum, -0.055712761348815837),
        ('observer_bias', np.var, 0.085842891905121704),
        ('observer_inconsistency', np.sum, 10.164665557559516),
        ('observer_inconsistency', np.var, 0.028749990587721687),
        ('quality_scores', np.sum, 280.20774261173619),
        ('quality_scores', np.var, 1.4351342153719635),
        ('content_ambiguity_std', np.sum, 0.30465244947706538),
        ('observer_bias_std', np.sum, 1.7392847550878989),
        ('observer_inconsistency_std', np.sum, 22.108576292956428),
        ('quality_scores_std', np.sum, 8.8863877635750423),
    ]:
        self.assertAlmostEqual(float(stat(result[key])), expected, places=4)
    self.assertAlmostEqual(result['dof'], 0.06815968841285297, places=6)
    self.assertAlmostEqual(result['loglikelihood'], -0.8897673811562866, places=6)
    self.assertAlmostEqual(float(np.std(result['raw_scores'])), 1.3654128030298962, places=6)
    self.assertAlmostEqual(float(np.std(result['reconstructions'])), 1.2332790063154353, places=6)
    self.assertAlmostEqual(result['aic'], 1.915854139138279, places=6)
    self.assertAlmostEqual(result['bic'], 2.299425811323474, places=6)
def test_observer_content_aware_subjective_model_subjbias_zeromean(self):
    """Default run forces observer bias to zero mean; other stats unchanged."""
    model = MaximumLikelihoodEstimationModel.from_dataset_file(self.dataset_filepath)
    result = model.run_modeling()
    for key, stat, expected in [
        ('content_ambiguity', np.sum, 3.8972884776604402),
        ('content_ambiguity', np.var, 0.0041122094732031289),
        ('observer_bias', np.sum, 0.0),
        ('observer_bias', np.var, 0.085842891905121704),
        ('observer_inconsistency', np.sum, 10.164665557559516),
        ('observer_inconsistency', np.var, 0.028749990587721687),
        ('quality_scores', np.sum, 280.0384615291764),
        ('quality_scores', np.var, 1.4351342153719635),
        ('content_ambiguity_std', np.sum, 0.30465244947706538),
        ('observer_bias_std', np.sum, 1.7392847550878989),
        ('observer_inconsistency_std', np.sum, 22.108576292956428),
        ('quality_scores_std', np.sum, 8.8863877635750423),
    ]:
        self.assertAlmostEqual(float(stat(result[key])), expected, places=4)
def test_observer_content_aware_subjective_model_original(self):
    """gradient_method='original' reproduces the default solver's results."""
    model = MaximumLikelihoodEstimationModel.from_dataset_file(self.dataset_filepath)
    result = model.run_modeling(gradient_method='original', force_subjbias_zeromean=False)
    for key, stat, expected in [
        ('content_ambiguity', np.sum, 3.8972884776604402),
        ('content_ambiguity', np.var, 0.0041122094732031289),
        ('observer_bias', np.sum, -0.055712761348815837),
        ('observer_bias', np.var, 0.085842891905121704),
        ('observer_inconsistency', np.sum, 10.164665557559516),
        ('observer_inconsistency', np.var, 0.028749990587721687),
        ('quality_scores', np.sum, 280.20774261173619),
        ('quality_scores', np.var, 1.4351342153719635),
        ('content_ambiguity_std', np.sum, 0.30465244947706538),
        ('observer_bias_std', np.sum, 1.7392847550878989),
        ('observer_inconsistency_std', np.sum, 22.108576292956428),
        ('quality_scores_std', np.sum, 8.8863877635750423),
    ]:
        self.assertAlmostEqual(float(stat(result[key])), expected, places=4)
def test_observer_content_aware_subjective_model_numerical(self):
    """gradient_method='numerical' agrees with the default (looser on one stat)."""
    model = MaximumLikelihoodEstimationModel.from_dataset_file(self.dataset_filepath)
    result = model.run_modeling(gradient_method='numerical', force_subjbias_zeromean=False)
    for key, stat, expected, places in [
        ('content_ambiguity', np.sum, 3.8972884776604402, 4),
        ('content_ambiguity', np.var, 0.0041122094732031289, 4),
        ('observer_bias', np.sum, -0.055712761348815837, 4),
        ('observer_bias', np.var, 0.085842891905121704, 4),
        # numerical gradients are slightly less precise for this aggregate
        ('observer_inconsistency', np.sum, 10.164665557559516, 3),
        ('observer_inconsistency', np.var, 0.028749990587721687, 4),
        ('quality_scores', np.sum, 280.20774261173619, 4),
        ('quality_scores', np.var, 1.4351342153719635, 4),
        ('content_ambiguity_std', np.sum, 0.30465244947706538, 4),
        ('observer_bias_std', np.sum, 1.7392847550878989, 4),
        ('observer_inconsistency_std', np.sum, 12.393285044624955, 4),
        ('quality_scores_std', np.sum, 8.8863877635750423, 4),
    ]:
        self.assertAlmostEqual(float(stat(result[key])), expected, places=places)
def test_observer_content_aware_subjective_model_missingdata(self):
    """MLE model stays stable when scores are dropped at random (10% / 50%)."""
    dataset = import_python_file(self.dataset_filepath)
    cases = [
        (0.1, [
            ('content_ambiguity', np.sum, 3.9104244772977128),
            ('content_ambiguity', np.var, 0.0037713583509767193),
            ('observer_bias', np.sum, -0.21903272050455846),
            ('observer_bias', np.var, 0.084353684687185043),
            ('observer_inconsistency', np.sum, 9.8168943054654481),
            ('observer_inconsistency', np.var, 0.028159236075789944),
            ('quality_scores', np.sum, 280.05548186797336),
            ('quality_scores', np.var, 1.4339487982797514),
        ]),
        (0.5, [
            ('content_ambiguity', np.sum, 2.63184284168883),
            ('content_ambiguity', np.var, 0.019164097909450246),
            ('observer_bias', np.sum, 0.2263148440748638),
            ('observer_bias', np.var, 0.070613033112114504),
            ('observer_inconsistency', np.sum, 12.317917502439435),
            ('observer_inconsistency', np.var, 0.029455722248727296),
            ('quality_scores', np.sum, 280.29962156788139),
            ('quality_scores', np.var, 1.4717366222424826),
        ]),
    ]
    for missing_probability, expectations in cases:
        np.random.seed(0)  # reseed so each corruption pattern is reproducible
        reader = MissingDataRawDatasetReader(
            dataset, input_dict={'missing_probability': missing_probability})
        result = MaximumLikelihoodEstimationModel(reader).run_modeling(
            force_subjbias_zeromean=False)
        for key, stat, expected in expectations:
            self.assertAlmostEqual(float(stat(result[key])), expected, places=4)
def test_observer_content_aware_subjective_model_nocontent(self):
    """Content-oblivious MLE: observer params, fit stats, stds and CI95s."""
    model = MaximumLikelihoodEstimationModelContentOblivious.from_dataset_file(self.dataset_filepath)
    result = model.run_modeling(force_subjbias_zeromean=False)
    for key, stat, expected in [
        ('observer_bias', np.sum, -0.090840910829083799),
        ('observer_bias', np.var, 0.089032585621095089),
        ('observer_inconsistency', np.sum, 15.681766163430936),
        ('observer_inconsistency', np.var, 0.012565584832977776),
        ('quality_scores', np.sum, 280.31447815213642),
        ('quality_scores', np.var, 1.4355485462027884),
        ('observer_bias_std', np.sum, 1.7643365374531321),
        ('observer_inconsistency_std', np.sum, 1.2475743287658851),
        ('quality_scores_std', np.sum, 8.907545016644042),
        ('observer_bias_ci95', np.sum, 6.916058079893282),
        ('observer_inconsistency_ci95', np.sum, 5.002792923339208),
        ('quality_scores_ci95', np.sum, 34.91686386164329),
    ]:
        self.assertAlmostEqual(float(stat(result[key])), expected, places=4)
    self.assertAlmostEqual(result['dof'], 0.06377799415774099, places=6)
    self.assertAlmostEqual(result['loglikelihood'], -0.8967394355890235, places=6)
    self.assertAlmostEqual(float(np.std(result['raw_scores'])), 1.3654128030298962, places=6)
    self.assertAlmostEqual(float(np.std(result['reconstructions'])), 1.2347392971084559, places=6)
    self.assertAlmostEqual(result['aic'], 1.921034859493529, places=6)
    self.assertAlmostEqual(result['bic'], 2.2799483527525326, places=6)
def test_observer_content_aware_subjective_model_nocontent_subjbias_zeromean(self):
    """Content-oblivious MLE with default zero-mean observer bias."""
    model = MaximumLikelihoodEstimationModelContentOblivious.from_dataset_file(self.dataset_filepath)
    result = model.run_modeling()
    for key, stat, expected in [
        ('observer_bias', np.sum, 0.0),
        ('observer_bias', np.var, 0.089032585621095089),
        ('observer_inconsistency', np.sum, 15.681766163430936),
        ('observer_inconsistency', np.var, 0.012565584832977776),
        ('quality_scores', np.sum, 280.0384615384633),
        ('quality_scores', np.var, 1.4355485462027884),
    ]:
        self.assertAlmostEqual(float(stat(result[key])), expected, places=4)
    self.assertAlmostEqual(result['dof'], 0.06377799415774099, places=6)
    self.assertAlmostEqual(result['loglikelihood'], -0.8967394355890235, places=6)
    self.assertAlmostEqual(float(np.std(result['raw_scores'])), 1.3654128030298962, places=6)
    self.assertAlmostEqual(float(np.std(result['reconstructions'])), 1.2347392971084559, places=6)
    self.assertAlmostEqual(result['aic'], 1.921034859493529, places=6)
    self.assertAlmostEqual(result['bic'], 2.2799483527525326, places=6)
def test_observer_content_aware_subjective_model_nosubject(self):
    """Subject-oblivious MLE: content ambiguity only, plus fit statistics."""
    model = MaximumLikelihoodEstimationModelSubjectOblivious.from_dataset_file(self.dataset_filepath)
    result = model.run_modeling(force_subjbias_zeromean=False)
    for key, stat, expected in [
        ('quality_scores', np.sum, 280.0384615384616),
        ('quality_scores', np.var, 1.4012220200639218),
        ('content_ambiguity', np.sum, 6.06982228334157),
        ('content_ambiguity', np.var, 0.0045809756997836721),
    ]:
        self.assertAlmostEqual(float(stat(result[key])), expected, places=4)
    self.assertAlmostEqual(result['dof'], 0.042843232716650435, places=6)
    self.assertAlmostEqual(result['loglikelihood'], -1.02419628655795, places=6)
    self.assertAlmostEqual(float(np.std(result['raw_scores'])), 1.3654128030298962, places=6)
    self.assertAlmostEqual(float(np.std(result['reconstructions'])), 1.183732241710059, places=6)
    self.assertAlmostEqual(result['aic'], 2.1340790385492006, places=6)
    self.assertAlmostEqual(result['bic'], 2.3751812324941803, places=6)
def test_observer_aware_subjective_model_synthetic(self):
    """Legacy MLE recovers parameters from synthetically generated scores."""
    np.random.seed(0)  # ground-truth parameters below are drawn reproducibly
    dataset = import_python_file(self.dataset_filepath)
    synthetic_params = {
        'quality_scores': np.random.uniform(1, 5, 79),
        'observer_bias': np.random.normal(0, 1, 26),
        'observer_inconsistency': np.abs(np.random.uniform(0.4, 0.6, 26)),
        'content_bias': np.zeros(9),
        'content_ambiguity': np.zeros(9),
    }
    reader = SyntheticRawDatasetReader(dataset, input_dict=synthetic_params)
    result = LegacyMaximumLikelihoodEstimationModel(reader).run_modeling(
        force_subjbias_zeromean=False)
    for key, stat, expected in [
        ('observer_bias', np.sum, -0.90138622499935517),
        ('observer_bias', np.var, 0.84819162765420342),
        ('observer_inconsistency', np.sum, 12.742288471632817),
        ('observer_inconsistency', np.var, 0.0047638169604076975),
        ('quality_scores', np.sum, 236.78529213581052),
        ('quality_scores', np.var, 1.3059726132293354),
    ]:
        self.assertAlmostEqual(float(stat(result[key])), expected, places=4)
def test_observer_aware_subjective_model(self):
    """Regression-pin legacy MLE model outputs on the full raw dataset."""
    model = LegacyMaximumLikelihoodEstimationModel.from_dataset_file(
        self.dataset_filepath)
    res = model.run_modeling(force_subjbias_zeromean=False)
    self.assertAlmostEqual(float(np.sum(res['observer_bias'])), -0.090840910829083799, places=4)
    self.assertAlmostEqual(float(np.var(res['observer_bias'])), 0.089032585621095089, places=4)
    self.assertAlmostEqual(float(np.sum(res['observer_inconsistency'])), 15.681766163430936, places=4)
    self.assertAlmostEqual(float(np.var(res['observer_inconsistency'])), 0.012565584832977776, places=4)
    self.assertAlmostEqual(float(np.sum(res['quality_scores'])), 280.31447815213642, places=4)
    self.assertAlmostEqual(float(np.var(res['quality_scores'])), 1.4355485462027884, places=4)
def test_observer_aware_subjective_model_missingdata(self):
    """Legacy MLE model stays stable with 10% and 50% of scores randomly missing."""
    dataset = import_python_file(self.dataset_filepath)

    # Phase 1: drop 10% of scores.
    np.random.seed(0)
    reader = MissingDataRawDatasetReader(dataset, input_dict={'missing_probability': 0.1})
    model = LegacyMaximumLikelihoodEstimationModel(reader)
    res = model.run_modeling(force_subjbias_zeromean=False)
    self.assertAlmostEqual(float(np.sum(res['observer_bias'])), -0.18504017984241944, places=4)
    self.assertAlmostEqual(float(np.var(res['observer_bias'])), 0.087350553292201705, places=4)
    self.assertAlmostEqual(float(np.sum(res['observer_inconsistency'])), 15.520738471447299, places=4)
    self.assertAlmostEqual(float(np.var(res['observer_inconsistency'])), 0.010940587327083341, places=4)
    self.assertAlmostEqual(float(np.sum(res['quality_scores'])), 279.94975274863879, places=4)
    self.assertAlmostEqual(float(np.var(res['quality_scores'])), 1.4325574378911554, places=4)

    # Phase 2: drop 50% of scores.
    np.random.seed(0)
    reader = MissingDataRawDatasetReader(dataset, input_dict={'missing_probability': 0.5})
    model = LegacyMaximumLikelihoodEstimationModel(reader)
    res = model.run_modeling(force_subjbias_zeromean=False)
    self.assertAlmostEqual(float(np.sum(res['observer_bias'])), 0.057731868199093525, places=4)
    self.assertAlmostEqual(float(np.var(res['observer_bias'])), 0.081341845650928557, places=4)
    self.assertAlmostEqual(float(np.sum(res['observer_inconsistency'])), 14.996238224489693, places=4)
    self.assertAlmostEqual(float(np.var(res['observer_inconsistency'])), 0.013666025579465165, places=4)
    self.assertAlmostEqual(float(np.sum(res['quality_scores'])), 280.67100837103203, places=4)
    self.assertAlmostEqual(float(np.var(res['quality_scores'])), 1.4637917512768972, places=4)
def test_livedmos_subjective_model(self):
    """LIVE-style DMOS model: spot-check two scores and the 50-centered mean."""
    model = LiveDmosModel.from_dataset_file(self.dataset_filepath)
    qs = model.run_modeling()['quality_scores']
    self.assertAlmostEqual(qs[0], 65.307711974116913, places=4)
    self.assertAlmostEqual(qs[10], 30.204773267864258, places=4)
    # LIVE DMOS is calibrated to have mean 50.
    self.assertAlmostEqual(float(np.mean(qs)), 50.0, places=4)
def test_livedmos_subjective_model_normalize_final(self):
    """normalize_final=True re-standardizes LIVE DMOS scores to zero mean."""
    model = LiveDmosModel.from_dataset_file(self.dataset_filepath)
    qs = model.run_modeling(normalize_final=True)['quality_scores']
    self.assertAlmostEqual(qs[0], 1.0392964273048528, places=4)
    self.assertAlmostEqual(qs[10], -1.3439701802061783, places=4)
    self.assertAlmostEqual(float(np.mean(qs)), 0.0, places=4)
def test_livedmos_subjective_model_dscore_mode_bad(self):
    """dscore_mode is not supported by LiveDmosModel and must assert."""
    model = LiveDmosModel.from_dataset_file(self.dataset_filepath)
    with self.assertRaises(AssertionError):
        model.run_modeling(dscore_mode=True)
def test_observer_aware_subjective_model_corruptdata(self):
    """Legacy MLE model should be robust when the first 5 subjects are corrupted."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    model = LegacyMaximumLikelihoodEstimationModel(reader)
    res = model.run_modeling(force_subjbias_zeromean=False)
    # Commented values are the uncorrupted-dataset references.
    self.assertAlmostEqual(float(np.mean(res['quality_scores'])), 3.5573073781669944, places=4)  # 3.5482845335713469
    self.assertAlmostEqual(float(np.var(res['quality_scores'])), 1.3559834438740614, places=4)  # 1.4355485462027884
def test_mos_subjective_model_corruptdata(self):
    """Plain MOS with corrupt subjects: variance shrinks vs. clean reference."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    model = MosModel(reader)
    qs = model.run_modeling()['quality_scores']
    self.assertAlmostEqual(float(np.mean(qs)), 3.5447906523855899, places=4)
    self.assertAlmostEqual(float(np.var(qs)), 0.95893305294535369, places=4)  # 1.4012220200639218
def test_mos_subjective_model_corruptdata_subjreject(self):
    """MOS with subject rejection partially recovers variance lost to corruption."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    model = MosModel(reader)
    qs = model.run_modeling(subject_rejection=True)['quality_scores']
    self.assertAlmostEqual(float(np.mean(qs)), 3.5611814345991566, places=4)
    self.assertAlmostEqual(float(np.var(qs)), 1.1049505732699529, places=4)  # 1.4012220200639218
def test_zscore_mos_subjective_model_corruptdata_subjreject(self):
    """MOS with z-scoring + subject rejection yields zero-mean scores."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    model = MosModel(reader)
    qs = model.run_modeling(zscore_mode=True, subject_rejection=True)['quality_scores']
    self.assertAlmostEqual(float(np.mean(qs)), 0.0, places=4)
    self.assertAlmostEqual(float(np.var(qs)), 0.66670826882879042, places=4)
def test_observer_aware_subjective_model_subjreject(self):
    """Subject rejection is incompatible with the legacy MLE model and must assert."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    model = LegacyMaximumLikelihoodEstimationModel(reader)
    with self.assertRaises(AssertionError):
        model.run_modeling(subject_rejection=True)
def test_observer_content_aware_subjective_model_subjreject(self):
    """Subject rejection is incompatible with the content-aware MLE model too."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    model = MaximumLikelihoodEstimationModel(reader)
    with self.assertRaises(AssertionError):
        model.run_modeling(subject_rejection=True, force_subjbias_zeromean=False)
def test_observer_content_aware_subjective_dmos_model(self):
    """Regression-pin the content-aware MLE DMOS model's outputs."""
    model = MaximumLikelihoodEstimationDmosModel.from_dataset_file(
        self.dataset_filepath)
    res = model.run_modeling(force_subjbias_zeromean=False)
    self.assertAlmostEqual(float(np.sum(res['quality_scores'])), 288.56842946051466, places=4)
    self.assertAlmostEqual(float(np.var(res['quality_scores'])), 1.4166132275824235, places=4)
    self.assertAlmostEqual(float(np.sum(res['content_ambiguity'])), 3.8972884776604402, places=4)
    self.assertAlmostEqual(float(np.var(res['content_ambiguity'])), 0.0041122094732031289, places=4)
    self.assertAlmostEqual(float(np.sum(res['observer_bias'])), 3.1293776428507774, places=4)
    self.assertAlmostEqual(float(np.var(res['observer_bias'])), 0.085842891905121704, places=4)
    self.assertAlmostEqual(float(np.sum(res['observer_inconsistency'])), 10.164665557559516, places=4)
    self.assertAlmostEqual(float(np.var(res['observer_inconsistency'])), 0.028749990587721687, places=4)
def test_dmos_mle_co_model(self):
    """Regression-pin the content-oblivious MLE model's outputs."""
    model = MaximumLikelihoodEstimationModelContentOblivious.from_dataset_file(
        self.dataset_filepath)
    res = model.run_modeling(force_subjbias_zeromean=False)
    self.assertAlmostEqual(float(np.sum(res['quality_scores'])), 280.31447815213642, places=4)
    self.assertAlmostEqual(float(np.var(res['quality_scores'])), 1.4355485462027884, places=4)
    self.assertAlmostEqual(float(np.sum(res['observer_bias'])), -0.090840910829074084, places=4)
    self.assertAlmostEqual(float(np.var(res['observer_bias'])), 0.089032585621095048, places=4)
    self.assertAlmostEqual(float(np.sum(res['observer_inconsistency'])), 15.681766163430936, places=4)
    self.assertAlmostEqual(float(np.var(res['observer_inconsistency'])), 0.01256558483297778, places=4)
def test_least_squares_model(self):
    """Least-squares model: quality scores match MOS-like pins; bias sums to zero."""
    model = LeastSquaresModel.from_dataset_file(
        self.dataset_filepath)
    res = model.run_modeling()
    self.assertAlmostEqual(float(np.sum(res['quality_scores'])), 280.03846153847428, places=4)
    self.assertAlmostEqual(float(np.var(res['quality_scores'])), 1.4012220200638821, places=4)
    self.assertAlmostEqual(float(np.sum(res['observer_bias'])), 0, places=4)
    self.assertAlmostEqual(float(np.var(res['observer_bias'])), 0.089032585621522581, places=4)
def test_subjrejmos_subjective_model_corruptdata_subjreject(self):
    """SubjrejMosModel on corrupted subjects: pin scores and model-fit statistics."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    model = SubjrejMosModel(reader)
    res = model.run_modeling()
    qs = res['quality_scores']
    self.assertAlmostEqual(float(np.mean(qs)), 3.5611814345991566, places=4)
    self.assertAlmostEqual(float(np.var(qs)), 1.1049505732699529, places=4)  # 1.4012220200639218
    # Goodness-of-fit statistics.
    self.assertAlmostEqual(res['dof'], 0.07692307692307693, places=6)
    self.assertAlmostEqual(res['loglikelihood'], -1.2051956998810835, places=6)
    self.assertAlmostEqual(float(np.std(res['raw_scores'])), 1.3565171169581582, places=6)
    self.assertAlmostEqual(float(np.std(res['reconstructions'])), 1.0511662919205282, places=6)
    self.assertAlmostEqual(res['aic'], 2.564237553608321, places=6)
    self.assertAlmostEqual(res['bic'], 2.9971255836458983, places=6)
def test_zscoremos_subjective_model_corruptdata_subjreject(self):
    """ZscoringMosModel on corrupted subjects: zero-mean scores plus fit statistics."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    model = ZscoringMosModel(reader)
    res = model.run_modeling()
    qs = res['quality_scores']
    self.assertAlmostEqual(float(np.mean(qs)), 0.0, places=4)
    self.assertAlmostEqual(float(np.var(qs)), 0.5405866214633748, places=4)  # 1.4012220200639218
    # Goodness-of-fit statistics.
    self.assertAlmostEqual(res['dof'], 0.07692307692307693, places=6)
    self.assertAlmostEqual(res['loglikelihood'], -0.9696021743118809, places=6)
    self.assertAlmostEqual(float(np.std(res['raw_scores'])), 0.99365072945774, places=6)
    self.assertAlmostEqual(float(np.std(res['reconstructions'])), 0.7352459598415858, places=6)
    self.assertAlmostEqual(res['aic'], 2.0930505024699158, places=6)
    self.assertAlmostEqual(res['bic'], 2.525938532507493, places=6)
def test_biasremv_mos_subjective_model_corruptdata(self):
    """BiasremvMosModel on corrupted subjects: pin scores, bias, and fit statistics."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    model = BiasremvMosModel(reader)
    res = model.run_modeling()
    qs = res['quality_scores']
    obs_bias = res['observer_bias']
    self.assertAlmostEqual(float(np.mean(qs)), 3.5447906523855885, places=8)
    self.assertAlmostEqual(float(np.var(qs)), 0.9589330529453537, places=8)
    # Removed bias is re-centered to zero mean.
    self.assertAlmostEqual(float(np.mean(obs_bias)), 0.0, places=8)
    self.assertAlmostEqual(float(np.var(obs_bias)), 0.08903258562151982, places=8)
    # Goodness-of-fit statistics.
    self.assertAlmostEqual(res['dof'], 0.08958130477117819, places=6)
    self.assertAlmostEqual(res['loglikelihood'], -1.2761533126002955, places=6)
    self.assertAlmostEqual(float(np.std(res['raw_scores'])), 1.332411174171261, places=6)
    self.assertAlmostEqual(float(np.std(res['reconstructions'])), 0.9792512716077287, places=6)
    self.assertAlmostEqual(res['aic'], 2.7314692347429474, places=6)
    self.assertAlmostEqual(res['bic'], 3.2355920039006323, places=6)
def test_biasremv_subjrej_mos_subjective_model_corruptdata(self):
    """BiasremvSubjrejMosModel on corrupted subjects: pin outputs and fit statistics."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    model = BiasremvSubjrejMosModel(reader)
    res = model.run_modeling()
    qs = res['quality_scores']
    obs_bias = res['observer_bias']
    self.assertAlmostEqual(float(np.mean(qs)), 3.5447906523855885, places=8)
    self.assertAlmostEqual(float(np.var(qs)), 1.09500013352561, places=8)
    # Removed bias is re-centered to zero mean.
    self.assertAlmostEqual(float(np.mean(obs_bias)), 0.0, places=8)
    self.assertAlmostEqual(float(np.var(obs_bias)), 0.08903258562151982, places=8)
    # Goodness-of-fit statistics.
    self.assertAlmostEqual(res['dof'], 0.08958130477117819, places=6)
    self.assertAlmostEqual(res['loglikelihood'], -1.1737836830835549, places=6)
    self.assertAlmostEqual(float(np.std(res['raw_scores'])), 1.3307052960550632, places=6)
    self.assertAlmostEqual(float(np.std(res['reconstructions'])), 1.04642254062382, places=6)
    self.assertAlmostEqual(res['aic'], 2.526729975709466, places=6)
    self.assertAlmostEqual(res['bic'], 3.030852744867151, places=6)
def test_zscoresubjrejmos_subjective_model_corruptdata_subjreject(self):
    """ZscoringSubjrejMosModel on corrupted subjects: zero-mean, pinned variance."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    model = ZscoringSubjrejMosModel(reader)
    qs = model.run_modeling()['quality_scores']
    self.assertAlmostEqual(float(np.mean(qs)), 0, places=4)
    self.assertAlmostEqual(float(np.var(qs)), 0.66670826882879042, places=4)  # 1.4012220200639218
def test_subjrejdmos_subjective_model_corruptdata_subjreject(self):
    """SubjrejDmosModel on corrupted subjects: pin mean/variance of DMOS scores."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    model = SubjrejDmosModel(reader)
    qs = model.run_modeling()['quality_scores']
    self.assertAlmostEqual(float(np.mean(qs)), 4.0246673158065542, places=4)
    self.assertAlmostEqual(float(np.var(qs)), 1.0932580358187849, places=4)  # 1.4012220200639218
def test_zscoresubjrejdmos_subjective_model_corruptdata_subjreject(self):
    """ZscoringSubjrejDmosModel on corrupted subjects: zero-mean, pinned variance."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    model = ZscoringSubjrejDmosModel(reader)
    qs = model.run_modeling()['quality_scores']
    self.assertAlmostEqual(float(np.mean(qs)), 0, places=4)
    self.assertAlmostEqual(float(np.var(qs)), 0.66405245792414114, places=4)  # 1.4012220200639218
def test_persubject_subjective_model_output(self):
    """PerSubjectModel round-trip: write aggregated dataset file and re-import it."""
    dataset = import_python_file(self.dataset_filepath)
    model = PerSubjectModel(RawDatasetReader(dataset))
    # Linear transform: 25 * score - 25.
    model.run_modeling(transform_final={'p1': 25, 'p0': -25})
    model.to_aggregated_dataset_file(self.output_dataset_filepath)
    self.assertTrue(os.path.exists(self.output_dataset_filepath))
    reloaded = import_python_file(self.output_dataset_filepath)
    first_video = reloaded.dis_videos[0]
    self.assertTrue('groundtruth' in first_video)
    self.assertTrue('os' not in first_video)
    self.assertAlmostEqual(first_video['groundtruth'], 100.0, places=4)
def test_proj_mle_subjective_model_corruptdata(self):
    """Projection-solver MLE on corrupted subjects: pin all estimates and stds."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    model = SubjectMLEModelProjectionSolver(reader)
    res = model.run_modeling()
    qs = res['quality_scores']
    obs_bias = res['observer_bias']
    obs_incon = res['observer_inconsistency']
    self.assertAlmostEqual(float(np.mean(qs)), 3.5447906523855877, places=8)
    self.assertAlmostEqual(float(np.var(qs)), 1.3559834679453553, places=8)
    self.assertAlmostEqual(float(np.mean(obs_bias)), 0.0, places=8)
    self.assertAlmostEqual(float(np.var(obs_bias)), 0.08903258562151985, places=8)
    self.assertAlmostEqual(float(np.mean(obs_incon)), 0.8091663380211014, places=8)
    self.assertAlmostEqual(float(np.var(obs_incon)), 0.21269010120806528, places=8)
    # Standard deviations of the parameter estimates.
    self.assertAlmostEqual(float(np.sum(res['observer_bias_std'])), 2.3669964674034123, places=4)
    self.assertAlmostEqual(float(np.sum(res['observer_inconsistency_std'])), 1.6737192530463552, places=4)
    self.assertAlmostEqual(float(np.sum(res['quality_scores_std'])), 9.592833401286343, places=4)
def test_proj_mle_subjective_model_corruptdata_nonzero_bias(self):
    """Projection-solver MLE with force_subjbias_zeromean=False on corrupted subjects.

    Pins parameter estimates, goodness-of-fit statistics, and estimate stds.
    NOTE(review): results match the zero-mean variant above — presumably the
    solver already centers bias internally; confirm against the model code.
    """
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    info_dict = {
        'selected_subjects': range(5),
    }
    dataset_reader = CorruptSubjectRawDatasetReader(dataset, input_dict=info_dict)
    subjective_model = SubjectMLEModelProjectionSolver(dataset_reader)
    result = subjective_model.run_modeling(force_subjbias_zeromean=False)
    scores = result['quality_scores']
    bias = result['observer_bias']
    inconsistency = result['observer_inconsistency']
    self.assertAlmostEqual(float(np.mean(scores)), 3.5447906523855877, places=8)
    self.assertAlmostEqual(float(np.var(scores)), 1.3559834679453553, places=8)
    self.assertAlmostEqual(float(np.mean(bias)), 0.0, places=8)
    self.assertAlmostEqual(float(np.var(bias)), 0.08903258562151985, places=8)
    self.assertAlmostEqual(float(np.mean(inconsistency)), 0.8091663380211014, places=8)
    self.assertAlmostEqual(float(np.var(inconsistency)), 0.21269010120806528, places=8)
    # Goodness-of-fit statistics.
    self.assertAlmostEqual(result['dof'], 0.06377799415774099, places=6)
    self.assertAlmostEqual(result['loglikelihood'], -1.084797535188502, places=6)
    self.assertAlmostEqual(float(np.std(result['raw_scores'])), 1.3654128030298962, places=6)
    self.assertAlmostEqual(float(np.std(result['reconstructions'])), 1.2020882040879586, places=6)
    self.assertAlmostEqual(result['aic'], 2.297151058692486, places=6)
    self.assertAlmostEqual(result['bic'], 2.6560645519514896, places=6)
    # Standard deviations of the parameter estimates.
    self.assertAlmostEqual(float(np.sum(result['observer_bias_std'])), 2.3669964674034123, places=4)
    self.assertAlmostEqual(float(np.sum(result['observer_inconsistency_std'])), 1.6737192530463552, places=4)
    self.assertAlmostEqual(float(np.sum(result['quality_scores_std'])), 9.592833401286343, places=4)
def test_mleco_subjective_model_corruptdata_nonzero_bias(self):
    """Content-oblivious MLE on corrupted subjects without zero-meaning bias."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    model = MaximumLikelihoodEstimationModelContentOblivious(reader)
    res = model.run_modeling(force_subjbias_zeromean=False)
    qs = res['quality_scores']
    obs_bias = res['observer_bias']
    obs_incon = res['observer_inconsistency']
    self.assertAlmostEqual(float(np.mean(qs)), 3.5580494278512447, places=8)
    self.assertAlmostEqual(float(np.var(qs)), 1.3559834445021643, places=8)
    self.assertAlmostEqual(float(np.mean(obs_bias)), -0.013258775465654477, places=8)
    self.assertAlmostEqual(float(np.var(obs_bias)), 0.08903258562151789, places=8)
    self.assertAlmostEqual(float(np.mean(obs_incon)), 0.8091663380211014, places=8)
    self.assertAlmostEqual(float(np.var(obs_incon)), 0.2126900961328451, places=8)
    # Goodness-of-fit statistics.
    self.assertAlmostEqual(res['dof'], 0.06377799415774099, places=6)
    self.assertAlmostEqual(res['loglikelihood'], -1.0847975351885024, places=6)
    self.assertAlmostEqual(float(np.std(res['raw_scores'])), 1.3654128030298962, places=6)
    self.assertAlmostEqual(float(np.std(res['reconstructions'])), 1.2020881956510854, places=6)
    self.assertAlmostEqual(res['aic'], 2.297151058692487, places=6)
    self.assertAlmostEqual(res['bic'], 2.6560645519514905, places=6)
def test_proj_mle_subjective_model_corruptdata_missingdata(self):
    """Projection-solver MLE with corrupted subjects AND a small fraction of missing data."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    # Chain two readers: corrupt 5 subjects, then drop ~0.01% of scores.
    corrupt_reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    corrupted_dataset = corrupt_reader.to_dataset()
    missing_reader = MissingDataRawDatasetReader(corrupted_dataset, input_dict={'missing_probability': 0.0001})
    model = SubjectMLEModelProjectionSolver(missing_reader)
    res = model.run_modeling(force_subjbias_zeromean=False)
    qs = res['quality_scores']
    obs_bias = res['observer_bias']
    obs_incon = res['observer_inconsistency']
    self.assertAlmostEqual(float(np.mean(qs)), 3.5441674307897983, places=8)
    self.assertAlmostEqual(float(np.var(qs)), 1.3557530628643795, places=8)
    self.assertAlmostEqual(float(np.mean(obs_bias)), 0.00011539474984923769, places=8)
    self.assertAlmostEqual(float(np.var(obs_bias)), 0.08879525615906458, places=8)
    self.assertAlmostEqual(float(np.mean(obs_incon)), 0.8088220663739162, places=8)
    self.assertAlmostEqual(float(np.var(obs_incon)), 0.21296014750848657, places=8)
    # Standard deviations of the parameter estimates.
    self.assertAlmostEqual(float(np.sum(res['observer_bias_std'])), 2.3663572924335963, places=4)
    self.assertAlmostEqual(float(np.sum(res['observer_inconsistency_std'])), 1.673273950838568, places=4)
    self.assertAlmostEqual(float(np.sum(res['quality_scores_std'])), 9.589031768667335, places=4)
def test_proj_mle_subjective_model2_corruptdata_nonzero_bias(self):
    """Projection-solver MLE variant 2 on corrupted subjects.

    Mirrors the solver-1 test above; only the quality_scores_std sum differs
    (13.71 vs 9.59), reflecting the variant's different uncertainty estimate.
    """
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    info_dict = {
        'selected_subjects': range(5),
    }
    dataset_reader = CorruptSubjectRawDatasetReader(dataset, input_dict=info_dict)
    subjective_model = SubjectMLEModelProjectionSolver2(dataset_reader)
    result = subjective_model.run_modeling(force_subjbias_zeromean=False)
    scores = result['quality_scores']
    bias = result['observer_bias']
    inconsistency = result['observer_inconsistency']
    self.assertAlmostEqual(float(np.mean(scores)), 3.5447906523855877, places=8)
    self.assertAlmostEqual(float(np.var(scores)), 1.3559834679453553, places=8)
    self.assertAlmostEqual(float(np.mean(bias)), 0.0, places=8)
    self.assertAlmostEqual(float(np.var(bias)), 0.08903258562151985, places=8)
    self.assertAlmostEqual(float(np.mean(inconsistency)), 0.8091663380211014, places=8)
    self.assertAlmostEqual(float(np.var(inconsistency)), 0.21269010120806528, places=8)
    # Goodness-of-fit statistics.
    self.assertAlmostEqual(result['dof'], 0.06377799415774099, places=6)
    self.assertAlmostEqual(result['loglikelihood'], -1.084797535188502, places=6)
    self.assertAlmostEqual(float(np.std(result['raw_scores'])), 1.3654128030298962, places=6)
    self.assertAlmostEqual(float(np.std(result['reconstructions'])), 1.2020882040879586, places=6)
    self.assertAlmostEqual(result['aic'], 2.297151058692486, places=6)
    self.assertAlmostEqual(result['bic'], 2.6560645519514896, places=6)
    # Standard deviations of the parameter estimates.
    self.assertAlmostEqual(float(np.sum(result['observer_bias_std'])), 2.3669964674034123, places=4)
    self.assertAlmostEqual(float(np.sum(result['observer_inconsistency_std'])), 1.6737192530463552, places=4)
    self.assertAlmostEqual(float(np.sum(result['quality_scores_std'])), 13.711486766043402, places=4)
def test_proj_mle_subjective_model2_corruptdata_missingdata(self):
    """Projection-solver MLE variant 2 with corrupted subjects plus missing data."""
    dataset = import_python_file(self.dataset_filepath)
    np.random.seed(0)
    # Chain two readers: corrupt 5 subjects, then drop ~0.01% of scores.
    corrupt_reader = CorruptSubjectRawDatasetReader(dataset, input_dict={'selected_subjects': range(5)})
    corrupted_dataset = corrupt_reader.to_dataset()
    missing_reader = MissingDataRawDatasetReader(corrupted_dataset, input_dict={'missing_probability': 0.0001})
    model = SubjectMLEModelProjectionSolver2(missing_reader)
    res = model.run_modeling(force_subjbias_zeromean=False)
    qs = res['quality_scores']
    obs_bias = res['observer_bias']
    obs_incon = res['observer_inconsistency']
    self.assertAlmostEqual(float(np.mean(qs)), 3.5441674307897983, places=8)
    self.assertAlmostEqual(float(np.var(qs)), 1.3557530628643795, places=8)
    self.assertAlmostEqual(float(np.mean(obs_bias)), 0.00011539474984923769, places=8)
    self.assertAlmostEqual(float(np.var(obs_bias)), 0.08879525615906458, places=8)
    self.assertAlmostEqual(float(np.mean(obs_incon)), 0.8088220663739162, places=8)
    self.assertAlmostEqual(float(np.var(obs_incon)), 0.21296014750848657, places=8)
    # Standard deviations of the parameter estimates.
    self.assertAlmostEqual(float(np.sum(res['observer_bias_std'])), 2.3663572924335963, places=4)
    self.assertAlmostEqual(float(np.sum(res['observer_inconsistency_std'])), 1.673273950838568, places=4)
    self.assertAlmostEqual(float(np.sum(res['quality_scores_std'])), 13.712083371807026, places=4)
class SubjectiveModelPartialTest(unittest.TestCase):
def setUp(self):
self.dataset_filepath = SurealConfig.test_resource_path('NFLX_dataset_public_raw_PARTIAL.py')
self.output_dataset_filepath = SurealConfig.workdir_path('NFLX_dataset_public_test_PARTIAL.py')
self.output_dataset_pyc_filepath = SurealConfig.workdir_path('NFLX_dataset_public_test_PARTIAL.pyc')
def tearDown(self):
if os.path.exists(self.output_dataset_filepath):
os.remove(self.output_dataset_filepath)
if os.path.exists(self.output_dataset_pyc_filepath):
os.remove(self.output_dataset_pyc_filepath)
def test_mos_subjective_model(self):
dataset = import_python_file(self.dataset_filepath)
dataset_reader = RawDatasetReader(dataset)
subjective_model = MosModel(dataset_reader)
result = subjective_model.run_modeling()
scores = result['quality_scores']
self.assertAlmostEqual(scores[0], 4.884615384615385, places=4)
self.assertAlmostEqual(scores[10], 2.8076923076923075, places=4)
self.assertAlmostEqual(float(np.mean(scores)), 3.4871794871794877, places=4)
scores_std = result['quality_scores_std']
self.assertAlmostEqual(float(np.mean(scores_std)), 0.13125250408357622, places=4)
def test_mos_subjective_model_output(self):
dataset = import_python_file(self.dataset_filepath)
dataset_reader = RawDatasetReader(dataset)
subjective_model = MosModel(dataset_reader)
subjective_model.run_modeling()
subjective_model.to_aggregated_dataset_file(self.output_dataset_filepath)
self.assertTrue(os.path.exists(self.output_dataset_filepath))
dataset2 = import_python_file(self.output_dataset_filepath)
dis_video = dataset2.dis_videos[0]
self.assertTrue('groundtruth' in dis_video)
self.assertTrue('groundtruth_std' in dis_video)
self.assertTrue('os' not in dis_video)
self.assertAlmostEqual(dis_video['groundtruth'], 4.884615384615385, places=4)
self.assertAlmostEqual(dis_video['groundtruth_std'], 0.06389710663783135, places=4)
def test_mos_subjective_model_normalize_final(self):
dataset = import_python_file(self.dataset_filepath)
dataset_reader = RawDatasetReader(dataset)
subjective_model = MosModel(dataset_reader)
result = subjective_model.run_modeling(normalize_final=True)
scores = result['quality_scores']
self.assertAlmostEqual(scores[0], 1.1666952279897338, places=4)
self.assertAlmostEqual(scores[10], -0.56729217507757768, places=4)
self.assertAlmostEqual(float(np.mean(scores)), 0.0, places=4)
def test_mos_subjective_model_transform_final(self):
dataset = import_python_file(self.dataset_filepath)
dataset_reader = RawDatasetReader(dataset)
subjective_model = MosModel(dataset_reader)
result = subjective_model.run_modeling(transform_final={'p1': 10, 'p0': 1})
scores = result['quality_scores']
self.assertAlmostEqual(scores[0], 49.84615384615385, places=4)
self.assertAlmostEqual(scores[10], 29.076923076923073, places=4)
self.assertAlmostEqual(float(np.mean(scores)), 35.871794871794876, places=4)
def test_from_dataset_file(self):
subjective_model = MosModel.from_dataset_file(self.dataset_filepath)
result = subjective_model.run_modeling()
scores = result['quality_scores']
self.assertAlmostEqual(scores[0], 4.884615384615385, places=4)
self.assertAlmostEqual(scores[10], 2.8076923076923075, places=4)
self.assertAlmostEqual(float(np.mean(scores)), 3.4871794871794877, places=4)
def test_dmos_subjective_model(self):
subjective_model = DmosModel.from_dataset_file(self.dataset_filepath)
result = subjective_model.run_modeling()
scores = result['quality_scores']
self.assertAlmostEqual(scores[0], 5.0, places=4)
self.assertAlmostEqual(scores[10], 2.9230769230769225, places=4)
self.assertAlmostEqual(float(np.mean(scores)), 3.7473604826546003, places=4)
scores_std = result['quality_scores_std']
self.assertAlmostEqual(float(np.mean(scores_std)), 0.13125250408357622, places=4)
def test_observer_aware_subjective_model_with_dscoring(self):
subjective_model = LegacyMaximumLikelihoodEstimationModel.from_dataset_file(
self.dataset_filepath)
result = subjective_model.run_modeling(dscore_mode=True, force_subjbias_zeromean=False)
self.assertAlmostEqual(float(np.sum(result['observer_bias'])), -0.038360699965619777, places=4)
self.assertAlmostEqual(float(np.var(result['observer_bias'])), 0.095605013092265739, places=4)
self.assertAlmostEqual(float(np.sum(result['observer_inconsistency'])), 15.81030572681315, places=4)
self.assertAlmostEqual(float(np.var(result['observer_inconsistency'])), 0.014607671806207905, places=4)
self.assertAlmostEqual(float(np.sum(result['quality_scores'])), 191.1906306037788, places=4)
self.assertAlmostEqual(float(np.var(result['quality_scores'])), 1.4711930351190119, places=4)
def test_observer_aware_subjective_model_use_log(self):
subjective_model = LegacyMaximumLikelihoodEstimationModel.from_dataset_file(
self.dataset_filepath)
result = subjective_model.run_modeling(use_log=True, force_subjbias_zeromean=False)
self.assertAlmostEqual(float(np.sum(result['observer_bias'])), -0.02907696993595069, places=4)
self.assertAlmostEqual(float(np.var(result['observer_bias'])), 0.095605013092265725, places=4)
self.assertAlmostEqual(float(np.sum(result['observer_inconsistency'])), 15.810305727732661, places=4)
self.assertAlmostEqual(float(np.var(result['observer_inconsistency'])), 0.014607671851733216, places=4)
self.assertAlmostEqual(float(np.sum(result['quality_scores'])), 177.90318944102833, places=4)
self.assertAlmostEqual(float(np.var(result['quality_scores'])), 1.4830610455789057, places=4)
def test_observer_content_aware_subjective_model(self):
subjective_model = MaximumLikelihoodEstimationModel.from_dataset_file(
self.dataset_filepath)
result = subjective_model.run_modeling(force_subjbias_zeromean=False)
self.assertAlmostEqual(float(np.nansum(result['content_ambiguity'])), 2.653508643860357, places=4)
self.assertAlmostEqual(float(np.nanvar(result['content_ambiguity'])), 0.0092892978862108271, places=4)
self.assertAlmostEqual(float(np.sum(result['observer_bias'])), -0.020313188445860726, places=4)
self.assertAlmostEqual(float(np.var(result['observer_bias'])), 0.091830942654165318, places=4)
self.assertAlmostEqual(float(np.sum(result['observer_inconsistency'])), 11.232923468639161, places=4)
self.assertAlmostEqual(float(np.var(result['observer_inconsistency'])), 0.027721095664357907, places=4)
self.assertAlmostEqual(float(np.sum(result['quality_scores'])), 177.88599894484821, places=4)
self.assertAlmostEqual(float(np.var(result['quality_scores'])), 1.4896077857605587, places=4)
# self.assertAlmostEqual(np.nansum(result['content_ambiguity_std']), 0.30465244947706538, places=4)
self.assertAlmostEqual(float(np.sum(result['observer_bias_std'])), 2.165903882505483, places=4)
self.assertAlmostEqual(float(np.sum(result['observer_inconsistency_std'])), 27.520643824238352, places=4)
self.assertAlmostEqual(float(np.sum(result['quality_scores_std'])), 5.7355563435912256, places=4)
def test_observer_content_aware_subjective_model_nocontent(self):
    """Content-oblivious MLE variant reproduces the reference statistics."""
    model = MaximumLikelihoodEstimationModelContentOblivious.from_dataset_file(
        self.dataset_filepath)
    result = model.run_modeling(force_subjbias_zeromean=False)
    # One row per expectation: (aggregator, result key, reference value).
    expectations = [
        (np.sum, 'observer_bias', -0.038360699965624648),
        (np.var, 'observer_bias', 0.095605013092265753),
        (np.sum, 'observer_inconsistency', 15.81030572681315),
        (np.var, 'observer_inconsistency', 0.014607671806207895),
        (np.sum, 'quality_scores', 177.92139983454805),
        (np.var, 'quality_scores', 1.4830610442685492),
    ]
    for aggregate, key, reference in expectations:
        self.assertAlmostEqual(float(aggregate(result[key])), reference, places=4)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
| 59.233216
| 180
| 0.735683
| 7,422
| 67,052
| 6.437079
| 0.062921
| 0.172744
| 0.163806
| 0.176407
| 0.896433
| 0.889233
| 0.88197
| 0.860223
| 0.852583
| 0.838224
| 0
| 0.125528
| 0.149332
| 67,052
| 1,131
| 181
| 59.285588
| 0.712074
| 0.003997
| 0
| 0.671706
| 0
| 0
| 0.089258
| 0.025924
| 0
| 0
| 0
| 0
| 0.462203
| 1
| 0.073434
| false
| 0
| 0.053996
| 0
| 0.12959
| 0.00108
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
24f019c5732ccdba1df85316adffd60469d1b134
| 417
|
py
|
Python
|
desicos/abaqus/utils/__init__.py
|
saullocastro/desicos
|
922db8ac4fb0fb4d09df18ce2a14011f207f6fa8
|
[
"BSD-3-Clause"
] | 1
|
2020-10-22T22:15:24.000Z
|
2020-10-22T22:15:24.000Z
|
desicos/abaqus/utils/__init__.py
|
saullocastro/desicos
|
922db8ac4fb0fb4d09df18ce2a14011f207f6fa8
|
[
"BSD-3-Clause"
] | 1
|
2020-10-09T12:42:02.000Z
|
2020-10-09T12:42:02.000Z
|
desicos/abaqus/utils/__init__.py
|
saullocastro/desicos
|
922db8ac4fb0fb4d09df18ce2a14011f207f6fa8
|
[
"BSD-3-Clause"
] | 2
|
2020-07-14T07:45:31.000Z
|
2020-12-29T00:22:41.000Z
|
r"""
=======================================
Utilities (:mod:`desicos.abaqus.utils`)
=======================================
.. currentmodule:: desicos.abaqus.utils
Includes all utilities functions that can be executed without Abaqus.
.. automodule:: desicos.abaqus.utils.utils
:members:
.. automodule:: desicos.abaqus.utils.geom
:members:
"""
from __future__ import absolute_import
from .utils import *
| 21.947368
| 69
| 0.59952
| 40
| 417
| 6.125
| 0.55
| 0.212245
| 0.293878
| 0.228571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115108
| 417
| 18
| 70
| 23.166667
| 0.663957
| 0.829736
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
702d440e650fdab72a01e3eef548646ec6e400fc
| 3,979
|
py
|
Python
|
tests/test_unit_sql_free.py
|
IBM/python-itoolk
|
36054a7ebdd8f5556c548d4c315e00e3c8d04904
|
[
"MIT"
] | 11
|
2019-01-09T12:31:04.000Z
|
2021-08-29T05:26:35.000Z
|
tests/test_unit_sql_free.py
|
IBM/python-itoolk
|
36054a7ebdd8f5556c548d4c315e00e3c8d04904
|
[
"MIT"
] | 50
|
2018-12-21T18:52:25.000Z
|
2021-05-25T13:38:15.000Z
|
tests/test_unit_sql_free.py
|
IBM/python-itoolk
|
36054a7ebdd8f5556c548d4c315e00e3c8d04904
|
[
"MIT"
] | 9
|
2018-12-25T00:02:19.000Z
|
2022-02-22T00:58:13.000Z
|
import xml.etree.ElementTree as ET
from itoolkit import iSqlFree
def _assert_sql_free_xml(key, ctl=None, free_attrib=None, free_attrib_count=2):
    """Build an iSqlFree element and assert its XML structure.

    Shared body of the test_sql_free_* tests: every case expects an outer
    ``<sql var=key>`` node containing exactly one ``<free>`` child carrying
    ``var=key`` plus case-specific control attributes.

    :param key: expected 'var' attribute on both <sql> and <free>.
    :param ctl: optional control-options dict forwarded to iSqlFree.
    :param free_attrib: extra attribute name/value pairs expected on <free>.
    :param free_attrib_count: total attribute count expected on <free>.
    """
    obj = iSqlFree(key) if ctl is None else iSqlFree(key, ctl)
    element = ET.fromstring(obj.xml_in())
    # Outer node: <sql var='key'> with a newline text body.
    assert element.tag == 'sql'
    assert len(element.attrib) == 1
    assert 'var' in element.attrib
    assert element.attrib['var'] == key
    assert element.text == '\n'
    # Exactly one child: the <free> node.
    children = tuple(iter(element))
    assert len(children) == 1
    free = children[0]
    assert free.tag == 'free'
    assert len(free.attrib) == free_attrib_count
    for name, value in (free_attrib or {}).items():
        assert name in free.attrib
        assert free.attrib[name] == value
    assert 'var' in free.attrib
    assert free.attrib['var'] == key


def test_sql_free():
    # With no control options, error mode defaults to 'fast'.
    _assert_sql_free_xml('ifaovjuf', free_attrib={'error': 'fast'})


def test_sql_free_error_on():
    _assert_sql_free_xml('nkcfhgwf', ctl={'error': 'on'},
                         free_attrib={'error': 'on'})


def test_sql_free_error_off():
    _assert_sql_free_xml('vzumvoan', ctl={'error': 'off'},
                         free_attrib={'error': 'off'})


def test_sql_free_conn_set():
    # A conn label adds a third attribute alongside error/var.
    _assert_sql_free_xml('igqywtcq', ctl={'conn': 'conn-label'},
                         free_attrib={'conn': 'conn-label'},
                         free_attrib_count=3)


def test_sql_free_stmt_set():
    _assert_sql_free_xml('tofzlwxz', ctl={'stmt': 'stmt-label'},
                         free_attrib={'stmt': 'stmt-label'},
                         free_attrib_count=3)


def test_sql_free_options_set():
    _assert_sql_free_xml('poraowkq', ctl={'options': 'options-label'},
                         free_attrib={'options': 'options-label'},
                         free_attrib_count=3)
| 21.743169
| 73
| 0.622016
| 499
| 3,979
| 4.903808
| 0.092184
| 0.255006
| 0.110339
| 0.154475
| 0.882305
| 0.870045
| 0.785452
| 0.785452
| 0.785452
| 0.785452
| 0
| 0.007573
| 0.203569
| 3,979
| 182
| 74
| 21.862637
| 0.764595
| 0
| 0
| 0.724771
| 0
| 0
| 0.075647
| 0
| 0
| 0
| 0
| 0
| 0.66055
| 1
| 0.055046
| false
| 0
| 0.018349
| 0
| 0.073395
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
703055e7c1856b6f52483d241c6655f8fae4ffd8
| 6,858
|
py
|
Python
|
tests/do_sampler/test_pandas_do_api.py
|
mbenezra/dowhy
|
99bf1b2b365a0a03f9f9fcd4787da28801ceb1d0
|
[
"MIT"
] | 1
|
2019-10-23T01:18:52.000Z
|
2019-10-23T01:18:52.000Z
|
tests/do_sampler/test_pandas_do_api.py
|
mbenezra/dowhy
|
99bf1b2b365a0a03f9f9fcd4787da28801ceb1d0
|
[
"MIT"
] | null | null | null |
tests/do_sampler/test_pandas_do_api.py
|
mbenezra/dowhy
|
99bf1b2b365a0a03f9f9fcd4787da28801ceb1d0
|
[
"MIT"
] | null | null | null |
import pytest
import numpy as np
import dowhy.datasets
import dowhy.api
from sklearn.linear_model import LinearRegression
class TestPandasDoAPI(object):
    """Exercise the pandas ``.causal.do`` accessor over all four
    discrete/continuous combinations of cause (v) and confounder (X0)."""

    @staticmethod
    def _prepare_data(N, cause_is_discrete, confounder_is_discrete):
        """Build a synthetic linear dataset with one confounder.

        Overwrites the dowhy frame's columns with our own draws so the
        true ATE (``data['ate']``) is known exactly:
        v = noise + X0 (optionally int-cast), y = ate*v + X0 + noise.
        """
        data = dowhy.datasets.linear_dataset(beta=10,
                                             num_common_causes=1,
                                             num_instruments=1,
                                             num_samples=N,
                                             treatment_is_binary=False)
        X0 = np.random.normal(size=N)
        if confounder_is_discrete:
            X0 = X0.astype(int)
        v = np.random.normal(size=N) + X0
        if cause_is_discrete:
            v = v.astype(int)
        y = data['ate'] * v + X0 + np.random.normal()
        data['df']['v'] = v
        data['df']['X0'] = X0
        data['df']['y'] = y
        return data

    @staticmethod
    def _do_sample(data, variable_types):
        """Run the weighting do-sampler and return the sampled frame."""
        df = data['df'].copy()
        return df.causal.do(x='v',
                            variable_types=variable_types,
                            outcome='y',
                            method='weighting',
                            common_causes='X0',
                            proceed_when_unidentifiable=True)

    @staticmethod
    def _check_ate(ate, data, error_tolerance):
        """Assert the estimated ATE is within the relative tolerance of truth."""
        error = np.abs(ate - data['ate'])
        res = error < data['ate'] * error_tolerance
        print("Error in ATE estimate = {0} with tolerance {1}%. Estimated={2},True={3}".format(
            error, error_tolerance * 100, ate, data['ate'])
        )
        assert res

    @pytest.mark.parametrize(["N", "error_tolerance"],
                             [(10000, 0.1), ])
    def test_pandas_api_discrete_cause_continuous_confounder(self, N, error_tolerance):
        data = self._prepare_data(N, cause_is_discrete=True,
                                  confounder_is_discrete=False)
        causal_df = self._do_sample(data, {'v': 'd', 'X0': 'c', 'y': 'c'})
        # Discrete cause: ATE = difference of outcome means at v=1 vs v=0.
        ate = (causal_df[causal_df.v == 1].mean()
               - causal_df[causal_df.v == 0].mean())['y']
        self._check_ate(ate, data, error_tolerance)

    @pytest.mark.parametrize(["N", "error_tolerance"],
                             [(10000, 0.1), ])
    def test_pandas_api_discrete_cause_discrete_confounder(self, N, error_tolerance):
        data = self._prepare_data(N, cause_is_discrete=True,
                                  confounder_is_discrete=True)
        causal_df = self._do_sample(data, {'v': 'd', 'X0': 'd', 'y': 'c'})
        ate = (causal_df[causal_df.v == 1].mean()
               - causal_df[causal_df.v == 0].mean())['y']
        print('ate', ate)
        self._check_ate(ate, data, error_tolerance)

    @pytest.mark.parametrize(["N", "error_tolerance"],
                             [(10000, 0.1), ])
    def test_pandas_api_continuous_cause_discrete_confounder(self, N, error_tolerance):
        data = self._prepare_data(N, cause_is_discrete=False,
                                  confounder_is_discrete=True)
        causal_df = self._do_sample(data, {'v': 'c', 'X0': 'd', 'y': 'c'})
        # Continuous cause: ATE = slope of y on v in the do-sampled frame.
        ate = LinearRegression().fit(causal_df[['v']], causal_df['y']).coef_[0]
        print('ate', ate)
        self._check_ate(ate, data, error_tolerance)

    @pytest.mark.parametrize(["N", "error_tolerance"],
                             [(10000, 0.1), ])
    def test_pandas_api_continuous_cause_continuous_confounder(self, N, error_tolerance):
        data = self._prepare_data(N, cause_is_discrete=False,
                                  confounder_is_discrete=False)
        causal_df = self._do_sample(data, {'v': 'c', 'X0': 'c', 'y': 'c'})
        ate = LinearRegression().fit(causal_df[['v']], causal_df['y']).coef_[0]
        print('ate', ate)
        self._check_ate(ate, data, error_tolerance)
| 43.132075
| 95
| 0.466025
| 724
| 6,858
| 4.25
| 0.120166
| 0.072798
| 0.054599
| 0.041599
| 0.957751
| 0.957751
| 0.957751
| 0.957751
| 0.957751
| 0.957751
| 0
| 0.02586
| 0.402304
| 6,858
| 158
| 96
| 43.405063
| 0.724811
| 0
| 0
| 0.874126
| 0
| 0.027972
| 0.078594
| 0.012832
| 0
| 0
| 0
| 0
| 0.027972
| 1
| 0.027972
| false
| 0
| 0.034965
| 0
| 0.06993
| 0.048951
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
705055252f9e7eaa663f418316d7801905abae9e
| 338
|
py
|
Python
|
Lib/site-packages/QtModularUiPack/Framework/Extensions/__init__.py
|
fochoao/cpython
|
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
|
[
"bzip2-1.0.6",
"0BSD"
] | 3
|
2019-11-11T12:09:23.000Z
|
2022-02-17T10:02:55.000Z
|
QtModularUiPack/Framework/Extensions/__init__.py
|
dowerner/QtModularUiPack
|
de2ce6ba3a1cd52ca00eaea3ea3bb2247fe76ba3
|
[
"Apache-2.0"
] | 20
|
2021-05-03T18:02:23.000Z
|
2022-03-12T12:01:04.000Z
|
Lib/site-packages/QtModularUiPack/Framework/Extensions/__init__.py
|
fochoao/cpython
|
3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9
|
[
"bzip2-1.0.6",
"0BSD"
] | 2
|
2019-11-11T12:09:31.000Z
|
2019-11-11T12:09:42.000Z
|
from .signal import Signal
from QtModularUiPack.Framework.Extensions.observable_list import ObservableList
from QtModularUiPack.Framework.Extensions.singleton import Singleton
from QtModularUiPack.Framework.Extensions.code_environment import CodeEnvironment
from QtModularUiPack.Framework.Extensions.killable_thread import KillableThread
| 56.333333
| 81
| 0.902367
| 35
| 338
| 8.628571
| 0.457143
| 0.251656
| 0.370861
| 0.503311
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059172
| 338
| 5
| 82
| 67.6
| 0.949686
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
560e9c8aa23c6b3905eeb6ae937d8c7674638989
| 4,258
|
py
|
Python
|
tests/test_dale_chall_score.py
|
atvaccaro/homer
|
c19b08bca6a783041b1e9f2ee8ab7d392ab4626b
|
[
"MIT"
] | 660
|
2019-08-11T08:16:29.000Z
|
2022-03-08T08:03:01.000Z
|
tests/test_dale_chall_score.py
|
atvaccaro/homer
|
c19b08bca6a783041b1e9f2ee8ab7d392ab4626b
|
[
"MIT"
] | 8
|
2019-08-15T20:40:54.000Z
|
2021-09-29T17:41:45.000Z
|
tests/test_dale_chall_score.py
|
atvaccaro/homer
|
c19b08bca6a783041b1e9f2ee8ab7d392ab4626b
|
[
"MIT"
] | 41
|
2019-08-15T18:33:00.000Z
|
2022-03-24T19:28:39.000Z
|
import unittest
from homer.analyzer import DaleChall
class TestDaleChallReadingScore(unittest.TestCase):
    """Check that DaleChall.grade() maps raw scores onto grade labels."""

    def _assert_grade(self, grade_label, matching_scores, first_score_above=None):
        """Assert ``grade()`` yields *grade_label* for every score in
        *matching_scores*, and a different label at *first_score_above*
        (the lower bound of the next band) when given.
        """
        dale_chall = DaleChall("Some dummy text")
        for score in matching_scores:
            dale_chall.score = score
            self.assertEqual(grade_label, dale_chall.grade())
        if first_score_above is not None:
            dale_chall.score = first_score_above
            self.assertNotEqual(grade_label, dale_chall.grade())

    def test_fourth_grade_or_lower(self):
        self._assert_grade('Average 4th grade student or lower',
                           [4.9, 4.8, 3.4, 2.5, 1.5, 0])

    def test_fifth_or_sixth_grade(self):
        self._assert_grade('Average 5th or 6th grade student',
                           [5.0, 5.1, 5.4, 5.8, 5.9], first_score_above=6.0)

    def test_seventh_or_eigth(self):
        self._assert_grade('Average 7th or 8th grade student',
                           [6.0, 6.1, 6.4, 6.8, 6.9], first_score_above=7.0)

    def test_nine_or_tenth(self):
        self._assert_grade('Average 9th or 10th grade student',
                           [7.0, 7.1, 7.4, 7.8, 7.9], first_score_above=8.0)

    def test_eleventh_or_twelve(self):
        self._assert_grade('Average 11th or 12th grade student',
                           [8.0, 8.1, 8.4, 8.8, 8.9], first_score_above=9.0)

    def test_thirteenth_or_fifteen(self):
        # NOTE(review): unlike the lower bands, the original test asserted
        # equality (not inequality) at 10.0 — this band has no upper
        # boundary check, preserved here deliberately.
        self._assert_grade('Average 13th or 15th grade student',
                           [9.0, 9.1, 9.4, 9.8, 9.9, 10.0])
if __name__ == "__main__":
unittest.main()
| 41.339806
| 60
| 0.664631
| 569
| 4,258
| 4.713533
| 0.108963
| 0.261745
| 0.187919
| 0.255034
| 0.886279
| 0.858688
| 0.836316
| 0.833706
| 0.803878
| 0.803878
| 0
| 0.026928
| 0.232504
| 4,258
| 103
| 61
| 41.339806
| 0.793758
| 0
| 0
| 0.526316
| 0
| 0
| 0.069735
| 0
| 0
| 0
| 0
| 0
| 0.378947
| 1
| 0.063158
| false
| 0
| 0.021053
| 0
| 0.094737
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
5662b8f849522c6126442d70f36d6b6742ea6f75
| 239
|
py
|
Python
|
src/server/views/__init__.py
|
daniel3303/sirs-project
|
38a36ecf2373775c3a866f185dacb7597ad1e3cc
|
[
"Apache-2.0"
] | null | null | null |
src/server/views/__init__.py
|
daniel3303/sirs-project
|
38a36ecf2373775c3a866f185dacb7597ad1e3cc
|
[
"Apache-2.0"
] | null | null | null |
src/server/views/__init__.py
|
daniel3303/sirs-project
|
38a36ecf2373775c3a866f185dacb7597ad1e3cc
|
[
"Apache-2.0"
] | null | null | null |
from server.views.UserView import *
from server.views.UserCreateView import *
from server.views.FileListView import *
from server.views.FileCreateView import *
from server.views.FileView import *
from server.views.FileRolesView import *
| 26.555556
| 41
| 0.8159
| 30
| 239
| 6.5
| 0.333333
| 0.307692
| 0.461538
| 0.538462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108787
| 239
| 8
| 42
| 29.875
| 0.915493
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
569d8969a5bf0bbcb1c2b8aacbd37509d1be6d5f
| 10,273
|
py
|
Python
|
exhaustive.py
|
ForeverZyh/diffai
|
91ca6c002e6fdc89fe98389f966ddde6c7acbf23
|
[
"MIT"
] | null | null | null |
exhaustive.py
|
ForeverZyh/diffai
|
91ca6c002e6fdc89fe98389f966ddde6c7acbf23
|
[
"MIT"
] | null | null | null |
exhaustive.py
|
ForeverZyh/diffai
|
91ca6c002e6fdc89fe98389f966ddde6c7acbf23
|
[
"MIT"
] | null | null | null |
import numpy as np
import itertools
import torch
from utils import swap_pytorch
from dataset.dataset_loader import SSTWordLevel, Glove
from nltk import pos_tag
from DSL.Alphabet import Alphabet
import diffai.scheduling as S
def SwapSub(a, b, x, is_numpy=False, batch_size=64, truncate=None):
    """Yield batches of perturbed copies of the id sequence ``x``.

    Exhaustively enumerates combinations of up to ``a`` non-overlapping
    adjacent swaps and up to ``b`` substitutions (larger counts first) and
    yields the perturbed sequences in batches of ``batch_size`` — 2-D numpy
    arrays when ``is_numpy`` is true, otherwise CUDA tensors.

    :param a: maximum number of adjacent swaps per perturbed copy.
    :param b: maximum number of substitutions per perturbed copy.
    :param x: input sequence of ids (numpy array or torch tensor — assumed
        1-D; TODO confirm against callers).
    :param truncate: only positions < truncate are perturbed (defaults to len(x)).
    """
    # Substitution alphabet: for each id, the ids considered "adjacent".
    adjacent_keys = S.Info.adjacent_keys
    if not is_numpy:
        x = x.cpu()
        X = []
    else:
        # Pre-allocate a full batch of copies of x; rows are edited in place.
        X = np.tile(np.expand_dims(x, 0), (batch_size, 1))
        current_id = 0
    if truncate is None:
        truncated_len = len(x)
    else:
        truncated_len = truncate
    # A swap at i only produces a distinct sequence when x[i] != x[i+1].
    valid_swap_poss = [i for i in range(truncated_len - 1) if int(x[i]) != int(x[i + 1])]
    for swap in range(a, -1, -1):
        for swap_poss in itertools.combinations(tuple(valid_swap_poss), swap):
            # precheck whether overlape: skip swap sets where two swaps
            # touch the same position (consecutive indices).
            overlape = False
            for i in range(len(swap_poss) - 1):
                if swap_poss[i + 1] - swap_poss[i] == 1:
                    overlape = True
            if overlape:
                continue
            # Substitutions must avoid swapped positions (and the position
            # right after a swap) and need a non-empty adjacency list.
            valid_sub_poss = [i for i in range(truncated_len) if (i not in swap_poss) and (i - 1 not in swap_poss) and len(adjacent_keys[int(x[i])]) > 0]
            for sub in range(b, -1, -1):
                for sub_poss in itertools.combinations(tuple(valid_sub_poss), sub):
                    if is_numpy:
                        # Edit the pre-allocated batch row in place.
                        x2 = X[current_id]
                        for swap_pos in swap_poss:
                            x2[swap_pos], x2[swap_pos + 1] = x2[swap_pos + 1], x2[swap_pos]
                    else:
                        x2 = x.clone()
                        for swap_pos in swap_poss:
                            swap_pytorch(x2, swap_pos, swap_pos + 1)
                    # Substitute with the first adjacent id only.
                    for sub_pos in sub_poss:
                        x2[sub_pos] = adjacent_keys[int(x[sub_pos])][0]
                    if is_numpy:
                        current_id += 1
                        if current_id >= batch_size:
                            yield X
                            X = np.tile(np.expand_dims(x, 0), (batch_size, 1))
                            current_id = 0
                    else:
                        X.append(x2.unsqueeze(0))
                        if len(X) == batch_size:
                            yield torch.cat(X, 0).cuda()
                            X = []
    # Flush the trailing partial batch.
    # NOTE(review): in the numpy path X is always full-size, so the final
    # yield includes unwritten rows that are plain copies of x — confirm
    # callers ignore rows past the last written index.
    if len(X) > 0:
        if is_numpy:
            yield X
        else:
            yield torch.cat(X, 0).cuda()
def DelDupSubChar(a, b, c, x, is_numpy=False, batch_size=64, padding_id=0, truncate=None):
    """Yield batches of copies of ``x`` perturbed by character deletions,
    duplications and substitutions.

    Enumerates up to ``c`` substitutions (trying every adjacent-key choice
    via a cartesian product), up to ``b`` duplications (a duplicated
    position emits the char followed by its first adjacent key) and up to
    ``a`` deletions, at non-overlapping positions. Output batching matches
    SwapSub: numpy 2-D arrays when ``is_numpy`` else CUDA tensors.

    :param padding_id: id treated as trailing padding; output is re-padded
        with it after deletions shorten the sequence.
    :param truncate: limit perturbation positions to min(end_pos, truncate).
    """
    # Substitution alphabet: for each id, the ids considered "adjacent".
    adjacent_keys = S.Info.adjacent_keys
    if not is_numpy:
        x = x.cpu()
        X = []
    else:
        # Pre-allocate a full batch of copies of x; rows are edited in place.
        X = np.tile(np.expand_dims(x, 0), (batch_size, 1))
        current_id = 0
    # end_pos: index one past the last non-padding element.
    end_pos = len(x)
    while end_pos > 0 and int(x[end_pos - 1]) == padding_id:
        end_pos -= 1
    if truncate is None:
        truncated_len = end_pos
    else:
        truncated_len = min(end_pos, truncate)
    valid_sub_poss = [i for i in range(truncated_len) if len(adjacent_keys[int(x[i])]) > 0]
    for sub in range(c, -1, -1):
        for sub_poss in itertools.combinations(tuple(valid_sub_poss), sub):
            # Candidate replacement ids for each chosen substitution slot.
            sub_pos_strs = []
            for sub_pos in sub_poss:
                sub_pos_strs.append(adjacent_keys[int(x[sub_pos])])
            # Try every combination of replacement choices.
            for sub_pos_str in itertools.product(*sub_pos_strs):
                if is_numpy:
                    x3 = x.copy()
                else:
                    x3 = x.clone()
                for i, sub_pos in enumerate(sub_poss):
                    x3[sub_pos] = sub_pos_str[i]
                # Duplication positions must avoid substitution slots and
                # need a non-empty adjacency list.
                valid_dup_poss = [i for i in range(truncated_len) if i not in sub_poss and len(adjacent_keys[int(x[i])]) > 0]
                for dup in range(b, -1, -1):
                    for dup_poss in itertools.combinations(tuple(valid_dup_poss), dup):
                        valid_del_poss = [i for i in range(truncated_len) if (i not in dup_poss) and (i not in sub_poss)]
                        for delete in range(a, -1, -1):
                            for del_poss in itertools.combinations(tuple(valid_del_poss), delete):
                                if is_numpy:
                                    x2 = X[current_id]
                                else:
                                    x2 = x.clone()
                                # Rebuild the sequence with two pointers:
                                # copy_point reads x3, paste_point writes x2.
                                copy_point = 0
                                paste_point = 0
                                while copy_point < end_pos and paste_point < end_pos:
                                    if copy_point in dup_poss:
                                        # Emit the char, then its first adjacent key.
                                        x2[paste_point] = x3[copy_point]
                                        paste_point += 1
                                        if paste_point < end_pos:
                                            x2[paste_point] = adjacent_keys[int(x3[copy_point])][0]
                                            paste_point += 1
                                        copy_point += 1
                                    elif copy_point in del_poss:
                                        # Deletion: skip the source char.
                                        copy_point += 1
                                    else:
                                        x2[paste_point] = x3[copy_point]
                                        paste_point += 1
                                        copy_point += 1
                                # Re-pad the tail left empty by deletions.
                                while paste_point < end_pos:
                                    x2[paste_point] = padding_id
                                    paste_point += 1
                                if is_numpy:
                                    current_id += 1
                                    if current_id >= batch_size:
                                        yield X
                                        X = np.tile(np.expand_dims(x, 0), (batch_size, 1))
                                        current_id = 0
                                else:
                                    X.append(x2.unsqueeze(0))
                                    if len(X) == batch_size:
                                        yield torch.cat(X, 0).cuda()
                                        X = []
    # Flush the trailing partial batch (numpy path yields the full-size
    # array; rows past current_id are unperturbed copies of x).
    if len(X) > 0:
        if is_numpy:
            yield X
        else:
            yield torch.cat(X, 0).cuda()
def DelDupSubWord(a, b, c, x, is_numpy=False, batch_size=64, del_set={"a", "and", "the", "of", "to"}, padding_id=0):
    """Yield batches of copies of ``x`` perturbed by word-level deletions,
    duplications and synonym substitutions.

    Word-level analogue of DelDupSubChar: substitutions come from
    SSTWordLevel's synonym dictionary filtered by matching POS tag,
    duplication repeats the word itself, and only words in ``del_set``
    may be deleted.

    NOTE(review): ``del_set`` is a mutable default argument; it is only
    read here, so sharing across calls is harmless, but callers should not
    mutate it.
    """
    SSTWordLevel.build()
    if not is_numpy:
        x = x.cpu()
        X = []
    else:
        # Pre-allocate a full batch of copies of x; rows are edited in place.
        X = np.tile(np.expand_dims(x, 0), (batch_size, 1))
        current_id = 0
    # end_pos: index one past the last non-padding element.
    end_pos = len(x)
    while end_pos > 0 and int(x[end_pos - 1]) == padding_id:
        end_pos -= 1
    valid_sub_poss = [i for i in range(end_pos) if int(x[i]) in SSTWordLevel.synonym_dict_id]
    # POS-tag the whole input once; synonyms must share the word's tag.
    input_pos_tag = pos_tag(Alphabet.to_string(x.long() if not is_numpy else x, True))
    for sub in range(c, -1, -1):
        for sub_poss in itertools.combinations(tuple(valid_sub_poss), sub):
            # Candidate synonym ids per chosen slot, POS-tag filtered.
            sub_pos_strs = []
            for sub_pos in sub_poss:
                sub_pos_strs.append([])
                for k in range(len(SSTWordLevel.synonym_dict_id[int(x[sub_pos])])):
                    if SSTWordLevel.synonym_dict_pos_tag[int(x[sub_pos])][k] == input_pos_tag[sub_pos][1]:
                        sub_pos_strs[-1].append(SSTWordLevel.synonym_dict_id[int(x[sub_pos])][k])
            # Try every combination of synonym choices.
            for sub_pos_str in itertools.product(*sub_pos_strs):
                if is_numpy:
                    x3 = x.copy()
                else:
                    x3 = x.clone()
                for i, sub_pos in enumerate(sub_poss):
                    x3[sub_pos] = sub_pos_str[i]
                valid_dup_poss = [i for i in range(end_pos) if i not in sub_poss]
                for dup in range(b, -1, -1):
                    for dup_poss in itertools.combinations(tuple(valid_dup_poss), dup):
                        # Only frequent stop-words (del_set) may be deleted.
                        valid_del_poss = [i for i in range(end_pos) if (i not in dup_poss) and (i not in sub_poss) and Glove.id2str[int(x[i])] in del_set]
                        for delete in range(a, -1, -1):
                            for del_poss in itertools.combinations(tuple(valid_del_poss), delete):
                                if is_numpy:
                                    x2 = X[current_id]
                                else:
                                    x2 = x.clone()
                                # Rebuild the sequence with two pointers:
                                # copy_point reads x3, paste_point writes x2.
                                copy_point = 0
                                paste_point = 0
                                while copy_point < end_pos and paste_point < end_pos:
                                    if copy_point in dup_poss:
                                        # Duplication emits the same word twice.
                                        x2[paste_point] = x3[copy_point]
                                        paste_point += 1
                                        if paste_point < end_pos:
                                            x2[paste_point] = x3[copy_point]
                                            paste_point += 1
                                        copy_point += 1
                                    elif copy_point in del_poss:
                                        # Deletion: skip the source word.
                                        copy_point += 1
                                    else:
                                        x2[paste_point] = x3[copy_point]
                                        paste_point += 1
                                        copy_point += 1
                                # Re-pad the tail left empty by deletions.
                                while paste_point < end_pos:
                                    x2[paste_point] = padding_id
                                    paste_point += 1
                                if is_numpy:
                                    current_id += 1
                                    if current_id >= batch_size:
                                        yield X
                                        X = np.tile(np.expand_dims(x, 0), (batch_size, 1))
                                        current_id = 0
                                else:
                                    X.append(x2.unsqueeze(0))
                                    if len(X) == batch_size:
                                        yield torch.cat(X, 0).cuda()
                                        X = []
    # Flush the trailing partial batch (numpy path yields the full-size
    # array; rows past current_id are unperturbed copies of x).
    if len(X) > 0:
        if is_numpy:
            yield X
        else:
            yield torch.cat(X, 0).cuda()
| 46.695455
| 154
| 0.42529
| 1,185
| 10,273
| 3.458228
| 0.087764
| 0.036603
| 0.024158
| 0.024158
| 0.810639
| 0.80039
| 0.746218
| 0.72816
| 0.702294
| 0.696437
| 0
| 0.025494
| 0.492164
| 10,273
| 219
| 155
| 46.908676
| 0.760015
| 0.002434
| 0
| 0.789216
| 0
| 0
| 0.001074
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.014706
| false
| 0
| 0.039216
| 0
| 0.053922
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3b6c09ae1174e5fd7a8967227f395e209ae1b6b7
| 6,484
|
py
|
Python
|
tests/test_async_validators.py
|
omarryhan/sanic-wtf
|
41c24f061fa16652a82d83753c3bee56f746e23a
|
[
"BSD-3-Clause"
] | 3
|
2019-04-11T11:01:54.000Z
|
2020-03-09T12:19:26.000Z
|
tests/test_async_validators.py
|
omarryhan/sanic-wtf
|
41c24f061fa16652a82d83753c3bee56f746e23a
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_async_validators.py
|
omarryhan/sanic-wtf
|
41c24f061fa16652a82d83753c3bee56f746e23a
|
[
"BSD-3-Clause"
] | null | null | null |
import re
import asyncio
from sanic import response
from wtforms.validators import DataRequired, Length, ValidationError
from wtforms import FileField, StringField, SubmitField
from sanic_wtf import SanicForm, to_bytes
from .helpers import render_form, csrf_token_pattern
def test_async_validators_with_csrf(
app,
async_validator_conditionally_fail
):
app.config['WTF_CSRF_SECRET_KEY'] = 'top secret !!!'
class TestForm(SanicForm):
msg = StringField('Note', validators=[
DataRequired(),
Length(max=10),
async_validator_conditionally_fail
])
submit = SubmitField('Submit')
@app.route('/', methods=['POST'])
async def index(request):
form = TestForm(request)
if not await form.validate_on_submit_async():
return response.text(
str(form.errors)
)
else:
return response.text('valid')
@app.route('/', methods=['GET'])
async def index_(request):
form = TestForm(request)
content = render_form(form)
return response.html(content)
req, resp = app.test_client.get('/')
assert resp.status == 200
assert 'csrf_token' in resp.text
token = re.findall(csrf_token_pattern, resp.text)[0]
assert token
payload = {'msg': 'happy', 'csrf_token': token}
req, resp = app.test_client.post('/', data=payload)
assert resp.status == 200
assert 'valid' in resp.text
def test_two_async_validators(
app,
async_validator_conditionally_fail,
async_validator_always_pass
):
app.config['WTF_CSRF_ENABLED'] = False
class TestForm(SanicForm):
msg = StringField('Note', validators=[
DataRequired(),
Length(max=10),
async_validator_conditionally_fail,
async_validator_always_pass
])
submit = SubmitField('Submit')
@app.route('/', methods=['POST'])
async def index(request):
form = TestForm(request)
if not await form.validate_on_submit_async():
return response.text('invalid')
else:
return response.text('valid')
@app.route('/', methods=['GET'])
async def index_(request):
form = TestForm(request)
content = render_form(form)
return response.html(content)
req, resp = app.test_client.get('/')
assert resp.status == 200
payload = {'msg': 'fail'}
req, resp = app.test_client.post('/', data=payload)
assert resp.status == 200
assert 'invalid' in resp.text
payload = {'msg': 'pass'}
req, resp = app.test_client.post('/', data=payload)
assert resp.status == 200
assert 'valid' in resp.text
def test_async_with_sync_validators_fail(
app,
async_validator_conditionally_fail,
async_validator_always_pass,
sync_validator_always_fail
):
app.config['WTF_CSRF_ENABLED'] = False
class TestForm(SanicForm):
msg = StringField('Note', validators=[
DataRequired(),
Length(max=10),
async_validator_conditionally_fail,
async_validator_always_pass,
sync_validator_always_fail
])
submit = SubmitField('Submit')
@app.route('/', methods=['POST'])
async def index(request):
form = TestForm(request)
if not await form.validate_on_submit_async():
return response.text('invalid')
else:
return response.text('valid')
@app.route('/', methods=['GET'])
async def index_(request):
form = TestForm(request)
content = render_form(form)
return response.html(content)
req, resp = app.test_client.get('/')
assert resp.status == 200
payload = {'msg': 'fail'}
req, resp = app.test_client.post('/', data=payload)
assert resp.status == 200
assert 'invalid' in resp.text
payload = {'msg': 'pass'}
req, resp = app.test_client.post('/', data=payload)
assert resp.status == 200
assert 'invalid' in resp.text
def test_async_with_sync_validators_conditionally_fail(
    app,
    async_validator_conditionally_fail,
    async_validator_always_pass,
    sync_validator_conditionally_fail
):
    """Mixing async validators with a conditionally-failing sync validator.

    msg='fail' must be rejected and msg='pass' must be accepted.
    """
    # Disable CSRF so the test client can POST without a token.
    app.config['WTF_CSRF_ENABLED'] = False
    class TestForm(SanicForm):
        msg = StringField('Note', validators=[
            DataRequired(),
            Length(max=10),
            async_validator_conditionally_fail,
            async_validator_always_pass,
            sync_validator_conditionally_fail
        ])
        submit = SubmitField('Submit')
    @app.route('/', methods=['POST'])
    async def index(request):
        form = TestForm(request)
        # validate_on_submit_async runs both sync and async validators.
        if not await form.validate_on_submit_async():
            return response.text('invalid')
        else:
            return response.text('valid')
    @app.route('/', methods=['GET'])
    async def index_(request):
        form = TestForm(request)
        content = render_form(form)
        return response.html(content)
    # GET renders the empty form.
    req, resp = app.test_client.get('/')
    assert resp.status == 200
    payload = {'msg': 'fail'}
    req, resp = app.test_client.post('/', data=payload)
    assert resp.status == 200
    assert resp.text == 'invalid'
    payload = {'msg': 'pass'}
    req, resp = app.test_client.post('/', data=payload)
    assert resp.status == 200
    # BUG FIX: the original `assert 'valid' in resp.text` was vacuous —
    # 'valid' is a substring of 'invalid', so the assertion passed either
    # way. The handler returns the exact body 'valid', so compare exactly.
    assert resp.text == 'valid'
def test_async_and_sync_stock_validator(
    app,
    async_validator_conditionally_fail,
    async_validator_always_pass
):
    """Async validators combined with a stock WTForms ``Length`` validator.

    ``Length(max=2)`` rejects the 4-character 'fail' payload regardless of
    what the async validators would decide.
    """
    # Disable CSRF so the test client can POST without a token.
    app.config['WTF_CSRF_ENABLED'] = False
    class TestForm(SanicForm):
        msg = StringField('Note', validators=[
            DataRequired(),
            Length(max=2),  # <--
            async_validator_conditionally_fail,
            async_validator_always_pass
        ])
        submit = SubmitField('Submit')
    @app.route('/', methods=['POST'])
    async def index(request):
        form = TestForm(request)
        # validate_on_submit_async runs both sync and async validators.
        if not await form.validate_on_submit_async():
            return response.text('invalid')
        else:
            return response.text('valid')
    @app.route('/', methods=['GET'])
    async def index_(request):
        form = TestForm(request)
        content = render_form(form)
        return response.html(content)
    # GET renders the empty form.
    req, resp = app.test_client.get('/')
    assert resp.status == 200
    # 'fail' exceeds Length(max=2), so validation must fail.
    payload = {'msg': 'fail'}
    req, resp = app.test_client.post('/', data=payload)
    assert resp.status == 200
    assert 'invalid' in resp.text
| 29.076233
| 68
| 0.626311
| 731
| 6,484
| 5.354309
| 0.114911
| 0.064384
| 0.033214
| 0.0465
| 0.887328
| 0.883751
| 0.883751
| 0.87302
| 0.87302
| 0.860756
| 0
| 0.010143
| 0.254935
| 6,484
| 222
| 69
| 29.207207
| 0.800041
| 0.000463
| 0
| 0.887097
| 0
| 0
| 0.059423
| 0
| 0
| 0
| 0
| 0
| 0.123656
| 1
| 0.026882
| false
| 0.05914
| 0.037634
| 0
| 0.225806
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
8e60fe774b6458843678291cf98a4db1a291a01a
| 2,306
|
py
|
Python
|
tests/test_preprocessing_binarize.py
|
abcnishant007/sklearn-evaluation
|
77ff2da43097b0451d8cf6f95c534409f612bf6a
|
[
"MIT"
] | 351
|
2016-01-27T19:15:27.000Z
|
2022-03-09T15:40:56.000Z
|
tests/test_preprocessing_binarize.py
|
abcnishant007/sklearn-evaluation
|
77ff2da43097b0451d8cf6f95c534409f612bf6a
|
[
"MIT"
] | 37
|
2016-03-16T03:57:59.000Z
|
2021-06-26T14:02:33.000Z
|
tests/test_preprocessing_binarize.py
|
abcnishant007/sklearn-evaluation
|
77ff2da43097b0451d8cf6f95c534409f612bf6a
|
[
"MIT"
] | 30
|
2016-01-27T19:27:08.000Z
|
2022-03-31T06:09:59.000Z
|
from random import shuffle
from unittest import TestCase
import numpy as np
from sklearn_evaluation.preprocessing import binarize
class Test_binarize_scores_at_top_proportion(TestCase):
    """binarize.scores_at_top_proportion marks the top fraction of scores as 1."""

    def setUp(self):
        # Ten scores, already sorted from highest to lowest.
        self.scores = np.array(
            [1.0, 0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1])

    def _check(self, proportion, ones):
        # Top `proportion` of ten scores should binarize to `ones` leading 1s.
        actual = binarize.scores_at_top_proportion(self.scores, proportion)
        expected = np.array([1] * ones + [0] * (10 - ones))
        np.testing.assert_equal(actual, expected)

    def test_at_10(self):
        self._check(0.1, 1)

    def test_at_50(self):
        self._check(0.5, 5)

    def test_at_100(self):
        self._check(1.0, 10)

    def test_proportion_less_than_zero(self):
        # Proportions outside [0, 1] are rejected.
        with self.assertRaises(ValueError):
            binarize.scores_at_top_proportion(self.scores, -0.1)

    def test_proportion_more_than_one(self):
        with self.assertRaises(ValueError):
            binarize.scores_at_top_proportion(
                self.scores, top_proportion=1.1)
class Test_cutoff_score_at_top_proportion(TestCase):
    """binarize.cutoff_score_at_top_proportion returns the score threshold."""

    def setUp(self):
        self.scores = np.array(
            [1.0, 0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3, 0.2, 0.1])
        # The cutoff must not depend on input ordering, so shuffle in place.
        shuffle(self.scores)

    def test_at_10(self):
        self.assertEqual(
            binarize.cutoff_score_at_top_proportion(self.scores, 0.1), 1.0)

    def test_at_50(self):
        self.assertEqual(
            binarize.cutoff_score_at_top_proportion(self.scores, 0.5), 0.6)

    def test_at_100(self):
        self.assertEqual(
            binarize.cutoff_score_at_top_proportion(self.scores, 1.0), 0.1)

    def test_proportion_less_than_zero(self):
        # Proportions outside [0, 1] are rejected.
        with self.assertRaises(ValueError):
            binarize.cutoff_score_at_top_proportion(self.scores, -0.1)

    def test_proportion_more_than_one(self):
        with self.assertRaises(ValueError):
            binarize.cutoff_score_at_top_proportion(
                self.scores, top_proportion=1.1)
| 34.41791
| 78
| 0.660885
| 344
| 2,306
| 4.18314
| 0.151163
| 0.020848
| 0.125087
| 0.132036
| 0.839472
| 0.781098
| 0.780403
| 0.776234
| 0.749131
| 0.692842
| 0
| 0.061832
| 0.228534
| 2,306
| 66
| 79
| 34.939394
| 0.747049
| 0
| 0
| 0.520833
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.208333
| 1
| 0.25
| false
| 0
| 0.083333
| 0
| 0.375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8ec6736d05774df57ed49e00db00eaadb0214306
| 107
|
py
|
Python
|
tests/functional/conftest.py
|
tomchuk/meetup_20160428
|
9879e42c7535c6af9bee10697c6fdb046b63473d
|
[
"MIT"
] | null | null | null |
tests/functional/conftest.py
|
tomchuk/meetup_20160428
|
9879e42c7535c6af9bee10697c6fdb046b63473d
|
[
"MIT"
] | null | null | null |
tests/functional/conftest.py
|
tomchuk/meetup_20160428
|
9879e42c7535c6af9bee10697c6fdb046b63473d
|
[
"MIT"
] | null | null | null |
from tests.fixtures import *
from tests.functional.fixtures import *
from tests.functional.steps import *
| 21.4
| 39
| 0.803738
| 14
| 107
| 6.142857
| 0.428571
| 0.313953
| 0.418605
| 0.534884
| 0.767442
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121495
| 107
| 4
| 40
| 26.75
| 0.914894
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d93622bc22ad14c6a0f89f8bd5de8138c935b144
| 1,902
|
py
|
Python
|
test_pypi_releases.py
|
PetitLepton/pypi_releases
|
6746d98c627a3c1d71626a751e2a8a98be5de0b9
|
[
"MIT"
] | null | null | null |
test_pypi_releases.py
|
PetitLepton/pypi_releases
|
6746d98c627a3c1d71626a751e2a8a98be5de0b9
|
[
"MIT"
] | null | null | null |
test_pypi_releases.py
|
PetitLepton/pypi_releases
|
6746d98c627a3c1d71626a751e2a8a98be5de0b9
|
[
"MIT"
] | null | null | null |
from pypi_releases import (
extract_package_name_and_version,
extract_all_packages_names_and_versions,
)
def test_extract_package_name_and_version():
    """extract_package_name_and_version splits a spec into (name, version).

    Handles '=' (conda), '==' (pip) and a bare name with no version.
    """
    cases = [
        ("request=1.0.0", "request", "1.0.0"),
        ("request==1.0.0", "request", "1.0.0"),
        # No version separator at all -> version falls back to "Not provided".
        ("request", "request", "Not provided"),
    ]
    for package, expected_name, expected_version in cases:
        output_name, output_version = extract_package_name_and_version(package)
        assert output_name == expected_name
        assert output_version == expected_version
def test_extract_all_packages_names_and_versions():
    """extract_all_packages_names_and_versions pulls package specs out of a
    conda environment.yml body, covering conda deps, pip deps, and both.

    NOTE(review): the in-string indentation of these YAML literals could not
    be recovered from the source dump; confirm against the parser if it is
    whitespace sensitive.
    """
    # Conda-only environment: the single conda dependency is extracted.
    file_content = """
name: env
channels:
- conda-forge
- defaults
dependencies:
- python=3.8.0
"""
    expected = ["python=3.8.0"]
    output = extract_all_packages_names_and_versions(file_content)
    assert output == expected
    # pip-only environment: entries under the pip section are extracted too.
    file_content = """
name: env
channels:
- conda-forge
- defaults
dependencies:
- pip:
- request==1.0.0
"""
    expected = ["request==1.0.0"]
    output = extract_all_packages_names_and_versions(file_content)
    assert output == expected
    # Mixed conda + pip environment: all specs are returned in file order.
    file_content = """
name: env
channels:
- conda-forge
- defaults
dependencies:
- python=3.8.0
- pip=20.0.0
- pip:
- request==1.0.0
"""
    expected = ["python=3.8.0", "pip=20.0.0", "request==1.0.0"]
    output = extract_all_packages_names_and_versions(file_content)
    assert output == expected
| 26.416667
| 75
| 0.665615
| 233
| 1,902
| 5.098712
| 0.154506
| 0.016835
| 0.060606
| 0.06734
| 0.960438
| 0.908249
| 0.813131
| 0.813131
| 0.765152
| 0.765152
| 0
| 0.030158
| 0.232913
| 1,902
| 71
| 76
| 26.788732
| 0.784099
| 0
| 0
| 0.758621
| 0
| 0
| 0.299685
| 0
| 0
| 0
| 0
| 0
| 0.155172
| 1
| 0.034483
| false
| 0
| 0.017241
| 0
| 0.051724
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d93ff252dfc84765e93c6e76a12e9930edc9e6db
| 135,598
|
py
|
Python
|
tests/resources/test_full_game_possessions.py
|
bahalbach/pbpstats
|
6a9f602764edb7a3ee0e880fffbb5aa34990d6e9
|
[
"MIT"
] | 54
|
2019-10-16T00:10:51.000Z
|
2022-03-19T21:21:05.000Z
|
tests/resources/test_full_game_possessions.py
|
bahalbach/pbpstats
|
6a9f602764edb7a3ee0e880fffbb5aa34990d6e9
|
[
"MIT"
] | 15
|
2019-11-19T01:20:52.000Z
|
2022-02-04T13:38:37.000Z
|
tests/resources/test_full_game_possessions.py
|
bahalbach/pbpstats
|
6a9f602764edb7a3ee0e880fffbb5aa34990d6e9
|
[
"MIT"
] | 15
|
2019-11-19T11:54:51.000Z
|
2022-03-21T05:08:53.000Z
|
import pbpstats
from pbpstats.client import Client
class TestFullGamePossessions:
    """Regression tests over possessions parsed from a full stats.nba game."""
    # Load play-by-play possession data from local fixture files instead of
    # hitting the NBA stats API.
    settings = {
        "dir": "tests/data",
        "Possessions": {"source": "file", "data_provider": "stats_nba"},
    }
    # Built once at class-definition (import) time; shared by all tests.
    client = Client(settings)
    game = client.Game("0021600270")
def test_first_possession(self):
assert self.game.possessions.items[0].start_time == "12:00"
assert (
self.game.possessions.items[0].possession_start_type
== pbpstats.OFF_DEADBALL_STRING
)
assert self.game.possessions.items[0].start_score_margin == 0
assert self.game.possessions.items[0].events[-1].score_margin == 0
expected_shot_data = {
"PlayerId": 202693,
"TeamId": 1610612764,
"OpponentTeamId": 1610612760,
"LineupId": "101162-202322-202693-203078-203490",
"OpponentLineupId": "1627734-201566-203460-203500-203506",
"Made": True,
"X": -19,
"Y": 120,
"Time": 699,
"ShotValue": 2,
"Assisted": False,
"Putback": False,
"ShotType": "ShortMidRange",
"ScoreMargin": 0,
"EventNum": 2,
"IsAnd1": False,
}
assert self.game.possessions.items[0].events[-1].shot_data == expected_shot_data
def test_off_short_mid_range_make(self):
assert (
self.game.possessions.items[1].possession_start_type
== f"Off{pbpstats.SHORT_MID_RANGE_STRING}{pbpstats.MAKE_STRING}"
)
assert (
self.game.possessions.items[1].previous_possession_end_shooter_player_id
== 202693
)
assert (
self.game.possessions.items[1].previous_possession_end_rebound_player_id
== 0
)
assert (
self.game.possessions.items[1].previous_possession_end_turnover_player_id
== 0
)
assert (
self.game.possessions.items[1].previous_possession_end_steal_player_id == 0
)
assert self.game.possessions.items[1].start_score_margin == -2
assert self.game.possessions.items[1].events[0].score_margin == -2
def test_off_arc3_miss(self):
assert (
self.game.possessions.items[3].possession_start_type
== f"Off{pbpstats.ARC_3_STRING}{pbpstats.MISS_STRING}"
)
assert (
self.game.possessions.items[3].previous_possession_end_shooter_player_id
== 202322
)
assert (
self.game.possessions.items[3].previous_possession_end_rebound_player_id
== 1627734
)
expected_shot_data = {
"PlayerId": 203500,
"TeamId": 1610612760,
"OpponentTeamId": 1610612764,
"LineupId": "1627734-201566-203460-203500-203506",
"OpponentLineupId": "101162-202322-202693-203078-203490",
"Made": False,
"X": 27,
"Y": 57,
"Time": 644.0,
"ShotValue": 2,
"SecondsSinceOReb": 6.0,
"OrebShotPlayerId": 201566,
"OrebReboundPlayerId": 0,
"OrebShotType": "Team",
"Blocked": False,
"Putback": False,
"ShotType": "ShortMidRange",
"ScoreMargin": 0,
"EventNum": 10,
"IsAnd1": False,
}
assert self.game.possessions.items[3].events[-2].shot_data == expected_shot_data
def test_off_short_mid_range_miss_start_type(self):
assert (
self.game.possessions.items[4].possession_start_type
== f"Off{pbpstats.SHORT_MID_RANGE_STRING}{pbpstats.MISS_STRING}"
)
def test_off_live_ball_turnover(self):
assert (
self.game.possessions.items[5].possession_start_type
== pbpstats.OFF_LIVE_BALL_TURNOVER_STRING
)
assert (
self.game.possessions.items[5].previous_possession_end_shooter_player_id
== 0
)
assert (
self.game.possessions.items[5].previous_possession_end_rebound_player_id
== 0
)
assert (
self.game.possessions.items[5].previous_possession_end_turnover_player_id
== 202693
)
assert (
self.game.possessions.items[5].previous_possession_end_steal_player_id
== 1627734
)
def test_dead_ball_turnover_start_type(self):
assert (
self.game.possessions.items[20].possession_start_type
== pbpstats.OFF_DEADBALL_STRING
)
def test_off_timeout_start_type(self):
assert (
self.game.possessions.items[23].possession_start_type
== pbpstats.OFF_TIMEOUT_STRING
)
def test_team_rebound_start_type(self):
assert (
self.game.possessions.items[24].possession_start_type
== pbpstats.OFF_DEADBALL_STRING
)
def test_second_chance_possession(self):
stats = self.game.possessions.items[69].possession_stats
assert {
"player_id": 101162,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-201977-202322-202693-203078",
"opponent_lineup_id": "201566-202683-203460-203506-203924",
"stat_key": "SecondChanceDefPoss",
"stat_value": 1,
} in stats
assert {
"player_id": 101162,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-201977-202322-202693-203078",
"opponent_lineup_id": "201566-202683-203460-203506-203924",
"stat_key": "SecondChanceSecondsPlayedDef",
"stat_value": 14.0,
} in stats
assert {
"player_id": 201566,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "201566-202683-203460-203506-203924",
"opponent_lineup_id": "101162-201977-202322-202693-203078",
"stat_key": "SecondChanceOffPoss",
"stat_value": 1,
} in stats
assert {
"player_id": 201566,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "201566-202683-203460-203506-203924",
"opponent_lineup_id": "101162-201977-202322-202693-203078",
"stat_key": "SecondChanceSecondsPlayedOff",
"stat_value": 14.0,
} in stats
def test_first_possession_stats(self):
results = self.game.possessions.items[0].possession_stats
assert len(results) == 48
assert {
"player_id": 101162,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "OffPoss",
"stat_value": 1,
} in results
assert {
"player_id": 101162,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "PlusMinus",
"stat_value": 2,
} in results
assert {
"player_id": 101162,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "SecondsPlayedOff",
"stat_value": 21.0,
} in results
assert {
"player_id": 101162,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "Period1Fouls0SecondsPlayedOff",
"stat_value": 21.0,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "DefPoss",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "OpponentPoints",
"stat_value": 2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "PlusMinus",
"stat_value": -2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "SecondsPlayedDef",
"stat_value": 21.0,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "Period1Fouls0SecondsPlayedDef",
"stat_value": 21.0,
} in results
assert {
"player_id": 202322,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "OffPoss",
"stat_value": 1,
} in results
assert {
"player_id": 202322,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "PlusMinus",
"stat_value": 2,
} in results
assert {
"player_id": 202322,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "SecondsPlayedOff",
"stat_value": 21.0,
} in results
assert {
"player_id": 202322,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "Period1Fouls0SecondsPlayedOff",
"stat_value": 21.0,
} in results
assert {
"player_id": 202693,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "OffPoss",
"stat_value": 1,
} in results
assert {
"player_id": 202693,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "PlusMinus",
"stat_value": 2,
} in results
assert {
"player_id": 202693,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "SecondsPlayedOff",
"stat_value": 21.0,
} in results
assert {
"player_id": 202693,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "Period1Fouls0SecondsPlayedOff",
"stat_value": 21.0,
} in results
assert {
"player_id": 202693,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "Total2ptShotDistance",
"stat_value": 12.1,
} in results
assert {
"player_id": 202693,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "Total2ptShotsWithDistance",
"stat_value": 1,
} in results
assert {
"player_id": 202693,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "UnassistedShortMidRange",
"stat_value": 1,
} in results
assert {
"player_id": 203078,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "OffPoss",
"stat_value": 1,
} in results
assert {
"player_id": 203078,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "PlusMinus",
"stat_value": 2,
} in results
assert {
"player_id": 203078,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "SecondsPlayedOff",
"stat_value": 21.0,
} in results
assert {
"player_id": 203078,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "Period1Fouls0SecondsPlayedOff",
"stat_value": 21.0,
} in results
assert {
"player_id": 203460,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "DefPoss",
"stat_value": 1,
} in results
assert {
"player_id": 203460,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "OpponentPoints",
"stat_value": 2,
} in results
assert {
"player_id": 203460,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "PlusMinus",
"stat_value": -2,
} in results
assert {
"player_id": 203460,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "SecondsPlayedDef",
"stat_value": 21.0,
} in results
assert {
"player_id": 203460,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "Period1Fouls0SecondsPlayedDef",
"stat_value": 21.0,
} in results
assert {
"player_id": 203490,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "OffPoss",
"stat_value": 1,
} in results
assert {
"player_id": 203490,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "PlusMinus",
"stat_value": 2,
} in results
assert {
"player_id": 203490,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "SecondsPlayedOff",
"stat_value": 21.0,
} in results
assert {
"player_id": 203490,
"team_id": 1610612764,
"opponent_team_id": 1610612760,
"lineup_id": "101162-202322-202693-203078-203490",
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"stat_key": "Period1Fouls0SecondsPlayedOff",
"stat_value": 21.0,
} in results
assert {
"player_id": 203500,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "DefPoss",
"stat_value": 1,
} in results
assert {
"player_id": 203500,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "OpponentPoints",
"stat_value": 2,
} in results
assert {
"player_id": 203500,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "PlusMinus",
"stat_value": -2,
} in results
assert {
"player_id": 203500,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "SecondsPlayedDef",
"stat_value": 21.0,
} in results
assert {
"player_id": 203500,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "Period1Fouls0SecondsPlayedDef",
"stat_value": 21.0,
} in results
assert {
"player_id": 203506,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "DefPoss",
"stat_value": 1,
} in results
assert {
"player_id": 203506,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "OpponentPoints",
"stat_value": 2,
} in results
assert {
"player_id": 203506,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "PlusMinus",
"stat_value": -2,
} in results
assert {
"player_id": 203506,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "SecondsPlayedDef",
"stat_value": 21.0,
} in results
assert {
"player_id": 203506,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "Period1Fouls0SecondsPlayedDef",
"stat_value": 21.0,
} in results
assert {
"player_id": 1627734,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "DefPoss",
"stat_value": 1,
} in results
assert {
"player_id": 1627734,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "OpponentPoints",
"stat_value": 2,
} in results
assert {
"player_id": 1627734,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "PlusMinus",
"stat_value": -2,
} in results
assert {
"player_id": 1627734,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "SecondsPlayedDef",
"stat_value": 21.0,
} in results
assert {
"player_id": 1627734,
"team_id": 1610612760,
"opponent_team_id": 1610612764,
"lineup_id": "1627734-201566-203460-203500-203506",
"opponent_lineup_id": "101162-202322-202693-203078-203490",
"stat_key": "Period1Fouls0SecondsPlayedDef",
"stat_value": 21.0,
} in results
def test_team_stats(self):
results = self.game.possessions.team_stats
assert len(results) == 434
assert {
"team_id": 1610612760,
"stat_key": "1627734:AssistsTo:201566:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "1627734:AssistsTo:203506:LongMidRange",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "201566:AssistsTo:1627734:AtRim",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "201566:AssistsTo:202683:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "201566:AssistsTo:202683:ShortMidRange",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "201566:AssistsTo:203460:Arc3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "201566:AssistsTo:203460:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "201566:AssistsTo:203500:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "201566:AssistsTo:203506:Arc3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "201566:AssistsTo:203506:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "201566:AssistsTo:203530:Arc3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "201627:AssistsTo:202683:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "201627:AssistsTo:203506:Arc3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "201627:AssistsTo:203530:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "201627:AssistsTo:203924:Corner3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "203506:AssistsTo:1627734:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "203506:AssistsTo:1627734:ShortMidRange",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "203506:AssistsTo:201566:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "203506:AssistsTo:201566:ShortMidRange",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "203506:AssistsTo:203460:Corner3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "203506:AssistsTo:203500:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "203530:AssistsTo:201627:Corner3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "203902:AssistsTo:201627:Arc3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "203902:AssistsTo:202683:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "203902:AssistsTo:203506:Arc3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "203924:AssistsTo:203506:Arc3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "2pt And 1 Free Throw Trips",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "2pt Shooting Foul Free Throw Trips",
"stat_value": 11,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Arc3Assists",
"stat_value": 7,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Arc3DefReboundOpportunities",
"stat_value": 12,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Arc3DefRebounds",
"stat_value": 10,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Arc3OffReboundOpportunities",
"stat_value": 12,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Arc3OffRebounded",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Arc3OffReboundedOpportunities",
"stat_value": 12,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Arc3OffRebounds",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AssistedArc3",
"stat_value": 7,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AssistedAtRim",
"stat_value": 14,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AssistedCorner3",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AssistedLongMidRange",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AssistedShortMidRange",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AtRimAssists",
"stat_value": 14,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AtRimBlocked",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AtRimBlockedDefReboundOpportunities",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AtRimBlockedDefRebounds",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AtRimBlockedOffReboundOpportunities",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AtRimBlockedOffRebounded",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AtRimBlockedOffReboundedOpportunities",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AtRimBlockedOffRebounds",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AtRimDefReboundOpportunities",
"stat_value": 10,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AtRimDefRebounds",
"stat_value": 8,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AtRimOffReboundOpportunities",
"stat_value": 11,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AtRimOffRebounded",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AtRimOffReboundedOpportunities",
"stat_value": 11,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AtRimOffRebounds",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "AtRimSelfOReb",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "BadPassOutOfBoundsTurnovers",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "BadPassSteals",
"stat_value": 8,
} in results
assert {
"team_id": 1610612760,
"stat_key": "BadPassTurnovers",
"stat_value": 6,
} in results
assert {
"team_id": 1610612760,
"stat_key": "BlockedAtRim",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "BlockedAtRimRecovered",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "BlockedShortMidRange",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "BlockedShortMidRangeRecovered",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Corner3Assists",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Corner3DefReboundOpportunities",
"stat_value": 4,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Corner3DefRebounds",
"stat_value": 4,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Corner3OffReboundOpportunities",
"stat_value": 6,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Corner3OffRebounded",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Corner3OffReboundedOpportunities",
"stat_value": 6,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Corner3OffRebounds",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "DeadBallTurnovers",
"stat_value": 6,
} in results
assert {
"team_id": 1610612760,
"stat_key": "DefPoss",
"stat_value": 108,
} in results
assert {
"team_id": 1610612760,
"stat_key": "DefensiveGoaltends",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "FTDefReboundOpportunities",
"stat_value": 4,
} in results
assert {
"team_id": 1610612760,
"stat_key": "FTDefRebounds",
"stat_value": 4,
} in results
assert {
"team_id": 1610612760,
"stat_key": "FTOffReboundOpportunities",
"stat_value": 4,
} in results
assert {
"team_id": 1610612760,
"stat_key": "FTOffRebounded",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "FTOffReboundedOpportunities",
"stat_value": 4,
} in results
assert {
"team_id": 1610612760,
"stat_key": "FTOffRebounds",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "FtsMade",
"stat_value": 21,
} in results
assert {
"team_id": 1610612760,
"stat_key": "TechFtsMade",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "FtsMissed",
"stat_value": 9,
} in results
assert {
"team_id": 1610612760,
"stat_key": "LongMidRangeAssists",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "LongMidRangeDefReboundOpportunities",
"stat_value": 9,
} in results
assert {
"team_id": 1610612760,
"stat_key": "LongMidRangeDefRebounds",
"stat_value": 6,
} in results
assert {
"team_id": 1610612760,
"stat_key": "LongMidRangeOffReboundOpportunities",
"stat_value": 10,
} in results
assert {
"team_id": 1610612760,
"stat_key": "LongMidRangeOffRebounded",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "LongMidRangeOffReboundedOpportunities",
"stat_value": 10,
} in results
assert {
"team_id": 1610612760,
"stat_key": "LongMidRangeOffRebounds",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Loose Ball Fouls",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Loose Ball Fouls Drawn",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "LostBallOutOfBoundsTurnovers",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "LostBallSteals",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "LostBallTurnovers",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "MissedArc3",
"stat_value": 12,
} in results
assert {
"team_id": 1610612760,
"stat_key": "MissedAtRim",
"stat_value": 11,
} in results
assert {
"team_id": 1610612760,
"stat_key": "MissedCorner3",
"stat_value": 6,
} in results
assert {
"team_id": 1610612760,
"stat_key": "MissedLongMidRange",
"stat_value": 11,
} in results
assert {
"team_id": 1610612760,
"stat_key": "MissedShortMidRange",
"stat_value": 6,
} in results
assert {
"team_id": 1610612760,
"stat_key": "OffPoss",
"stat_value": 109,
} in results
assert {
"team_id": 1610612760,
"stat_key": "OnFloorOffReb",
"stat_value": 65,
} in results
assert {
"team_id": 1610612760,
"stat_key": "OpponentPoints",
"stat_value": 115,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Penalty Free Throw Trips",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Personal Block Fouls Drawn",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Personal Fouls",
"stat_value": 7,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Personal Fouls Drawn",
"stat_value": 7,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Personal Take Fouls",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Personal Take Fouls Drawn",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "PlusMinus",
"stat_value": 11,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Putbacks",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondsPlayedDef",
"stat_value": 1636,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondsPlayedOff",
"stat_value": 1544,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Shooting Block Fouls Drawn",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Shooting Fouls",
"stat_value": 12,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Shooting Fouls Drawn",
"stat_value": 11,
} in results
assert {
"team_id": 1610612760,
"stat_key": "ShortMidRangeAssists",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "ShortMidRangeBlockedDefReboundOpportunities",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "ShortMidRangeBlockedDefRebounds",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "ShortMidRangeDefReboundOpportunities",
"stat_value": 12,
} in results
assert {
"team_id": 1610612760,
"stat_key": "ShortMidRangeDefRebounds",
"stat_value": 8,
} in results
assert {
"team_id": 1610612760,
"stat_key": "ShortMidRangeOffReboundOpportunities",
"stat_value": 6,
} in results
assert {
"team_id": 1610612760,
"stat_key": "ShortMidRangeOffRebounded",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "ShortMidRangeOffReboundedOpportunities",
"stat_value": 6,
} in results
assert {
"team_id": 1610612760,
"stat_key": "ShortMidRangeOffRebounds",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "ShotClockViolations",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Technical Free Throw Trips",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Total2ptShotDistance",
"stat_value": 428.5,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Total2ptShotsWithDistance",
"stat_value": 65,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Total3ptShotDistance",
"stat_value": 728.0,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Total3ptShotsWithDistance",
"stat_value": 30,
} in results
assert {
"team_id": 1610612760,
"stat_key": "Travels",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "UnassistedArc3",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "UnassistedAtRim",
"stat_value": 9,
} in results
assert {
"team_id": 1610612760,
"stat_key": "UnassistedLongMidRange",
"stat_value": 4,
} in results
assert {
"team_id": 1610612760,
"stat_key": "UnassistedShortMidRange",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondChanceDefPoss",
"stat_value": 13,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondChanceOffPoss",
"stat_value": 11,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondChanceBadPassOutOfBoundsTurnovers",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondChanceDeadBallTurnovers",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondChanceShotClockViolations",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondChanceAssistedArc3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondChanceMissedArc3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondChanceMissedAtRim",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondChanceMissedLongMidRange",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondChanceMissedShortMidRange",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondChanceUnassistedAtRim",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondChanceFtsMade",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondChanceFtsMissed",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "PenaltyDefPoss",
"stat_value": 45,
} in results
assert {
"team_id": 1610612760,
"stat_key": "PenaltyOffPoss",
"stat_value": 25,
} in results
assert {
"team_id": 1610612760,
"stat_key": "PenaltyBadPassTurnovers",
"stat_value": 3,
} in results
assert {
"team_id": 1610612760,
"stat_key": "PenaltyLostBallTurnovers",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "PenaltyTravels",
"stat_value": 1,
} in results
assert {
"team_id": 1610612760,
"stat_key": "PenaltyUnassistedAtRim",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "PenaltyAssistedAtRim",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "PenaltyAssistedArc3",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "PenaltyMissedArc3",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "PenaltyFtsMade",
"stat_value": 11,
} in results
assert {
"team_id": 1610612760,
"stat_key": "PenaltyFtsMissed",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "FinalMinutePenaltyTakeFoulOffPoss",
"stat_value": 2,
} in results
assert {
"team_id": 1610612760,
"stat_key": "FinalMinutePenaltyTakeFoulFtsMade",
"stat_value": 4,
} in results
assert {
"team_id": 1610612760,
"stat_key": "SecondChance2pt Shooting Foul Free Throw Trips",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "101162:AssistsTo:203078:Arc3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "101162:AssistsTo:203078:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "1626162:AssistsTo:203490:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "202322:AssistsTo:101162:AtRim",
"stat_value": 3,
} in results
assert {
"team_id": 1610612764,
"stat_key": "202322:AssistsTo:1626162:Arc3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "202322:AssistsTo:201160:ShortMidRange",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "202322:AssistsTo:202693:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "202322:AssistsTo:203078:AtRim",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "202322:AssistsTo:203078:Corner3",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "202322:AssistsTo:203078:LongMidRange",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "202322:AssistsTo:203078:ShortMidRange",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "202322:AssistsTo:203490:Arc3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "202322:AssistsTo:203490:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "202322:AssistsTo:203490:ShortMidRange",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "202693:AssistsTo:101162:ShortMidRange",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "202693:AssistsTo:203078:Arc3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "203078:AssistsTo:203490:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "203107:AssistsTo:1626162:Arc3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "203107:AssistsTo:201977:Arc3",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "203107:AssistsTo:202693:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "203490:AssistsTo:1626162:AtRim",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "2pt And 1 Free Throw Trips",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "2pt Shooting Foul Free Throw Trips",
"stat_value": 11,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Arc3Assists",
"stat_value": 6,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Arc3DefReboundOpportunities",
"stat_value": 12,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Arc3DefRebounds",
"stat_value": 10,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Arc3OffReboundOpportunities",
"stat_value": 12,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Arc3OffRebounded",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Arc3OffReboundedOpportunities",
"stat_value": 12,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Arc3OffRebounds",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AssistedArc3",
"stat_value": 6,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AssistedAtRim",
"stat_value": 12,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AssistedCorner3",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AssistedLongMidRange",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AssistedShortMidRange",
"stat_value": 4,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AtRimAssists",
"stat_value": 12,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AtRimBlocked",
"stat_value": 3,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AtRimBlockedDefReboundOpportunities",
"stat_value": 3,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AtRimBlockedDefRebounds",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AtRimBlockedOffReboundOpportunities",
"stat_value": 3,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AtRimBlockedOffRebounded",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AtRimBlockedOffReboundedOpportunities",
"stat_value": 3,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AtRimBlockedOffRebounds",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AtRimDefReboundOpportunities",
"stat_value": 11,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AtRimDefRebounds",
"stat_value": 8,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AtRimOffReboundOpportunities",
"stat_value": 10,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AtRimOffRebounded",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AtRimOffReboundedOpportunities",
"stat_value": 10,
} in results
assert {
"team_id": 1610612764,
"stat_key": "AtRimOffRebounds",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "BadPassSteals",
"stat_value": 6,
} in results
assert {
"team_id": 1610612764,
"stat_key": "BadPassTurnovers",
"stat_value": 8,
} in results
assert {
"team_id": 1610612764,
"stat_key": "BlockedAtRim",
"stat_value": 3,
} in results
assert {
"team_id": 1610612764,
"stat_key": "BlockedAtRimRecovered",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Corner3Assists",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Corner3DefReboundOpportunities",
"stat_value": 6,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Corner3DefRebounds",
"stat_value": 5,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Corner3OffReboundOpportunities",
"stat_value": 4,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Corner3OffReboundedOpportunities",
"stat_value": 4,
} in results
assert {
"team_id": 1610612764,
"stat_key": "DeadBallTurnovers",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "DefPoss",
"stat_value": 109,
} in results
assert {
"team_id": 1610612764,
"stat_key": "FTDefReboundOpportunities",
"stat_value": 4,
} in results
assert {
"team_id": 1610612764,
"stat_key": "FTDefRebounds",
"stat_value": 3,
} in results
assert {
"team_id": 1610612764,
"stat_key": "FTOffReboundOpportunities",
"stat_value": 4,
} in results
assert {
"team_id": 1610612764,
"stat_key": "FTOffReboundedOpportunities",
"stat_value": 4,
} in results
assert {
"team_id": 1610612764,
"stat_key": "FtsMade",
"stat_value": 16,
} in results
assert {
"team_id": 1610612764,
"stat_key": "TechFtsMade",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "FtsMissed",
"stat_value": 9,
} in results
assert {
"team_id": 1610612764,
"stat_key": "HeaveMisses",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "LongMidRangeAssists",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "LongMidRangeDefReboundOpportunities",
"stat_value": 10,
} in results
assert {
"team_id": 1610612764,
"stat_key": "LongMidRangeDefRebounds",
"stat_value": 7,
} in results
assert {
"team_id": 1610612764,
"stat_key": "LongMidRangeOffReboundOpportunities",
"stat_value": 9,
} in results
assert {
"team_id": 1610612764,
"stat_key": "LongMidRangeOffRebounded",
"stat_value": 3,
} in results
assert {
"team_id": 1610612764,
"stat_key": "LongMidRangeOffReboundedOpportunities",
"stat_value": 9,
} in results
assert {
"team_id": 1610612764,
"stat_key": "LongMidRangeOffRebounds",
"stat_value": 3,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Loose Ball Fouls",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Loose Ball Fouls Drawn",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "LostBallOutOfBoundsTurnovers",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "LostBallSteals",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "LostBallTurnovers",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "MissedArc3",
"stat_value": 13,
} in results
assert {
"team_id": 1610612764,
"stat_key": "MissedAtRim",
"stat_value": 10,
} in results
assert {
"team_id": 1610612764,
"stat_key": "MissedCorner3",
"stat_value": 5,
} in results
assert {
"team_id": 1610612764,
"stat_key": "MissedLongMidRange",
"stat_value": 9,
} in results
assert {
"team_id": 1610612764,
"stat_key": "MissedShortMidRange",
"stat_value": 13,
} in results
assert {
"team_id": 1610612764,
"stat_key": "OffPoss",
"stat_value": 108,
} in results
assert {
"team_id": 1610612764,
"stat_key": "OnFloorOffReb",
"stat_value": 65,
} in results
assert {
"team_id": 1610612764,
"stat_key": "OpponentPoints",
"stat_value": 126,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Penalty Free Throw Trips",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Personal Block Fouls",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Personal Fouls",
"stat_value": 7,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Personal Fouls Drawn",
"stat_value": 7,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Personal Take Fouls",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Personal Take Fouls Drawn",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "PlusMinus",
"stat_value": -11,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Putbacks",
"stat_value": 3,
} in results
assert {
"team_id": 1610612764,
"stat_key": "SecondsPlayedDef",
"stat_value": 1544,
} in results
assert {
"team_id": 1610612764,
"stat_key": "SecondsPlayedOff",
"stat_value": 1636,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Shooting Block Fouls",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Shooting Fouls",
"stat_value": 11,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Shooting Fouls Drawn",
"stat_value": 12,
} in results
assert {
"team_id": 1610612764,
"stat_key": "ShortMidRangeAssists",
"stat_value": 4,
} in results
assert {
"team_id": 1610612764,
"stat_key": "ShortMidRangeBlocked",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "ShortMidRangeBlockedOffReboundOpportunities",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "ShortMidRangeBlockedOffReboundedOpportunities",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "ShortMidRangeDefReboundOpportunities",
"stat_value": 6,
} in results
assert {
"team_id": 1610612764,
"stat_key": "ShortMidRangeDefRebounds",
"stat_value": 4,
} in results
assert {
"team_id": 1610612764,
"stat_key": "ShortMidRangeOffReboundOpportunities",
"stat_value": 12,
} in results
assert {
"team_id": 1610612764,
"stat_key": "ShortMidRangeOffRebounded",
"stat_value": 4,
} in results
assert {
"team_id": 1610612764,
"stat_key": "ShortMidRangeOffReboundedOpportunities",
"stat_value": 12,
} in results
assert {
"team_id": 1610612764,
"stat_key": "ShortMidRangeOffRebounds",
"stat_value": 4,
} in results
assert {
"team_id": 1610612764,
"stat_key": "ShortMidRangeSelfOReb",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "ShotClockViolations",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Technical Free Throw Trips",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Total2ptShotDistance",
"stat_value": 442.3,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Total2ptShotsWithDistance",
"stat_value": 70,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Total3ptShotDistance",
"stat_value": 697.3,
} in results
assert {
"team_id": 1610612764,
"stat_key": "Total3ptShotsWithDistance",
"stat_value": 28,
} in results
assert {
"team_id": 1610612764,
"stat_key": "UnassistedArc3",
"stat_value": 2,
} in results
assert {
"team_id": 1610612764,
"stat_key": "UnassistedAtRim",
"stat_value": 10,
} in results
assert {
"team_id": 1610612764,
"stat_key": "UnassistedLongMidRange",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "UnassistedShortMidRange",
"stat_value": 6,
} in results
assert {
"team_id": 1610612764,
"stat_key": "SecondChanceDefPoss",
"stat_value": 11,
} in results
assert {
"team_id": 1610612764,
"stat_key": "SecondChanceOffPoss",
"stat_value": 13,
} in results
assert {
"team_id": 1610612764,
"stat_key": "SecondChanceDeadBallTurnovers",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "SecondChanceShotClockViolations",
"stat_value": 1,
} in results
assert {
"team_id": 1610612764,
"stat_key": "PenaltyDefPoss",
"stat_value": 25,
} in results
assert {
"team_id": 1610612764,
"stat_key": "PenaltyOffPoss",
"stat_value": 45,
} in results
assert {
"team_id": 1610612764,
"stat_key": "FinalMinutePenaltyTakeFoulDefPoss",
"stat_value": 2,
} in results
def test_opponent_stats(self):
results = self.game.possessions.opponent_stats
assert len(results) == 434
assert {
"opponent_team_id": 1610612760,
"stat_key": "PenaltyPersonal Fouls",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "PenaltyPersonal Fouls Drawn",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "PenaltyPersonal Take Fouls",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "PenaltyShooting Fouls",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "PenaltyShooting Fouls Drawn",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "101162:AssistsTo:203078:Arc3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "101162:AssistsTo:203078:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "1626162:AssistsTo:203490:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "202322:AssistsTo:101162:AtRim",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "202322:AssistsTo:1626162:Arc3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "202322:AssistsTo:201160:ShortMidRange",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "202322:AssistsTo:202693:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "202322:AssistsTo:203078:AtRim",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "202322:AssistsTo:203078:Corner3",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "202322:AssistsTo:203078:LongMidRange",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "202322:AssistsTo:203078:ShortMidRange",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "202322:AssistsTo:203490:Arc3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "202322:AssistsTo:203490:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "202322:AssistsTo:203490:ShortMidRange",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "202693:AssistsTo:101162:ShortMidRange",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "202693:AssistsTo:203078:Arc3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "203078:AssistsTo:203490:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "203107:AssistsTo:1626162:Arc3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "203107:AssistsTo:201977:Arc3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "203107:AssistsTo:202693:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "203490:AssistsTo:1626162:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "2pt And 1 Free Throw Trips",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "2pt Shooting Foul Free Throw Trips",
"stat_value": 11,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Arc3Assists",
"stat_value": 6,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Arc3DefReboundOpportunities",
"stat_value": 12,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Arc3DefRebounds",
"stat_value": 10,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Arc3OffReboundOpportunities",
"stat_value": 12,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Arc3OffRebounded",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Arc3OffReboundedOpportunities",
"stat_value": 12,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Arc3OffRebounds",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AssistedArc3",
"stat_value": 6,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AssistedAtRim",
"stat_value": 12,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AssistedCorner3",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AssistedLongMidRange",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AssistedShortMidRange",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AtRimAssists",
"stat_value": 12,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AtRimBlocked",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AtRimBlockedDefReboundOpportunities",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AtRimBlockedDefRebounds",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AtRimBlockedOffReboundOpportunities",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AtRimBlockedOffRebounded",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AtRimBlockedOffReboundedOpportunities",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AtRimBlockedOffRebounds",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AtRimDefReboundOpportunities",
"stat_value": 11,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AtRimDefRebounds",
"stat_value": 8,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AtRimOffReboundOpportunities",
"stat_value": 10,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AtRimOffRebounded",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AtRimOffReboundedOpportunities",
"stat_value": 10,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "AtRimOffRebounds",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "BadPassSteals",
"stat_value": 6,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "BadPassTurnovers",
"stat_value": 8,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "BlockedAtRim",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "BlockedAtRimRecovered",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Corner3Assists",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Corner3DefReboundOpportunities",
"stat_value": 6,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Corner3DefRebounds",
"stat_value": 5,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Corner3OffReboundOpportunities",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Corner3OffReboundedOpportunities",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "DeadBallTurnovers",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "DefPoss",
"stat_value": 109,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "FTDefReboundOpportunities",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "FTDefRebounds",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "FTOffReboundOpportunities",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "FTOffReboundedOpportunities",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "FtsMade",
"stat_value": 16,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "FtsMissed",
"stat_value": 9,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "HeaveMisses",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "LongMidRangeAssists",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "LongMidRangeDefReboundOpportunities",
"stat_value": 10,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "LongMidRangeDefRebounds",
"stat_value": 7,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "LongMidRangeOffReboundOpportunities",
"stat_value": 9,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "LongMidRangeOffRebounded",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "LongMidRangeOffReboundedOpportunities",
"stat_value": 9,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "LongMidRangeOffRebounds",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Loose Ball Fouls",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Loose Ball Fouls Drawn",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "LostBallOutOfBoundsTurnovers",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "LostBallSteals",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "LostBallTurnovers",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "MissedArc3",
"stat_value": 13,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "MissedAtRim",
"stat_value": 10,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "MissedCorner3",
"stat_value": 5,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "MissedLongMidRange",
"stat_value": 9,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "MissedShortMidRange",
"stat_value": 13,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "OffPoss",
"stat_value": 108,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "OnFloorOffReb",
"stat_value": 65,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "OpponentPoints",
"stat_value": 126,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Penalty Free Throw Trips",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Personal Block Fouls",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Personal Fouls",
"stat_value": 7,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Personal Fouls Drawn",
"stat_value": 7,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Personal Take Fouls",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Personal Take Fouls Drawn",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "PlusMinus",
"stat_value": -11,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Putbacks",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "SecondsPlayedDef",
"stat_value": 1544,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "SecondsPlayedOff",
"stat_value": 1636,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Shooting Block Fouls",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Shooting Fouls",
"stat_value": 11,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Shooting Fouls Drawn",
"stat_value": 12,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "ShortMidRangeAssists",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "ShortMidRangeBlocked",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "ShortMidRangeBlockedOffReboundOpportunities",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "ShortMidRangeBlockedOffReboundedOpportunities",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "ShortMidRangeDefReboundOpportunities",
"stat_value": 6,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "ShortMidRangeDefRebounds",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "ShortMidRangeOffReboundOpportunities",
"stat_value": 12,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "ShortMidRangeOffRebounded",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "ShortMidRangeOffReboundedOpportunities",
"stat_value": 12,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "ShortMidRangeOffRebounds",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "ShortMidRangeSelfOReb",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "ShotClockViolations",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Technical Free Throw Trips",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Total2ptShotDistance",
"stat_value": 442.3,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Total2ptShotsWithDistance",
"stat_value": 70,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Total3ptShotDistance",
"stat_value": 697.3,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "Total3ptShotsWithDistance",
"stat_value": 28,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "UnassistedArc3",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "UnassistedAtRim",
"stat_value": 10,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "UnassistedLongMidRange",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "UnassistedShortMidRange",
"stat_value": 6,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "SecondChanceDefPoss",
"stat_value": 11,
} in results
assert {
"opponent_team_id": 1610612760,
"stat_key": "SecondChanceOffPoss",
"stat_value": 13,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "1627734:AssistsTo:201566:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "1627734:AssistsTo:203506:LongMidRange",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "201566:AssistsTo:1627734:AtRim",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "201566:AssistsTo:202683:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "201566:AssistsTo:202683:ShortMidRange",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "201566:AssistsTo:203460:Arc3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "201566:AssistsTo:203460:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "201566:AssistsTo:203500:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "201566:AssistsTo:203506:Arc3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "201566:AssistsTo:203506:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "201566:AssistsTo:203530:Arc3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "201627:AssistsTo:202683:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "201627:AssistsTo:203506:Arc3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "201627:AssistsTo:203530:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "201627:AssistsTo:203924:Corner3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "203506:AssistsTo:1627734:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "203506:AssistsTo:1627734:ShortMidRange",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "203506:AssistsTo:201566:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "203506:AssistsTo:201566:ShortMidRange",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "203506:AssistsTo:203460:Corner3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "203506:AssistsTo:203500:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "203530:AssistsTo:201627:Corner3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "203902:AssistsTo:201627:Arc3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "203902:AssistsTo:202683:AtRim",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "203902:AssistsTo:203506:Arc3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "203924:AssistsTo:203506:Arc3",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "2pt And 1 Free Throw Trips",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "2pt Shooting Foul Free Throw Trips",
"stat_value": 11,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Arc3Assists",
"stat_value": 7,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Arc3DefReboundOpportunities",
"stat_value": 12,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Arc3DefRebounds",
"stat_value": 10,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Arc3OffReboundOpportunities",
"stat_value": 12,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Arc3OffRebounded",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Arc3OffReboundedOpportunities",
"stat_value": 12,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Arc3OffRebounds",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AssistedArc3",
"stat_value": 7,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AssistedAtRim",
"stat_value": 14,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AssistedCorner3",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AssistedLongMidRange",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AssistedShortMidRange",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AtRimAssists",
"stat_value": 14,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AtRimBlocked",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AtRimBlockedDefReboundOpportunities",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AtRimBlockedDefRebounds",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AtRimBlockedOffReboundOpportunities",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AtRimBlockedOffRebounded",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AtRimBlockedOffReboundedOpportunities",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AtRimBlockedOffRebounds",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AtRimDefReboundOpportunities",
"stat_value": 10,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AtRimDefRebounds",
"stat_value": 8,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AtRimOffReboundOpportunities",
"stat_value": 11,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AtRimOffRebounded",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AtRimOffReboundedOpportunities",
"stat_value": 11,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AtRimOffRebounds",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "AtRimSelfOReb",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "BadPassOutOfBoundsTurnovers",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "BadPassSteals",
"stat_value": 8,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "BadPassTurnovers",
"stat_value": 6,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "BlockedAtRim",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "BlockedAtRimRecovered",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "BlockedShortMidRange",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "BlockedShortMidRangeRecovered",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Corner3Assists",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Corner3DefReboundOpportunities",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Corner3DefRebounds",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Corner3OffReboundOpportunities",
"stat_value": 6,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Corner3OffRebounded",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Corner3OffReboundedOpportunities",
"stat_value": 6,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Corner3OffRebounds",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "DeadBallTurnovers",
"stat_value": 6,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "DefPoss",
"stat_value": 108,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "DefensiveGoaltends",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "FTDefReboundOpportunities",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "FTDefRebounds",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "FTOffReboundOpportunities",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "FTOffRebounded",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "FTOffReboundedOpportunities",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "FTOffRebounds",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "FtsMade",
"stat_value": 21,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "FtsMissed",
"stat_value": 9,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "LongMidRangeAssists",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "LongMidRangeDefReboundOpportunities",
"stat_value": 9,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "LongMidRangeDefRebounds",
"stat_value": 6,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "LongMidRangeOffReboundOpportunities",
"stat_value": 10,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "LongMidRangeOffRebounded",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "LongMidRangeOffReboundedOpportunities",
"stat_value": 10,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "LongMidRangeOffRebounds",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Loose Ball Fouls",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Loose Ball Fouls Drawn",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "LostBallOutOfBoundsTurnovers",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "LostBallSteals",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "LostBallTurnovers",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "MissedArc3",
"stat_value": 12,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "MissedAtRim",
"stat_value": 11,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "MissedCorner3",
"stat_value": 6,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "MissedLongMidRange",
"stat_value": 11,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "MissedShortMidRange",
"stat_value": 6,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "OffPoss",
"stat_value": 109,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "OnFloorOffReb",
"stat_value": 65,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "OpponentPoints",
"stat_value": 115,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Penalty Free Throw Trips",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Personal Block Fouls Drawn",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Personal Fouls",
"stat_value": 7,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Personal Fouls Drawn",
"stat_value": 7,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Personal Take Fouls",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Personal Take Fouls Drawn",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "PlusMinus",
"stat_value": 11,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Putbacks",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "SecondsPlayedDef",
"stat_value": 1636,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "SecondsPlayedOff",
"stat_value": 1544,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Shooting Block Fouls Drawn",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Shooting Fouls",
"stat_value": 12,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Shooting Fouls Drawn",
"stat_value": 11,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "ShortMidRangeAssists",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "ShortMidRangeBlockedDefReboundOpportunities",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "ShortMidRangeBlockedDefRebounds",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "ShortMidRangeDefReboundOpportunities",
"stat_value": 12,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "ShortMidRangeDefRebounds",
"stat_value": 8,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "ShortMidRangeOffReboundOpportunities",
"stat_value": 6,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "ShortMidRangeOffRebounded",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "ShortMidRangeOffReboundedOpportunities",
"stat_value": 6,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "ShortMidRangeOffRebounds",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "ShotClockViolations",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Technical Free Throw Trips",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Total2ptShotDistance",
"stat_value": 428.5,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Total2ptShotsWithDistance",
"stat_value": 65,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Total3ptShotDistance",
"stat_value": 728.0,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Total3ptShotsWithDistance",
"stat_value": 30,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "Travels",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "UnassistedArc3",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "UnassistedAtRim",
"stat_value": 9,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "UnassistedLongMidRange",
"stat_value": 4,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "UnassistedShortMidRange",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "SecondChanceDefPoss",
"stat_value": 13,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "SecondChanceOffPoss",
"stat_value": 11,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "PenaltyPersonal Fouls",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "PenaltyPersonal Fouls Drawn",
"stat_value": 1,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "PenaltyPersonal Take Fouls Drawn",
"stat_value": 2,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "PenaltyShooting Fouls",
"stat_value": 3,
} in results
assert {
"opponent_team_id": 1610612764,
"stat_key": "PenaltyShooting Fouls Drawn",
"stat_value": 4,
} in results
def test_player_stats(self):
results = self.game.possessions.player_stats
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "PenaltyPersonal Take Fouls Drawn",
"stat_value": 2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "OffPoss",
"stat_value": 87,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "DefPoss",
"stat_value": 88,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "SecondsPlayedDef",
"stat_value": 1288.0,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "SecondsPlayedOff",
"stat_value": 1179.0,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "PlusMinus",
"stat_value": 4,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "FtsMade",
"stat_value": 9,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "TechFtsMade",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "AssistedAtRim",
"stat_value": 2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "AssistedShortMidRange",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "UnassistedArc3",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "UnassistedAtRim",
"stat_value": 4,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "UnassistedLongMidRange",
"stat_value": 3,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "UnassistedShortMidRange",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "MissedArc3",
"stat_value": 4,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "MissedAtRim",
"stat_value": 8,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "MissedCorner3",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "MissedLongMidRange",
"stat_value": 6,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "MissedShortMidRange",
"stat_value": 3,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "AtRimBlocked",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Putbacks",
"stat_value": 2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Total2ptShotDistance",
"stat_value": 225.0,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Total2ptShotsWithDistance",
"stat_value": 29,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Total3ptShotDistance",
"stat_value": 148.3,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Total3ptShotsWithDistance",
"stat_value": 6,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Arc3Assists",
"stat_value": 3,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "AtRimAssists",
"stat_value": 7,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "ShortMidRangeAssists",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "2pt And 1 Free Throw Trips",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "2pt Shooting Foul Free Throw Trips",
"stat_value": 2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Penalty Free Throw Trips",
"stat_value": 2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Technical Free Throw Trips",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Personal Block Fouls Drawn",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Personal Fouls Drawn",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Personal Fouls",
"stat_value": 2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Personal Take Fouls Drawn",
"stat_value": 2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Personal Take Fouls",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Shooting Block Fouls Drawn",
"stat_value": 2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Shooting Fouls Drawn",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "DeadBallTurnovers",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "LostBallTurnovers",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "BadPassTurnovers",
"stat_value": 3,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "LostBallSteals",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "BadPassSteals",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "ShortMidRangeOffRebounded",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "ShortMidRangeOffReboundedOpportunities",
"stat_value": 3,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "LongMidRangeOffRebounded",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "LongMidRangeOffReboundedOpportunities",
"stat_value": 6,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Corner3DefReboundOpportunities",
"stat_value": 2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Corner3OffReboundedOpportunities",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "AtRimOffRebounded",
"stat_value": 3,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "AtRimOffReboundedOpportunities",
"stat_value": 8,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "AtRimBlockedOffRebounded",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "AtRimBlockedOffReboundedOpportunities",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Arc3OffReboundedOpportunities",
"stat_value": 4,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Arc3DefReboundOpportunities",
"stat_value": 10,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Arc3DefRebounds",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Arc3OffReboundOpportunities",
"stat_value": 9,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Arc3OffRebounds",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "AtRimBlockedDefReboundOpportunities",
"stat_value": 2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "AtRimBlockedDefRebounds",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "AtRimBlockedOffReboundOpportunities",
"stat_value": 3,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "AtRimDefReboundOpportunities",
"stat_value": 8,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "AtRimDefRebounds",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "AtRimOffReboundOpportunities",
"stat_value": 9,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "AtRimOffRebounds",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Corner3OffReboundOpportunities",
"stat_value": 6,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "FTDefReboundOpportunities",
"stat_value": 4,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "FTDefRebounds",
"stat_value": 3,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "FTOffReboundOpportunities",
"stat_value": 4,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "FTOffRebounds",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "LongMidRangeDefReboundOpportunities",
"stat_value": 7,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "LongMidRangeDefRebounds",
"stat_value": 3,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "LongMidRangeOffReboundOpportunities",
"stat_value": 9,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "LongMidRangeOffRebounds",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "ShortMidRangeBlockedDefReboundOpportunities",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "ShortMidRangeDefReboundOpportunities",
"stat_value": 5,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "ShortMidRangeDefRebounds",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "ShortMidRangeOffReboundOpportunities",
"stat_value": 5,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "OnFloorOffReb",
"stat_value": 12,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Period1Fouls0SecondsPlayedDef",
"stat_value": 193,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Period1Fouls0SecondsPlayedOff",
"stat_value": 178,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Period1Fouls1SecondsPlayedDef",
"stat_value": 77,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Period1Fouls1SecondsPlayedOff",
"stat_value": 67,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Period2Fouls1SecondsPlayedDef",
"stat_value": 144,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Period2Fouls1SecondsPlayedOff",
"stat_value": 155,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Period2Fouls2SecondsPlayedDef",
"stat_value": 111,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Period2Fouls2SecondsPlayedOff",
"stat_value": 114,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Period3Fouls2SecondsPlayedDef",
"stat_value": 273,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Period3Fouls2SecondsPlayedOff",
"stat_value": 307,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Period4Fouls2SecondsPlayedDef",
"stat_value": 335,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "Period4Fouls2SecondsPlayedOff",
"stat_value": 213,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "PeriodOTFouls2SecondsPlayedDef",
"stat_value": 141,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "PeriodOTFouls2SecondsPlayedOff",
"stat_value": 141,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "PeriodOTFouls3SecondsPlayedDef",
"stat_value": 14,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "PeriodOTFouls3SecondsPlayedOff",
"stat_value": 4,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "SecondChanceDefPoss",
"stat_value": 9,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "SecondChanceOffPoss",
"stat_value": 10,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "SecondChanceSecondsPlayedDef",
"stat_value": 93,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "SecondChanceSecondsPlayedOff",
"stat_value": 47,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "SecondChanceBadPassOutOfBoundsTurnovers",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "SecondChanceMissedAtRim",
"stat_value": 2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "SecondChanceUnassistedAtRim",
"stat_value": 2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "PenaltyDefPoss",
"stat_value": 35,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "PenaltyOffPoss",
"stat_value": 17,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "PenaltyBadPassTurnovers",
"stat_value": 2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "PenaltyLostBallSteals",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "PenaltyUnassistedAtRim",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "PenaltyAssistedAtRim",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "PenaltyMissedArc3",
"stat_value": 1,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "PenaltyFtsMade",
"stat_value": 5,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "FinalMinutePenaltyTakeFoulOffPoss",
"stat_value": 2,
} in results
assert {
"player_id": 201566,
"team_id": 1610612760,
"stat_key": "FinalMinutePenaltyTakeFoulFtsMade",
"stat_value": 4,
} in results
def test_lineup_stats(self):
results = self.game.possessions.lineup_stats
assert {
"lineup_id": "1627734-201566-203460-203500-203506",
"team_id": 1610612760,
"stat_key": "OffPoss",
"stat_value": 19,
} in results
assert {
"lineup_id": "1627734-201566-203460-203500-203506",
"team_id": 1610612760,
"stat_key": "DefPoss",
"stat_value": 19,
} in results
assert {
"lineup_id": "1627734-201566-203460-203500-203506",
"team_id": 1610612760,
"stat_key": "SecondsPlayedDef",
"stat_value": 358,
} in results
assert {
"lineup_id": "1627734-201566-203460-203500-203506",
"team_id": 1610612760,
"stat_key": "SecondsPlayedOff",
"stat_value": 313,
} in results
assert {
"lineup_id": "1627734-201566-203460-203500-203506",
"team_id": 1610612760,
"stat_key": "PlusMinus",
"stat_value": 4,
} in results
def test_lineup_opponent_stats(self):
results = self.game.possessions.lineup_opponent_stats
assert {
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"opponent_team_id": 1610612760,
"stat_key": "OffPoss",
"stat_value": 19,
} in results
assert {
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"opponent_team_id": 1610612760,
"stat_key": "DefPoss",
"stat_value": 19,
} in results
assert {
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"opponent_team_id": 1610612760,
"stat_key": "SecondsPlayedDef",
"stat_value": 313,
} in results
assert {
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"opponent_team_id": 1610612760,
"stat_key": "SecondsPlayedOff",
"stat_value": 358,
} in results
assert {
"opponent_lineup_id": "1627734-201566-203460-203500-203506",
"opponent_team_id": 1610612760,
"stat_key": "PlusMinus",
"stat_value": -4,
} in results
| 32.533109
| 88
| 0.50295
| 11,758
| 135,598
| 5.538357
| 0.029767
| 0.072328
| 0.166539
| 0.126536
| 0.971744
| 0.962761
| 0.954699
| 0.94934
| 0.912254
| 0.808876
| 0
| 0.181119
| 0.388951
| 135,598
| 4,167
| 89
| 32.540917
| 0.604813
| 0
| 0
| 0.960945
| 0
| 0
| 0.335765
| 0.105842
| 0
| 0
| 0
| 0
| 0.183462
| 1
| 0.003616
| false
| 0.003857
| 0.000482
| 0
| 0.005063
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
798f9b25c251c3f82ed0e6be51afe5f7c202ae62
| 43,290
|
py
|
Python
|
Testing/Python/TestNodeSetsByGeometry.py
|
Numerics88/vtkbone
|
5a6ab2870679e9e7ea51926c34911607b9d85235
|
[
"MIT"
] | 3
|
2017-04-04T04:59:22.000Z
|
2022-03-13T11:22:40.000Z
|
Testing/Python/TestNodeSetsByGeometry.py
|
Numerics88/vtkbone
|
5a6ab2870679e9e7ea51926c34911607b9d85235
|
[
"MIT"
] | 5
|
2017-04-06T19:46:39.000Z
|
2019-12-11T23:41:41.000Z
|
Testing/Python/TestNodeSetsByGeometry.py
|
Numerics88/vtkbone
|
5a6ab2870679e9e7ea51926c34911607b9d85235
|
[
"MIT"
] | 2
|
2017-04-29T20:54:57.000Z
|
2017-04-29T22:28:10.000Z
|
from __future__ import division
import sys
import numpy
from numpy.core import *
import vtk
from vtk.util.numpy_support import vtk_to_numpy, numpy_to_vtk
import vtkbone
import test_geometries
import traceback
import unittest
class TestNodeSetsByGeometry (unittest.TestCase):
def test_DetermineMaterialBounds (self):
    """DetermineMaterialBounds fills `bounds` (xmin,xmax,ymin,ymax,zmin,zmax)
    for the whole model (material ID -1) or for a single material ID.

    Uses numpy.array_equal instead of the removed numpy.core `alltrue`
    (deprecated since NumPy 1.25, removed in NumPy 2.0).
    """
    geometry = test_geometries.generate_quasi_donut_geometry_two_materials()
    bounds = numpy.zeros(6, float)
    # Material ID -1 means "all materials": full model bounds.
    vtkbone.vtkboneNodeSetsByGeometry.DetermineMaterialBounds(geometry, bounds, -1)
    self.assertTrue(numpy.array_equal(bounds, [0.0, 5.0, 0.0, 5.0, 0.0, 3.0]))
    # Material 1 has 0 < x < 2
    vtkbone.vtkboneNodeSetsByGeometry.DetermineMaterialBounds(geometry, bounds, 1)
    self.assertTrue(numpy.array_equal(bounds, [0.0, 2.0, 0.0, 5.0, 0.0, 3.0]))
    # Material 2 has 2 < x < 5
    vtkbone.vtkboneNodeSetsByGeometry.DetermineMaterialBounds(geometry, bounds, 2)
    self.assertTrue(numpy.array_equal(bounds, [2.0, 5.0, 0.0, 5.0, 0.0, 3.0]))
    # Repeat with the same geometry shifted away from the origin.
    geometry2 = test_geometries.generate_quasi_donut_geometry_two_materials_offset()
    # Flip this to 1 to write the model out for visual inspection.
    if 0:
        writer = vtk.vtkXMLUnstructuredGridWriter()
        writer.SetInput(geometry2)
        writer.SetFileName("geometry2.vtu")
        writer.Update()
    vtkbone.vtkboneNodeSetsByGeometry.DetermineMaterialBounds(geometry2, bounds, -1)
    self.assertTrue(numpy.array_equal(bounds, [0.5, 3.0, 1.0, 6.0, -0.5, 5.5]))
def test_FindNodesOnPlane (self):
    """FindNodesOnPlane selects node ids lying on an axis-aligned plane
    (axis 0/1/2 = X/Y/Z at the given coordinate), optionally restricted
    to nodes belonging to elements of one material ID (-1 = all).

    Rewritten data-driven: the original repeated the same four-line
    pattern 17 times and compared with numpy.core's `alltrue`, which was
    removed in NumPy 2.0; comparisons now use numpy.array_equal.
    """
    geometry = test_geometries.generate_quasi_donut_geometry_two_materials_offset()

    def find_nodes(axis, value, material):
        # Run FindNodesOnPlane and return the selected ids, sorted.
        ids_vtk = vtk.vtkIdTypeArray()
        vtkbone.vtkboneNodeSetsByGeometry.FindNodesOnPlane(
            axis, value, ids_vtk, geometry, material)
        return numpy.sort(vtk_to_numpy(ids_vtk))

    def check(axis, value, material, expected):
        # expected=None means "no nodes selected for this material".
        ids = find_nodes(axis, value, material)
        if expected is None:
            self.assertEqual(len(ids), 0)
        else:
            self.assertTrue(numpy.array_equal(ids, numpy.sort(numpy.array(expected))))

    # X min surface: entirely material 1, so -1 and 1 agree, 2 is empty.
    x_min_ids = (26, 62, 98, 130,
                 20, 56, 92, 124,
                 16, 50, 86, 120,
                 12, 44, 80, 116,
                 6, 38, 74, 110,
                 0, 32, 68, 104)
    check(0, 0.5, -1, x_min_ids)
    check(0, 0.5, 1, x_min_ids)
    check(0, 0.5, 2, None)
    # X max surface: entirely material 2, so material 1 selects nothing.
    check(0, 3.0, -1, (31, 67, 103, 135,
                       25, 61, 97, 129,
                       19, 55, 91, 123,
                       15, 49, 85, 119,
                       11, 43, 79, 115,
                       5, 37, 73, 109))
    check(0, 3.0, 1, None)
    # Y min surface: materials 1 and 2 split it, sharing the x=2 column.
    check(1, 1.0, -1, (0, 1, 2, 3, 4, 5,
                       32, 33, 34, 35, 36, 37,
                       68, 69, 70, 71, 72, 73,
                       104, 105, 106, 107, 108, 109))
    check(1, 1.0, 1, (0, 1, 2,
                      32, 33, 34,
                      68, 69, 70,
                      104, 105, 106))
    check(1, 1.0, 2, (2, 3, 4, 5,
                      34, 35, 36, 37,
                      70, 71, 72, 73,
                      106, 107, 108, 109))
    # Y max surface.
    check(1, 6.0, -1, (26, 27, 28, 29, 30, 31,
                       62, 63, 64, 65, 66, 67,
                       98, 99, 100, 101, 102, 103,
                       130, 131, 132, 133, 134, 135))
    check(1, 6.0, 1, (26, 27, 28,
                      62, 63, 64,
                      98, 99, 100,
                      130, 131, 132))
    check(1, 6.0, 2, (28, 29, 30, 31,
                      64, 65, 66, 67,
                      100, 101, 102, 103,
                      132, 133, 134, 135))
    # Z min surface (the donut hole shortens the middle rows).
    check(2, -0.5, -1, (26, 27, 28, 29, 30, 31,
                        20, 21, 22, 23, 24, 25,
                        16, 17, 18, 19,
                        12, 13, 14, 15,
                        6, 7, 8, 9, 10, 11,
                        0, 1, 2, 3, 4, 5))
    check(2, -0.5, 1, (26, 27, 28,
                       20, 21, 22,
                       16, 17,
                       12, 13,
                       6, 7, 8,
                       0, 1, 2))
    check(2, -0.5, 2, (28, 29, 30, 31,
                       22, 23, 24, 25,
                       18, 19,
                       14, 15,
                       8, 9, 10, 11,
                       2, 3, 4, 5))
    # Z max surface.
    check(2, 5.5, -1, (130, 131, 132, 133, 134, 135,
                       124, 125, 126, 127, 128, 129,
                       120, 121, 122, 123,
                       116, 117, 118, 119,
                       110, 111, 112, 113, 114, 115,
                       104, 105, 106, 107, 108, 109))
    check(2, 5.5, 1, (130, 131, 132,
                      124, 125, 126,
                      120, 121,
                      116, 117,
                      110, 111, 112,
                      104, 105, 106))
    check(2, 5.5, 2, (132, 133, 134, 135,
                      126, 127, 128, 129,
                      122, 123,
                      118, 119,
                      112, 113, 114, 115,
                      106, 107, 108, 109))
def test_FindNodesIntersectingTwoPlanes (self):
    """FindNodesIntersectingTwoPlanes selects node ids on the edge where
    two axis-aligned planes meet, optionally restricted to one material
    ID (-1 = all materials).

    Rewritten data-driven: the original repeated the same call/sort/
    compare pattern 37 times and used numpy.core's `alltrue`, removed
    in NumPy 2.0; comparisons now use numpy.array_equal.
    """
    geometry = test_geometries.generate_quasi_donut_geometry_two_materials_offset()

    def find_nodes(axis_a, value_a, axis_b, value_b, material):
        # Run FindNodesIntersectingTwoPlanes and return the ids, sorted.
        ids_vtk = vtk.vtkIdTypeArray()
        vtkbone.vtkboneNodeSetsByGeometry.FindNodesIntersectingTwoPlanes(
            axis_a, value_a, axis_b, value_b, ids_vtk, geometry, material)
        return numpy.sort(vtk_to_numpy(ids_vtk))

    def check(axis_a, value_a, axis_b, value_b, material, expected):
        # expected=None means "no nodes selected for this material".
        ids = find_nodes(axis_a, value_a, axis_b, value_b, material)
        if expected is None:
            self.assertEqual(len(ids), 0)
        else:
            self.assertTrue(numpy.array_equal(ids, numpy.sort(numpy.array(expected))))

    # X/Y edges: X min edges are all material 1, X max edges all material 2.
    check(0, 0.5, 1, 1.0, -1, (0, 32, 68, 104))
    check(0, 0.5, 1, 1.0, 1, (0, 32, 68, 104))
    check(0, 0.5, 1, 1.0, 2, None)
    check(0, 3.0, 1, 1.0, -1, (5, 37, 73, 109))
    check(0, 3.0, 1, 1.0, 1, None)
    check(0, 3.0, 1, 1.0, 2, (5, 37, 73, 109))
    check(0, 0.5, 1, 6.0, -1, (26, 62, 98, 130))
    check(0, 0.5, 1, 6.0, 1, (26, 62, 98, 130))
    check(0, 0.5, 1, 6.0, 2, None)
    check(0, 3.0, 1, 6.0, -1, (31, 67, 103, 135))
    check(0, 3.0, 1, 6.0, 1, None)
    check(0, 3.0, 1, 6.0, 2, (31, 67, 103, 135))
    # X/Z edges: same material split as the X/Y edges.
    check(0, 0.5, 2, -0.5, -1, (26, 20, 16, 12, 6, 0))
    check(0, 0.5, 2, -0.5, 1, (26, 20, 16, 12, 6, 0))
    check(0, 0.5, 2, -0.5, 2, None)
    check(0, 3.0, 2, -0.5, -1, (31, 25, 19, 15, 11, 5))
    check(0, 3.0, 2, -0.5, 1, None)
    check(0, 3.0, 2, -0.5, 2, (31, 25, 19, 15, 11, 5))
    check(0, 0.5, 2, 5.5, -1, (130, 124, 120, 116, 110, 104))
    check(0, 0.5, 2, 5.5, 1, (130, 124, 120, 116, 110, 104))
    check(0, 0.5, 2, 5.5, 2, None)
    check(0, 3.0, 2, 5.5, -1, (135, 129, 123, 119, 115, 109))
    check(0, 3.0, 2, 5.5, 1, None)
    check(0, 3.0, 2, 5.5, 2, (135, 129, 123, 119, 115, 109))
    # Y/Z edges: both materials contribute, overlapping at x=2 (id column 2).
    check(1, 1.0, 2, -0.5, -1, (0, 1, 2, 3, 4, 5))
    check(1, 1.0, 2, -0.5, 1, (0, 1, 2))
    check(1, 1.0, 2, -0.5, 2, (2, 3, 4, 5))
    # Axis order must not matter: same edge given as (Z, Y).
    check(2, -0.5, 1, 1.0, 2, (2, 3, 4, 5))
    check(1, 6.0, 2, -0.5, -1, (26, 27, 28, 29, 30, 31))
    check(1, 6.0, 2, -0.5, 1, (26, 27, 28))
    check(1, 6.0, 2, -0.5, 2, (28, 29, 30, 31))
    check(1, 1.0, 2, 5.5, -1, (104, 105, 106, 107, 108, 109))
    check(1, 1.0, 2, 5.5, 1, (104, 105, 106))
    check(1, 1.0, 2, 5.5, 2, (106, 107, 108, 109))
    check(1, 6.0, 2, 5.5, -1, (130, 131, 132, 133, 134, 135))
    check(1, 6.0, 2, 5.5, 1, (130, 131, 132))
    check(1, 6.0, 2, 5.5, 2, (132, 133, 134, 135))
def test_FindNodesIntersectingThreePlanes (self):
    """FindNodesIntersectingThreePlanes selects the single corner node
    where three axis-aligned planes meet, optionally restricted to one
    material ID (-1 = all materials).

    Rewritten data-driven: the original repeated the same call/sort/
    compare pattern 26 times and used numpy.core's `alltrue`, removed
    in NumPy 2.0; comparisons now use numpy.array_equal.
    """
    geometry = test_geometries.generate_quasi_donut_geometry_two_materials_offset()

    def find_nodes(axis_a, value_a, axis_b, value_b, axis_c, value_c, material):
        # Run FindNodesIntersectingThreePlanes and return the ids, sorted.
        ids_vtk = vtk.vtkIdTypeArray()
        vtkbone.vtkboneNodeSetsByGeometry.FindNodesIntersectingThreePlanes(
            axis_a, value_a, axis_b, value_b, axis_c, value_c,
            ids_vtk, geometry, material)
        return numpy.sort(vtk_to_numpy(ids_vtk))

    def check(axis_a, value_a, axis_b, value_b, axis_c, value_c, material, expected):
        # expected=None means "no nodes selected for this material".
        ids = find_nodes(axis_a, value_a, axis_b, value_b, axis_c, value_c, material)
        if expected is None:
            self.assertEqual(len(ids), 0)
        else:
            self.assertTrue(numpy.array_equal(ids, numpy.sort(numpy.array(expected))))

    # X min corners belong to material 1, X max corners to material 2.
    check(0, 0.5, 1, 1.0, 2, -0.5, -1, (0,))
    check(0, 0.5, 1, 1.0, 2, -0.5, 1, (0,))
    check(0, 0.5, 1, 1.0, 2, -0.5, 2, None)
    check(0, 3.0, 1, 1.0, 2, -0.5, -1, (5,))
    check(0, 3.0, 1, 1.0, 2, -0.5, 1, None)
    check(0, 3.0, 1, 1.0, 2, -0.5, 2, (5,))
    check(0, 0.5, 1, 6.0, 2, -0.5, -1, (26,))
    check(0, 0.5, 1, 6.0, 2, -0.5, 1, (26,))
    check(0, 0.5, 1, 6.0, 2, -0.5, 2, None)
    check(0, 3.0, 1, 6.0, 2, -0.5, -1, (31,))
    check(0, 3.0, 1, 6.0, 2, -0.5, 1, None)
    check(0, 3.0, 1, 6.0, 2, -0.5, 2, (31,))
    check(0, 0.5, 1, 1.0, 2, 5.5, -1, (104,))
    check(0, 0.5, 1, 1.0, 2, 5.5, 1, (104,))
    check(0, 0.5, 1, 1.0, 2, 5.5, 2, None)
    check(0, 3.0, 1, 1.0, 2, 5.5, -1, (109,))
    # Axis order must not matter: same corner given in two other orders.
    check(1, 1.0, 2, 5.5, 0, 3.0, -1, (109,))
    check(2, 5.5, 0, 3.0, 1, 1.0, -1, (109,))
    check(0, 3.0, 1, 1.0, 2, 5.5, 1, None)
    check(0, 3.0, 1, 1.0, 2, 5.5, 2, (109,))
    check(0, 0.5, 1, 6.0, 2, 5.5, -1, (130,))
    check(0, 0.5, 1, 6.0, 2, 5.5, 1, (130,))
    check(0, 0.5, 1, 6.0, 2, 5.5, 2, None)
    check(0, 3.0, 1, 6.0, 2, 5.5, -1, (135,))
    check(0, 3.0, 1, 6.0, 2, 5.5, 1, None)
    check(0, 3.0, 1, 6.0, 2, 5.5, 2, (135,))
# static void FindNodesOnVisibleSurface(
# vtkIdTypeArray *visibleNodesIds,
# vtkUnstructuredGrid *ug,
# double normalVector[3],
# int specificMaterial = -1);
def _check_visible_surface_nodes(self, geometry, normal_vector, specific_material, expected_ids):
    """Assert that FindNodesOnVisibleSurface(geometry, normal_vector,
    specific_material) finds exactly expected_ids (order-insensitive).
    An empty expected_ids tuple asserts that no nodes are found."""
    ids_vtk = vtk.vtkIdTypeArray()
    vtkbone.vtkboneNodeSetsByGeometry.FindNodesOnVisibleSurface(
        ids_vtk, geometry, normal_vector, specific_material)
    if len(expected_ids) == 0:
        self.assertEqual(ids_vtk.GetNumberOfTuples(), 0)
    else:
        ids = sort(vtk_to_numpy(ids_vtk))
        self.assertTrue(alltrue(ids == sort(array(expected_ids))))

def test_FindNodesOnVisibleSurface(self):
    """Exercise FindNodesOnVisibleSurface on each of the six axis-aligned
    faces of the quasi-donut test geometry, for all materials (-1) and for
    each specific material ID (1 and 2).  Expected ids are taken from the
    known node numbering of the test geometry."""
    geometry = test_geometries.generate_quasi_donut_geometry_two_materials_offset()
    x_min = (26, 62, 98, 130, 20, 56, 92, 124, 16, 50, 86, 120,
             12, 44, 80, 116, 6, 38, 74, 110, 0, 32, 68, 104)
    x_max = (31, 67, 103, 135, 25, 61, 97, 129, 19, 55, 91, 123,
             15, 49, 85, 119, 11, 43, 79, 115, 5, 37, 73, 109)
    y_min = (0, 1, 2, 3, 4, 5, 32, 33, 34, 35, 36, 37,
             68, 69, 70, 71, 72, 73, 104, 105, 106, 107, 108, 109)
    y_max = (26, 27, 28, 29, 30, 31, 62, 63, 64, 65, 66, 67,
             98, 99, 100, 101, 102, 103, 130, 131, 132, 133, 134, 135)
    z_min = (26, 27, 28, 29, 30, 31, 20, 21, 22, 23, 24, 25,
             16, 17, 52, 53, 18, 19, 12, 13, 46, 47, 14, 15,
             6, 7, 8, 9, 10, 11, 0, 1, 2, 3, 4, 5)
    z_max = (130, 131, 132, 133, 134, 135, 124, 125, 126, 127, 128, 129,
             120, 121, 88, 89, 122, 123, 116, 117, 82, 83, 118, 119,
             110, 111, 112, 113, 114, 115, 104, 105, 106, 107, 108, 109)
    cases = (
        # (normal vector, material ID (-1 = all elements), expected node ids)
        ((-1, 0, 0), -1, x_min),
        ((-1, 0, 0), 1, x_min),   # X min face lies entirely in material 1
        ((-1, 0, 0), 2, ()),
        ((1, 0, 0), -1, x_max),
        ((1, 0, 0), 1, ()),
        ((1, 0, 0), 2, x_max),    # X max face lies entirely in material 2
        ((0, -1, 0), -1, y_min),
        ((0, -1, 0), 1, (0, 1, 2, 32, 33, 34, 68, 69, 70, 104, 105, 106)),
        ((0, -1, 0), 2, (2, 3, 4, 5, 34, 35, 36, 37,
                         70, 71, 72, 73, 106, 107, 108, 109)),
        ((0, 1, 0), -1, y_max),
        ((0, 1, 0), 1, (26, 27, 28, 62, 63, 64, 98, 99, 100, 130, 131, 132)),
        ((0, 1, 0), 2, (28, 29, 30, 31, 64, 65, 66, 67,
                        100, 101, 102, 103, 132, 133, 134, 135)),
        ((0, 0, -1), -1, z_min),
        ((0, 0, -1), 1, (26, 27, 28, 20, 21, 22, 16, 17, 52,
                         12, 13, 46, 6, 7, 8, 0, 1, 2)),
        ((0, 0, -1), 2, (28, 29, 30, 31, 22, 23, 24, 25, 52, 53, 18, 19,
                         46, 47, 14, 15, 8, 9, 10, 11, 2, 3, 4, 5)),
        ((0, 0, 1), -1, z_max),
        ((0, 0, 1), 1, (130, 131, 132, 124, 125, 126, 120, 121, 88,
                        116, 117, 82, 110, 111, 112, 104, 105, 106)),
        ((0, 0, 1), 2, (132, 133, 134, 135, 126, 127, 128, 129,
                        88, 89, 122, 123, 82, 83, 118, 119,
                        112, 113, 114, 115, 106, 107, 108, 109)),
    )
    for normal_vector, material, expected in cases:
        self._check_visible_surface_nodes(geometry, normal_vector, material, expected)
# Standard unittest entry point when this file is run as a script.
if __name__ == '__main__':
    unittest.main()
| 49.193182
| 100
| 0.534119
| 4,777
| 43,290
| 4.702114
| 0.044798
| 0.078533
| 0.039266
| 0.100347
| 0.95597
| 0.952898
| 0.950361
| 0.948891
| 0.948268
| 0.93745
| 0
| 0.08874
| 0.365881
| 43,290
| 879
| 101
| 49.249147
| 0.729518
| 0.086094
| 0
| 0.721286
| 0
| 0
| 0.000532
| 0
| 0
| 0
| 0
| 0
| 0.156202
| 1
| 0.007657
| false
| 0
| 0.015314
| 0
| 0.024502
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
79917092798277c1bd09a737f78342311dbc1f78
| 125
|
py
|
Python
|
6 kyu/Mutual Recursion.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | 6
|
2020-09-03T09:32:25.000Z
|
2020-12-07T04:10:01.000Z
|
6 kyu/Mutual Recursion.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | 1
|
2021-12-13T15:30:21.000Z
|
2021-12-13T15:30:21.000Z
|
6 kyu/Mutual Recursion.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | null | null | null |
def f(n):
    """F term of the mutually recursive pair (matches the Hofstadter
    Female sequence recurrence): F(0) = 1, F(n) = n - M(F(n-1))."""
    return 1 if n == 0 else n - m(f(n - 1))


def m(n):
    """M term of the mutually recursive pair (matches the Hofstadter
    Male sequence recurrence): M(0) = 0, M(n) = n - F(M(n-1))."""
    return 0 if n == 0 else n - f(m(n - 1))
| 15.625
| 22
| 0.432
| 28
| 125
| 1.928571
| 0.285714
| 0.388889
| 0.148148
| 0.185185
| 0.407407
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0.376
| 125
| 8
| 23
| 15.625
| 0.615385
| 0
| 0
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.75
| 0
| 1
| 0
| 1
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
799810384fbb047746f96d8b8fd5cb2f2b9d4c32
| 6,986
|
py
|
Python
|
test/graph_test.py
|
adgirish/ursa
|
c14fccacb81efd33e86453f979cb4ec799aa8a3a
|
[
"Apache-2.0"
] | null | null | null |
test/graph_test.py
|
adgirish/ursa
|
c14fccacb81efd33e86453f979cb4ec799aa8a3a
|
[
"Apache-2.0"
] | null | null | null |
test/graph_test.py
|
adgirish/ursa
|
c14fccacb81efd33e86453f979cb4ec799aa8a3a
|
[
"Apache-2.0"
] | null | null | null |
import ursa
import pytest
import ray
# Start the Ray runtime once at import time so all tests share one local cluster.
ray.init()
# NOTE(review): this helper was decorated with @pytest.fixture, but every test
# in this file calls init_test() directly.  Calling a fixture function directly
# is an error in pytest >= 4 ("Fixtures are not meant to be called directly"),
# so the decorator is removed; behavior for all existing callers is unchanged.
def init_test():
    """Create and return a fresh remote ursa Graph actor (graph id 0)."""
    return ursa.graph.Graph.remote(0)
def test_simple_insert():
    """Insert one row with no edges; read back the value and both edge sets."""
    graph = init_test()
    key = "Key1"
    oid = "Value1"
    local_edges = set()
    foreign_edges = {}
    transaction_id = 0
    graph.insert.remote(key, oid, local_edges, foreign_edges, transaction_id)
    # Double ray.get: select_* returns a list whose first element is itself an
    # object ref (presumably the stored row value) -- confirm in ursa.graph.
    assert(ray.get(ray.get(
        graph.select_row.remote(transaction_id, key))[0]) == "Value1")
    assert(ray.get(ray.get(
        graph.select_local_edges.remote(transaction_id, key))[0]) == set())
    assert(ray.get(
        graph.select_foreign_edges.remote(transaction_id, key))[0] == {})


def test_insert_with_local_edges():
    """Insert a row carrying local edges and verify they round-trip."""
    graph = init_test()
    key = "Key1"
    oid = "Value1"
    local_edges = set(["Key2", "Key3"])
    foreign_edges = {}
    transaction_id = 0
    graph.insert.remote(key, oid, local_edges, foreign_edges, transaction_id)
    assert(ray.get(ray.get(
        graph.select_row.remote(transaction_id, key))[0]) == "Value1")
    assert(ray.get(ray.get(graph.select_local_edges.remote(
        transaction_id, key))[0]) == set(["Key2", "Key3"]))
    assert(ray.get(
        graph.select_foreign_edges.remote(transaction_id, key))[0] == {})


def test_insert_with_foreign_edges():
    """Insert a row carrying a foreign edge; it is stored as a set per graph."""
    graph = init_test()
    key = "Key1"
    oid = "Value1"
    local_edges = set()
    foreign_edges = {"Other Graph": "Other Key"}
    transaction_id = 0
    graph.insert.remote(key, oid, local_edges, foreign_edges, transaction_id)
    assert(ray.get(ray.get(
        graph.select_row.remote(transaction_id, key))[0]) == "Value1")
    assert(ray.get(ray.get(
        graph.select_local_edges.remote(transaction_id, key))[0]) == set())
    # The single foreign key is normalized into a set on insert.
    assert(ray.get(ray.get(graph.select_foreign_edges.remote(
        transaction_id, key))[0]["Other Graph"]) == set(["Other Key"]))


def test_insert_with_local_and_foreign_edges():
    """Insert a row carrying both local and foreign edges at once."""
    graph = init_test()
    key = "Key1"
    oid = "Value1"
    local_edges = set(["Key2", "Key3"])
    foreign_edges = {"Other Graph": "Other Key"}
    transaction_id = 0
    graph.insert.remote(key, oid, local_edges, foreign_edges, transaction_id)
    assert(ray.get(ray.get(
        graph.select_row.remote(transaction_id, key))[0]) == "Value1")
    assert(ray.get(ray.get(
        graph.select_local_edges.remote(transaction_id, key))[0]) ==
        set(["Key2", "Key3"]))
    assert(ray.get(ray.get(graph.select_foreign_edges.remote(
        transaction_id, key))[0]["Other Graph"]) == set(["Other Key"]))
def test_add_single_local_key():
    """Add one local edge to an existing row after insert."""
    graph = init_test()
    key = "Key1"
    oid = "Value1"
    local_edges = set()
    foreign_edges = {}
    transaction_id = 0
    graph.insert.remote(key, oid, local_edges, foreign_edges, transaction_id)
    graph.add_local_edges.remote(transaction_id, key, "Key2")
    assert(ray.get(ray.get(
        graph.select_local_edges.remote(transaction_id, key))[0]) ==
        set(["Key2"]))


def test_add_multiple_local_edges():
    """Add several local edges in one varargs call."""
    graph = init_test()
    key = "Key1"
    oid = "Value1"
    local_edges = set()
    foreign_edges = {}
    transaction_id = 0
    graph.insert.remote(key, oid, local_edges, foreign_edges, transaction_id)
    graph.add_local_edges.remote(transaction_id, key, "Key2", "Key3", "Key4")
    assert(ray.get(ray.get(
        graph.select_local_edges.remote(transaction_id, key))[0]) ==
        set(["Key2", "Key3", "Key4"]))


def test_add_single_foreign_key():
    """Add one foreign edge (keyed by the other graph's name) after insert."""
    graph = init_test()
    key = "Key1"
    oid = "Value1"
    local_edges = set()
    foreign_edges = {}
    transaction_id = 0
    graph.insert.remote(key, oid, local_edges, foreign_edges, transaction_id)
    graph.add_foreign_edges.remote(
        transaction_id, key, "Other Graph", "Other Key1")
    assert(ray.get(ray.get(graph.select_foreign_edges.remote(
        transaction_id, key))[0]["Other Graph"]) == set(["Other Key1"]))


def test_add_multiple_foreign_edges():
    """Add several foreign edges for the same remote graph in one call."""
    graph = init_test()
    key = "Key1"
    oid = "Value1"
    local_edges = set()
    foreign_edges = {}
    transaction_id = 0
    graph.insert.remote(key, oid, local_edges, foreign_edges, transaction_id)
    graph.add_foreign_edges.remote(
        transaction_id, key, "Other Graph", "Other Key1", "Other Key2",
        "Other Key3")
    assert(ray.get(ray.get(graph.select_foreign_edges.remote(
        transaction_id, key))[0]["Other Graph"]
        ) == set(["Other Key1", "Other Key2", "Other Key3"]))
def test_delete():
    """Deleting in a later transaction hides the row only from that
    transaction on -- the earlier transaction still sees it (MVCC-style;
    confirm semantics in ursa.graph)."""
    graph = init_test()
    key = "Key1"
    oid = "Value1"
    local_edges = set()
    foreign_edges = {}
    transaction_id = 0
    graph.insert.remote(key, oid, local_edges, foreign_edges, transaction_id)
    assert(ray.get(graph.row_exists.remote(key, transaction_id)))
    transaction_id += 1
    graph.delete.remote("Key1", transaction_id)
    # Visible at the old transaction id, gone at the new one.
    assert(ray.get(graph.row_exists.remote(key, transaction_id - 1)))
    assert(not ray.get(graph.row_exists.remote(key, transaction_id)))


def test_split():
    """Splitting a two-row graph moves the second row into a new graph."""
    graph = init_test()
    key = "Key1"
    oid = "Value1"
    local_edges = set()
    foreign_edges = {}
    transaction_id = 0
    graph.insert.remote(key, oid, local_edges, foreign_edges, transaction_id)
    key = "Key2"
    oid = "Value2"
    local_edges = set()
    foreign_edges = {}
    transaction_id = 0
    graph.insert.remote(key, oid, local_edges, foreign_edges, transaction_id)
    # split() returns the rows handed off to the second graph actor.
    second_graph = ursa.graph.Graph.remote(transaction_id,
                                           graph.split.remote())
    assert ray.get(graph.row_exists.remote("Key1", transaction_id))
    assert not ray.get(second_graph.row_exists.remote("Key1", transaction_id))
    assert not ray.get(graph.row_exists.remote("Key2", transaction_id))
    assert ray.get(second_graph.row_exists.remote("Key2", transaction_id))
def test_update_deleted_row():
    """Updating a row deleted in the same transaction must not resurrect it."""
    graph = init_test()
    local_edges = set()
    foreign_edges = {}
    transaction_id = 0
    graph.insert.remote("Key3", "Value3", local_edges, foreign_edges,
                        transaction_id)
    graph.insert.remote("Key4", "Value4", local_edges, foreign_edges,
                        transaction_id)
    graph.delete.remote("Key3", transaction_id)
    graph.update.remote("Key3", "UpdatedValue", local_edges, foreign_edges,
                        transaction_id)
    # select_row with no key presumably returns all visible rows -- confirm.
    assert "Key3" not in ray.get(graph.select_row.remote(transaction_id))


def test_non_existant_row():
    """Updating a key that was never inserted must not create it."""
    graph = init_test()
    local_edges = set()
    foreign_edges = {}
    transaction_id = 0
    graph.insert.remote("Key3", "Value3", local_edges, foreign_edges,
                        transaction_id)
    graph.insert.remote("Key4", "Value4", local_edges, foreign_edges,
                        transaction_id)
    graph.delete.remote("Key3", transaction_id)
    graph.update.remote("Key9999", "UpdatedValue", local_edges, foreign_edges,
                        transaction_id)
    assert "Key9999" not in ray.get(graph.select_row.remote(transaction_id))
| 31.048889
| 78
| 0.65073
| 904
| 6,986
| 4.779867
| 0.058628
| 0.192548
| 0.14904
| 0.162
| 0.911363
| 0.904189
| 0.896783
| 0.877343
| 0.8461
| 0.83638
| 0
| 0.019423
| 0.211423
| 6,986
| 224
| 79
| 31.1875
| 0.76493
| 0
| 0
| 0.716763
| 0
| 0
| 0.073862
| 0
| 0
| 0
| 0
| 0
| 0.144509
| 1
| 0.075145
| false
| 0
| 0.017341
| 0.00578
| 0.098266
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
799ded0d4f0974f3ccdf83f82d23a7bb02e0d4f3
| 422
|
py
|
Python
|
server/ssh_server/qr_send.py
|
nickt121/CXA_2019_Recycle
|
9caa936109b4b6cf3f729069d3a3da9c929df04b
|
[
"MIT"
] | null | null | null |
server/ssh_server/qr_send.py
|
nickt121/CXA_2019_Recycle
|
9caa936109b4b6cf3f729069d3a3da9c929df04b
|
[
"MIT"
] | null | null | null |
server/ssh_server/qr_send.py
|
nickt121/CXA_2019_Recycle
|
9caa936109b4b6cf3f729069d3a3da9c929df04b
|
[
"MIT"
] | null | null | null |
import os
import image_p
def send():
    """Capture an image on the Pi, copy it to the remote Windows host, then
    run the recognizer script there over SSH."""
    # image_p.pic() presumably writes /home/pi/cam/capture.jpg -- confirm.
    image_p.pic()
    # SECURITY NOTE(review): a plaintext password is embedded in a shell
    # command run via os.system -- it is visible in source control and in the
    # process list while sshpass runs.  Prefer key-based SSH auth and
    # subprocess.run([...]) with a list argument instead of a shell string.
    os.system(r"sshpass -p '*0103549a' scp /home/pi/cam/capture.jpg nickie@10.143.209.103:'C:\Users\nicki\PycharmProjects\hackathon\python_main\test_images'")
    os.system(r"sshpass -p '*0103549a' ssh nickie@10.143.209.103 'cd C:\Users\nicki\PycharmProjects\hackathon\python_main && C:\Users\nicki\PycharmProjects\hackathon\venv\Scripts\python.exe main_user.py'")
| 52.75
| 206
| 0.753555
| 68
| 422
| 4.588235
| 0.558824
| 0.057692
| 0.105769
| 0.25
| 0.669872
| 0.448718
| 0.288462
| 0
| 0
| 0
| 0
| 0.093506
| 0.087678
| 422
| 7
| 207
| 60.285714
| 0.716883
| 0
| 0
| 0
| 0
| 0.333333
| 0.777251
| 0.590047
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0.333333
| 0.333333
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 8
|
8dc7b5b2089813df585930fb8574400efe756e17
| 6,930
|
py
|
Python
|
models/uploaded_file.py
|
isstek/clientmanagement
|
26bd6bbd974f24211dd0ae4b1c75ee8e4b150767
|
[
"MIT"
] | null | null | null |
models/uploaded_file.py
|
isstek/clientmanagement
|
26bd6bbd974f24211dd0ae4b1c75ee8e4b150767
|
[
"MIT"
] | 11
|
2019-05-02T20:10:16.000Z
|
2022-02-10T07:10:25.000Z
|
models/uploaded_file.py
|
isstek/clientmanagement
|
26bd6bbd974f24211dd0ae4b1c75ee8e4b150767
|
[
"MIT"
] | 2
|
2020-11-04T03:05:23.000Z
|
2020-11-05T08:14:14.000Z
|
from django.db import models
from datetime import datetime, timedelta, timezone
from django.conf import settings
from django.urls import reverse
import pytz, uuid, os, random
from models import ticket, ticket_comment
from django.core.files.storage import default_storage
from django.utils.encoding import smart_str
from urllib.parse import quote, unquote
from django.http import HttpResponse, FileResponse
from clientmanagement import error_views
class UploadedFileTicket(models.Model):
    """A file uploaded against a Ticket.

    `filename` holds the URL-quoted base name (see save_file_ticket);
    `uplfile` references the stored file.
    """
    for_ticket = models.ForeignKey(ticket.Ticket, on_delete=models.CASCADE, null=False, related_name="files")
    createdon = models.DateTimeField("Created time", auto_now_add=True, null=False, blank=False)
    filename = models.CharField(max_length=255, blank=True, null=True)
    uplfile = models.FileField(max_length=255, blank=True, null=True)

    def get_folder_name(self):
        # BUG FIX: originally `return for_ticket.get_files_folder()` -- a bare
        # name that raises NameError; the FK must be read off the instance.
        return self.for_ticket.get_files_folder()

    def createtime(self):
        """Creation timestamp converted to America/New_York."""
        return self.createdon.astimezone(pytz.timezone('America/New_York'))

    def get_internal_link_to_file(self):
        """Site-relative download URL for this file."""
        return reverse('get_ticket_file', kwargs={'ticketuuid': self.for_ticket.unid, 'filename': self.filename})

    def get_link_to_file(self):
        """Absolute download URL (used in e-mails)."""
        return settings.EMAIL_HOST_LINK + self.get_internal_link_to_file()

    def get_internal_link_to_view_file(self):
        """Site-relative inline-view URL for this file."""
        return reverse('get_ticket_file_view', kwargs={'ticketuuid': self.for_ticket.unid, 'filename': self.filename})

    def get_link_to_view_file(self):
        """Absolute inline-view URL (used in e-mails)."""
        return settings.EMAIL_HOST_LINK + self.get_internal_link_to_view_file()

    def get_file_name(self):
        """Human-readable (unquoted) file name."""
        return unquote(self.filename)

    def isimage(self):
        """True when the stored file's extension is a configured image type."""
        filename, extension=os.path.splitext(self.uplfile.name)
        return extension.lower() in settings.IMAGE_FILE_EXTENSIONS
class UploadedFileComment(models.Model):
    """A file uploaded against a TicketComment.

    Mirrors UploadedFileTicket but hangs off a comment instead of a ticket.
    """
    for_comment = models.ForeignKey(ticket_comment.TicketComment, on_delete=models.CASCADE, null=False, related_name="files")
    createdon = models.DateTimeField("Created time", auto_now_add=True, null=False, blank=False)
    filename = models.CharField(max_length=255, blank=True, null=True)
    uplfile = models.FileField(max_length=255, blank=True, null=True)

    def get_folder_name(self):
        # BUG FIX: originally `return for_comment.get_files_folder()` -- a bare
        # name that raises NameError; the FK must be read off the instance.
        return self.for_comment.get_files_folder()

    def createtime(self):
        """Creation timestamp converted to America/New_York."""
        return self.createdon.astimezone(pytz.timezone('America/New_York'))

    def get_internal_link_to_file(self):
        """Site-relative download URL for this file."""
        return reverse('get_comment_file', kwargs={'ticketuuid': self.for_comment.initial_ticket.unid, 'filename': self.filename, 'commentid': self.for_comment.id})

    def get_link_to_file(self):
        """Absolute download URL (used in e-mails)."""
        return settings.EMAIL_HOST_LINK + self.get_internal_link_to_file()

    def get_internal_link_to_view_file(self):
        """Site-relative inline-view URL for this file."""
        return reverse('get_comment_file_view', kwargs={'ticketuuid': self.for_comment.initial_ticket.unid, 'filename': self.filename, 'commentid': self.for_comment.id})

    def get_link_to_view_file(self):
        """Absolute inline-view URL (used in e-mails)."""
        return settings.EMAIL_HOST_LINK + self.get_internal_link_to_view_file()

    def get_file_name(self):
        """Human-readable (unquoted) file name."""
        return unquote(self.filename)

    def isimage(self):
        """True when the stored file's extension is a configured image type."""
        filename, extension=os.path.splitext(self.uplfile.name)
        return extension.lower() in settings.IMAGE_FILE_EXTENSIONS
def downloadFileFromTicket(request, ticketuuid, filename):
    """Serve a ticket attachment as a download, or a not-found page.

    Any lookup failure (bad uuid, missing file) falls back to
    error_views.notfound; exceptions are printed rather than logged.
    """
    try:
        tick = ticket.Ticket.objects.get(unid=ticketuuid)
    except Exception as exc:
        print(exc)
        return error_views.notfound(request)
    try:
        resfile = UploadedFileTicket.objects.get(for_ticket=tick, filename=filename)
    except Exception as exc:
        print(exc)
        return error_views.notfound(request)
    # Whole file is read into memory; X-Sendfile additionally lets a fronting
    # web server serve the file directly when configured to honor it.
    response = HttpResponse(resfile.uplfile.read())
    response['Content-Disposition'] = 'attachment; filename=%s' % smart_str(os.path.basename(resfile.uplfile.name))
    response['X-Sendfile'] = smart_str(resfile.uplfile.name)
    return response
def viewFileFromTicket(request, ticketuuid, filename):
    """Serve a ticket attachment inline with an 'image' content type,
    or a not-found page on any lookup failure."""
    try:
        tick = ticket.Ticket.objects.get(unid=ticketuuid)
    except Exception as exc:
        print(exc)
        return error_views.notfound(request)
    try:
        resfile = UploadedFileTicket.objects.get(for_ticket=tick, filename=filename)
    except Exception as exc:
        print(exc)
        return error_views.notfound(request)
    # 'image' is passed as the content type so browsers render it inline.
    response = HttpResponse(resfile.uplfile.read(), 'image')
    return response
def downloadFileFromComment(request, ticketuuid, commentid, filename):
    """Serve a comment attachment as a download, or a not-found page.

    Verifies the comment actually belongs to the ticket in the URL before
    serving anything.
    """
    try:
        comment = ticket_comment.TicketComment.objects.get(id=commentid)
        if comment.initial_ticket.unid != ticketuuid:
            return error_views.notfound(request)
    except Exception as exc:
        print(exc)
        return error_views.notfound(request)
    try:
        # BUG FIX: originally queried UploadedFileTicket with
        # for_ticket=comment -- the wrong model, and a TicketComment passed
        # where a Ticket FK is expected.  Comment attachments live in
        # UploadedFileComment keyed by for_comment.
        resfile = UploadedFileComment.objects.get(for_comment=comment, filename=filename)
    except Exception as exc:
        print(exc)
        return error_views.notfound(request)
    # BUG FIX: the response body was empty (bare HttpResponse()); mirror
    # downloadFileFromTicket and include the file content, so downloads work
    # even when the web server does not honor X-Sendfile.
    response = HttpResponse(resfile.uplfile.read())
    response['Content-Disposition'] = 'attachment; filename=%s' % smart_str(os.path.basename(resfile.uplfile.name))
    response['X-Sendfile'] = smart_str(resfile.uplfile.name)
    return response
def viewFileFromComment(request, ticketuuid, commentid, filename):
    """Serve a comment attachment inline with an 'image' content type,
    or a not-found page on any lookup/ownership failure."""
    try:
        comment = ticket_comment.TicketComment.objects.get(id=commentid)
        if comment.initial_ticket.unid != ticketuuid:
            return error_views.notfound(request)
    except Exception as exc:
        print(exc)
        return error_views.notfound(request)
    try:
        # BUG FIX: originally queried UploadedFileTicket with
        # for_ticket=comment -- the wrong model for comment attachments.
        resfile = UploadedFileComment.objects.get(for_comment=comment, filename=filename)
    except Exception as exc:
        print(exc)
        return error_views.notfound(request)
    response = HttpResponse(resfile.uplfile.read(), 'image')
    return response
def save_file_ticket(ticket, ufile):
    """Store an uploaded file under the ticket's folder and record it as an
    UploadedFileTicket.  Returns the saved model instance.

    `filename` is stored URL-quoted so it is safe to embed in URLs.
    """
    path = ticket.get_files_folder()
    addition = ""
    filepath = os.path.join(path, addition, ufile.name)
    # Collision avoidance: grow `addition` with random digits until free.
    # NOTE(review): `addition` is joined as an extra *path segment*, so a
    # collision places the file in a random-digit subdirectory rather than
    # renaming it -- confirm this is the intended layout.
    while os.path.exists(filepath):
        addition+=str(random.randint(0,9))
        filepath = os.path.join(path, addition, ufile.name)
    with default_storage.open(filepath, 'wb+') as destination:
        for chunk in ufile.chunks():
            destination.write(chunk)
    upf = UploadedFileTicket(for_ticket=ticket, uplfile=filepath, filename=quote(os.path.basename(filepath)))
    upf.save()
    return upf
def save_file_comment(comment, ufile):
    """Store an uploaded file under the comment's folder and record it as an
    UploadedFileComment.  Returns the saved model instance.

    Mirrors save_file_ticket, including the same random-digit collision
    scheme (see the NOTE there).
    """
    path = comment.get_files_folder()
    addition = ""
    filepath = os.path.join(path, addition, ufile.name)
    while os.path.exists(filepath):
        addition+=str(random.randint(0,9))
        filepath = os.path.join(path, addition, ufile.name)
    with default_storage.open(filepath, 'wb+') as destination:
        for chunk in ufile.chunks():
            destination.write(chunk)
    upf = UploadedFileComment(for_comment=comment, uplfile=filepath, filename=quote(os.path.basename(filepath)))
    upf.save()
    return upf
| 40.057803
| 169
| 0.722799
| 866
| 6,930
| 5.612009
| 0.161663
| 0.028807
| 0.032922
| 0.049383
| 0.840329
| 0.840329
| 0.838683
| 0.830041
| 0.830041
| 0.830041
| 0
| 0.002797
| 0.174459
| 6,930
| 173
| 170
| 40.057803
| 0.846705
| 0
| 0
| 0.765957
| 0
| 0
| 0.050209
| 0.00303
| 0
| 0
| 0
| 0
| 0
| 1
| 0.156028
| false
| 0
| 0.078014
| 0.099291
| 0.531915
| 0.056738
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8df61e25f0bf35039cc0b0fbd632cec209cb2a56
| 211
|
py
|
Python
|
baselines_merl/common/__init__.py
|
yfletberliac/MERL
|
6eca6c3c9fa0fbd766a82ef9a85fa383b8f649c9
|
[
"MIT"
] | 3
|
2019-10-25T12:01:54.000Z
|
2022-03-31T10:32:26.000Z
|
baselines_merl/common/__init__.py
|
yfletberliac/MERL
|
6eca6c3c9fa0fbd766a82ef9a85fa383b8f649c9
|
[
"MIT"
] | 2
|
2020-04-23T16:18:03.000Z
|
2020-10-29T21:09:09.000Z
|
baselines_merl/common/__init__.py
|
yfletberliac/MERL
|
6eca6c3c9fa0fbd766a82ef9a85fa383b8f649c9
|
[
"MIT"
] | null | null | null |
# flake8: noqa F403
from baselines_merl.common.console_util import *
from baselines_merl.common.dataset import Dataset
from baselines_merl.common.math_util import *
from baselines_merl.common.misc_util import *
| 35.166667
| 49
| 0.843602
| 31
| 211
| 5.516129
| 0.419355
| 0.304094
| 0.397661
| 0.538012
| 0.385965
| 0.385965
| 0
| 0
| 0
| 0
| 0
| 0.020942
| 0.094787
| 211
| 5
| 50
| 42.2
| 0.874346
| 0.080569
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
eb4c44ab34cdde786dff8ef8184c8a40cdc7b6c7
| 7,804
|
py
|
Python
|
python/dllib/src/bigdl/dllib/keras/layers/recurrent.py
|
DirkFi/BigDL
|
7493209165c046116470b9a1e1c8f527915d6f1e
|
[
"Apache-2.0"
] | 3
|
2021-07-14T01:28:47.000Z
|
2022-03-02T01:16:32.000Z
|
python/dllib/src/bigdl/dllib/keras/layers/recurrent.py
|
DirkFi/BigDL
|
7493209165c046116470b9a1e1c8f527915d6f1e
|
[
"Apache-2.0"
] | null | null | null |
python/dllib/src/bigdl/dllib/keras/layers/recurrent.py
|
DirkFi/BigDL
|
7493209165c046116470b9a1e1c8f527915d6f1e
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys

from ..engine.topology import ZooKerasLayer

# Python 2/3 compatibility aliases: on Python 3 map the removed `long` and
# `unicode` builtins onto their Python 3 equivalents.
# BUG FIX: the original test was `sys.version >= '3'`, a lexicographic string
# comparison of the full version banner; compare the numeric major version
# from sys.version_info instead (robust for any future major version).
if sys.version_info[0] >= 3:
    long = int
    unicode = str
class SimpleRNN(ZooKerasLayer):
    """
    A fully-connected recurrent neural network cell. The output is to be fed back to input.

    The input of this layer should be 3D, i.e. (batch, time steps, input dim).

    When you use this layer as the first layer of a model, you need to provide the argument
    input_shape (a shape tuple, does not include the batch dimension).

    # Arguments
    output_dim: Hidden unit size. Dimension of internal projections and final output.
    activation: String representation of the activation function to use
                (such as 'relu' or 'sigmoid'). Default is 'tanh'.
    return_sequences: Whether to return the full sequence or only return the last output
                      in the output sequence. Default is False.
    go_backwards: Whether the input sequence will be processed backwards. Default is False.
    W_regularizer: An instance of [[Regularizer]], (eg. L1 or L2 regularization),
                   applied to the input weights matrices. Default is None.
    U_regularizer: An instance of [[Regularizer]], applied the recurrent weights matrices.
                   Default is None.
    b_regularizer: An instance of [[Regularizer]], applied to the bias. Default is None.
    input_shape: A shape tuple, not including batch.
    name: String to set the name of the layer.
          If not specified, its name will by default to be a generated string.

    >>> simplernn = SimpleRNN(16, input_shape=(3, 32))
    creating: createZooKerasSimpleRNN
    """
    def __init__(self, output_dim, activation="tanh", return_sequences=False,
                 go_backwards=False, W_regularizer=None, U_regularizer=None,
                 b_regularizer=None, input_shape=None, **kwargs):
        # Leading None: presumably a placeholder argument consumed by
        # ZooKerasLayer.__init__ (JVM bridge) -- confirm against its signature.
        # input_shape is forwarded as a list (or None) for interop.
        super(SimpleRNN, self).__init__(None,
                                        output_dim,
                                        activation,
                                        return_sequences,
                                        go_backwards,
                                        W_regularizer,
                                        U_regularizer,
                                        b_regularizer,
                                        list(input_shape) if input_shape else None,
                                        **kwargs)
class GRU(ZooKerasLayer):
    """
    Gated Recurrent Unit architecture.

    The input of this layer should be 3D, i.e. (batch, time steps, input dim).

    When you use this layer as the first layer of a model, you need to provide the argument
    input_shape (a shape tuple, does not include the batch dimension).

    # Arguments
    output_dim: Hidden unit size. Dimension of internal projections and final output.
    activation: String representation of the activation function to use
                (such as 'relu' or 'sigmoid'). Default is 'tanh'.
    inner_activation: String representation of the activation function for inner cells.
                      Default is 'hard_sigmoid'.
    return_sequences: Whether to return the full sequence or only return the last output
                      in the output sequence. Default is False.
    go_backwards: Whether the input sequence will be processed backwards. Default is False.
    W_regularizer: An instance of [[Regularizer]], (eg. L1 or L2 regularization),
                   applied to the input weights matrices. Default is None.
    U_regularizer: An instance of [[Regularizer]], applied the recurrent weights matrices.
                   Default is None.
    b_regularizer: An instance of [[Regularizer]], applied to the bias. Default is None.
    input_shape: A shape tuple, not including batch.
    name: String to set the name of the layer.
          If not specified, its name will by default to be a generated string.

    >>> gru = GRU(24, input_shape=(32, 32))
    creating: createZooKerasGRU
    """
    def __init__(self, output_dim, activation="tanh", inner_activation="hard_sigmoid",
                 return_sequences=False, go_backwards=False, W_regularizer=None,
                 U_regularizer=None, b_regularizer=None, input_shape=None, **kwargs):
        # Leading None: presumably a placeholder argument consumed by
        # ZooKerasLayer.__init__ (JVM bridge) -- confirm against its signature.
        super(GRU, self).__init__(None,
                                  output_dim,
                                  activation,
                                  inner_activation,
                                  return_sequences,
                                  go_backwards,
                                  W_regularizer,
                                  U_regularizer,
                                  b_regularizer,
                                  list(input_shape) if input_shape else None,
                                  **kwargs)
class LSTM(ZooKerasLayer):
    """
    Long Short Term Memory unit architecture.

    The input of this layer should be 3D, i.e. (batch, time steps, input dim).

    When you use this layer as the first layer of a model, you need to provide the argument
    input_shape (a shape tuple, does not include the batch dimension).

    # Arguments
    output_dim: Hidden unit size. Dimension of internal projections and final output.
    activation: String representation of the activation function to use
                (such as 'relu' or 'sigmoid'). Default is 'tanh'.
    inner_activation: String representation of the activation function for inner cells.
                      Default is 'hard_sigmoid'.
    return_sequences: Whether to return the full sequence or only return the last output
                      in the output sequence. Default is False.
    go_backwards: Whether the input sequence will be processed backwards. Default is False.
    W_regularizer: An instance of [[Regularizer]], (eg. L1 or L2 regularization),
                   applied to the input weights matrices. Default is None.
    U_regularizer: An instance of [[Regularizer]], applied the recurrent weights matrices.
                   Default is None.
    b_regularizer: An instance of [[Regularizer]], applied to the bias. Default is None.
    input_shape: A shape tuple, not including batch.
    name: String to set the name of the layer.
          If not specified, its name will by default to be a generated string.

    >>> lstm = LSTM(32, input_shape=(8, 16), name="lstm1")
    creating: createZooKerasLSTM
    """
    def __init__(self, output_dim, activation="tanh", inner_activation="hard_sigmoid",
                 return_sequences=False, go_backwards=False, W_regularizer=None,
                 U_regularizer=None, b_regularizer=None, input_shape=None, **kwargs):
        # Leading None: presumably a placeholder argument consumed by
        # ZooKerasLayer.__init__ (JVM bridge) -- confirm against its signature.
        super(LSTM, self).__init__(None,
                                   output_dim,
                                   activation,
                                   inner_activation,
                                   return_sequences,
                                   go_backwards,
                                   W_regularizer,
                                   U_regularizer,
                                   b_regularizer,
                                   list(input_shape) if input_shape else None,
                                   **kwargs)
| 50.025641
| 91
| 0.611097
| 926
| 7,804
| 5.035637
| 0.196544
| 0.038602
| 0.040532
| 0.044392
| 0.813854
| 0.813854
| 0.80935
| 0.802059
| 0.802059
| 0.802059
| 0
| 0.006673
| 0.327909
| 7,804
| 155
| 92
| 50.348387
| 0.882364
| 0.620964
| 0
| 0.714286
| 0
| 0
| 0.014005
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061224
| false
| 0
| 0.040816
| 0
| 0.163265
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
eb64bae14fe5e96c214886a14f136cc807c7e81a
| 32,914
|
py
|
Python
|
dag/tests/test_core.py
|
HAL-42/AlchemyCat
|
ca924755ff48e2ff74543bb0e446376eb2b1f150
|
[
"Apache-2.0"
] | 8
|
2020-01-08T19:42:01.000Z
|
2021-12-28T08:30:56.000Z
|
dag/tests/test_core.py
|
HAL-42/AlchemyCat
|
ca924755ff48e2ff74543bb0e446376eb2b1f150
|
[
"Apache-2.0"
] | 2
|
2020-09-10T12:22:57.000Z
|
2022-02-17T05:21:22.000Z
|
dag/tests/test_core.py
|
HAL-42/AlchemyCat
|
ca924755ff48e2ff74543bb0e446376eb2b1f150
|
[
"Apache-2.0"
] | 1
|
2021-05-12T01:50:27.000Z
|
2021-05-12T01:50:27.000Z
|
import pytest
from alchemy_cat.dag.core import Graph, PyungoError
from alchemy_cat.dag.io import Input, Output
def test_simple():
    """Three chained nodes resolve in dependency order; repeated runs are independent."""
    graph = Graph()

    @graph.register(inputs=['a', 'b'], outputs=['c'])
    def f_my_function(a, b):
        total = a + b
        return total

    @graph.register(inputs=['d', 'a'], outputs=['e'])
    def f_my_function3(d, a):
        diff = d - a
        return diff

    @graph.register(inputs=['c'], outputs=['d'])
    def f_my_function2(c):
        scaled = c / 10.
        return scaled

    # Two identical runs: results must not depend on leftover state.
    # c = 5, d = 0.5, e = 0.5 - 2 = -1.5
    for _ in range(2):
        res = graph.calculate(data={'a': 2, 'b': 3})
        assert res == -1.5
        assert graph.data['e'] == -1.5
def test_call():
    """graph(a=2, b=3) is equivalent to graph.calculate(); positional args raise."""
    graph = Graph()

    @graph.register(inputs=['a', 'b'], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    @graph.register(inputs=['d', 'a'], outputs=['e'])
    def f_my_function3(d, a):
        return d - a

    @graph.register(inputs=['c'], outputs=['d'])
    def f_my_function2(c):
        return c / 10.

    # Two runs: results must be reproducible (no leaked state between calls).
    for _ in range(2):
        res = graph(a=2, b=3)
        assert res == -1.5
        assert graph.data['e'] == -1.5
    # Positional values cannot be routed to named inputs.
    with pytest.raises(PyungoError, match="Graph only receive keyword args which will be "
                                          "recognized as input name and value."):
        graph(2, 3)


def test_inputs_args_equivalent():
    """`args=` is accepted as an equivalent spelling of `inputs=`."""
    graph = Graph()

    @graph.register(args=['a', 'b'], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    @graph.register(args=['d', 'a'], outputs=['e'])
    def f_my_function3(d, a):
        return d - a

    @graph.register(args=['c'], outputs=['d'])
    def f_my_function2(c):
        return c / 10.

    res = graph.calculate(data={'a': 2, 'b': 3})
    assert res == -1.5
    assert graph.data['e'] == -1.5
    # make sure it is independent
    res = graph.calculate(data={'a': 2, 'b': 3})
    assert res == -1.5
    assert graph.data['e'] == -1.5


def test_inputs_args_blend():
    """`inputs=` and `args=` can be mixed on the same node."""
    graph = Graph()

    @graph.register(inputs=['a'], args=['b'], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    @graph.register(inputs=['d'], args=['a'], outputs=['e'])
    def f_my_function3(d, a):
        return d - a

    @graph.register(args=['c'], outputs=['d'])
    def f_my_function2(c):
        return c / 10.

    res = graph.calculate(data={'a': 2, 'b': 3})
    assert res == -1.5
    assert graph.data['e'] == -1.5
    # make sure it is independent
    res = graph.calculate(data={'a': 2, 'b': 3})
    assert res == -1.5
    assert graph.data['e'] == -1.5


def test_simple_constant_inputs():
    """Dict entries in `inputs` (e.g. {'a': 2}) pin constants at registration time."""
    graph = Graph()

    @graph.register(inputs=[{'a': 2}, {'b': 3}], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    @graph.register(inputs=['d', 'a'], outputs=['e'])
    def f_my_function3(d, a):
        return d - a

    @graph.register(inputs=['c'], outputs=['d'])
    def f_my_function2(c):
        return c / 10.

    # 'a' is still fed via data because the second node consumes it by name.
    res = graph.calculate(data={'a': 2})
    assert res == -1.5
    assert graph.data['e'] == -1.5
    # make sure it is independent
    res = graph.calculate(data={'a': 2})
    assert res == -1.5
    assert graph.data['e'] == -1.5
def test_constant_args_kwargs():
    """Constants may be embedded in *args ({'cc': 6}) and **kwargs ({'dc': 7})."""
    graph = Graph()

    @graph.register(
        inputs=['a', 'b'],
        args=['c', {'cc': 6}],
        kwargs=['d', {'dc': 7}],
        outputs=['e']
    )
    def f_my_function(a, b, *args, **kwargs):
        return a + b + args[0] + args[1] + kwargs['d'] + kwargs['dc']

    # 2 + 3 + 4 + 6 + 5 + 7 == 27
    res = graph.calculate(data={'a': 2, 'b': 3, 'c': 4, 'd': 5})
    assert res == 27
    assert graph.data['e'] == 27


def test_constant_dict_kwargs():
    """A plain dict for `kwargs=` supplies all keyword constants at once."""
    graph = Graph()

    @graph.register(
        inputs=['a', 'b'],
        args=['c'],
        kwargs={'d': 5, 'dc': 6},
        outputs=['e']
    )
    def f_my_function(a, b, *args, **kwargs):
        return a + b + args[0] + kwargs['d'] + kwargs['dc']

    # 2 + 3 + 4 + 5 + 6 == 20
    res = graph.calculate(data={'a': 2, 'b': 3, 'c': 4})
    assert res == 20
    assert graph.data['e'] == 20


def test_complex_constant_inputs():
    """Test complex constant inputs.

    * Test {k:v, k:v, ...} constant
    * Test Input(name, value=*) constant
    * Test kwargs constant
    """
    graph = Graph()

    # f1 = 2 + 3 - 6 = -1
    @graph.register(inputs={'inp_1_1': 2, 'inp_1_2': 3}, kwargs={'inp_1_3': 6}, outputs=['f1'])
    def f_my_function1(inp_1_1, inp_1_2=2, inp_1_3=3):
        return inp_1_1 + inp_1_2 - inp_1_3

    # f2 = (-1 * -1 + 1, -2 - 3) = (2, -5)
    @graph.register(args=['f1', Input('i_2_2', value=-1), {'i_2_3_1': 1}, {'i_2_3_2': -2}],
                    kwargs=[Input('inp_2_4', value=3)], outputs=['f2'])
    def f_my_function2(inp_2_1=4, inp_2_2=5, *inp_2_3, **inp_2_4):
        return inp_2_1 * inp_2_2 + inp_2_3[0], inp_2_3[1] - list(inp_2_4.values())[0]

    # f3 = (-1 - 2 * -5) * 3 = 27  (original comment claiming -33 was wrong)
    @graph.register(inputs=['f1', 'f2', 'inp_3_3'], outputs=['f3'])
    def f_my_function3(inp_3_1, inp_3_2, inp_3_3=4):
        return (inp_3_1 - inp_3_2[0] * inp_3_2[1]) * inp_3_3

    for _ in range(2):
        res = graph.calculate(data={'inp_3_3': 3})
        assert res == 27
        assert graph.data['f3'] == res
        assert graph.data['f1'] == -1
        assert graph.data['f2'] == (2, -5)
def test_slim_graph():
    """With slim=True, values are shared by reference between node IOs and data."""
    import numpy as np
    graph = Graph(slim=True)

    @graph.register(inputs=['a', 'b'], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    @graph.register(inputs=['d', 'a'], outputs=['e'])
    def f_my_function3(d, a):
        return d - a

    @graph.register(inputs=['c'], outputs=['d'])
    def f_my_function2(c):
        return c / 10.

    for _ in range(2):
        data = {'a': np.ones((2, 2)) * 2., 'b': np.ones((2, 2)) * 3.}
        res = graph.calculate(data)
        assert (res == np.ones((2, 2)) * -1.5).all()
        assert (graph.data['e'] == np.ones((2, 2)) * -1.5).all()
        # Identity checks: slim mode must NOT copy arrays between nodes.
        assert id(graph.ordered_nodes[0]._outputs[0].value) == id(graph.ordered_nodes[1]._inputs[0].value)
        assert id(graph.ordered_nodes[1]._outputs[0].value) == id(graph.ordered_nodes[2]._inputs[0].value)
        assert id(data['a']) == id(graph.ordered_nodes[0]._inputs[0].value)
        assert id(data['b']) == id(graph.ordered_nodes[0]._inputs[1].value)


def test_not_slim_graph():
    """With slim=False, each node gets its own copy of every value."""
    import numpy as np
    graph = Graph(slim=False)

    @graph.register(inputs=['a', 'b'], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    @graph.register(inputs=['d', 'a'], outputs=['e'])
    def f_my_function3(d, a):
        return d - a

    @graph.register(inputs=['c'], outputs=['d'])
    def f_my_function2(c):
        return c / 10.

    for _ in range(2):
        data = {'a': np.ones((2, 2)) * 2., 'b': np.ones((2, 2)) * 3.}
        res = graph.calculate(data)
        assert (res == np.ones((2, 2)) * -1.5).all()
        assert (graph.data['e'] == np.ones((2, 2)) * -1.5).all()
        # Identity checks: non-slim mode must copy, so ids differ everywhere.
        assert id(graph.ordered_nodes[0]._outputs[0].value) != id(graph.ordered_nodes[1]._inputs[0].value)
        assert id(graph.ordered_nodes[1]._outputs[0].value) != id(graph.ordered_nodes[2]._inputs[0].value)
        assert id(data['a']) != id(graph.ordered_nodes[0]._inputs[0].value)
        assert id(data['b']) != id(graph.ordered_nodes[0]._inputs[1].value)


def test_node_slim_graph():
    """Per-node slim_names shares only the named inputs, even on a non-slim graph."""
    import numpy as np
    graph = Graph(slim=False)

    @graph.register(inputs=['a', 'b'], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    @graph.register(inputs=['d', 'a'], outputs=['e'])
    def f_my_function3(d, a):
        return d - a

    @graph.register(inputs=['c'], outputs=['d'], slim_names=['c'])
    def f_my_function2(c):
        return c / 10.

    res = graph.calculate(data={'a': np.ones((2, 2)) * 2., 'b': np.ones((2, 2)) * 3.})
    assert (res == np.ones((2, 2)) * -1.5).all()
    assert (graph.data['e'] == np.ones((2, 2)) * -1.5).all()
    # 'c' is in slim_names -> shared; 'd' is not -> copied.
    assert id(graph.ordered_nodes[0]._outputs[0].value) == id(graph.ordered_nodes[1]._inputs[0].value)
    assert id(graph.ordered_nodes[1]._outputs[0].value) != id(graph.ordered_nodes[2]._inputs[0].value)
    # Second run behaves the same.
    res = graph.calculate(data={'a': np.ones((2, 2)) * 2., 'b': np.ones((2, 2)) * 3.})
    assert (res == np.ones((2, 2)) * -1.5).all()
    assert (graph.data['e'] == np.ones((2, 2)) * -1.5).all()
    assert id(graph.ordered_nodes[0]._outputs[0].value) == id(graph.ordered_nodes[1]._inputs[0].value)
    assert id(graph.ordered_nodes[1]._outputs[0].value) != id(graph.ordered_nodes[2]._inputs[0].value)
def test_functor():
    """Callable classes can be registered; `init` supplies constructor kwargs."""
    graph = Graph()

    # c = a + b + factor = 2 + 3 + 1 = 6
    @graph.register(inputs=['a', 'b'], outputs=['c'], init={})
    class f_my_function(object):
        factor = 1

        def __init__(self):
            self.dummy = 100

        def __call__(self, a1, a2):
            return a1 + a2 + self.factor

    # e = (d + a + 1) * 2 = (4 + 2 + 1) * 2 = 14
    @graph.register(inputs=['d', 'a'], outputs=['e'], init={'add_or_sub': 'add', 'factor': 2})
    class f_my_function2(object):
        def __init__(self, add_or_sub='add', factor=1):
            if add_or_sub == 'add':
                self.func = lambda x, y: x + y
            else:
                self.func = lambda x, y: x - y
            self.factor = factor

        def generate_constant(self):
            return 1

        def __call__(self, a1, a2):
            return (self.func(a1, a2) + self.generate_constant()) * self.factor

    # d = (c - b + 1) * 1 = (6 - 3 + 1) = 4
    @graph.register(inputs=['c', 'b'], outputs=['d'], init={'add_or_sub': 'sub'})
    class f_my_function1(object):
        def __init__(self, add_or_sub='add', factor=1):
            if add_or_sub == 'add':
                self.func = lambda x, y: x + y
            else:
                self.func = lambda x, y: x - y
            self.factor = factor

        def generate_constant(self):
            return 1

        def __call__(self, a1, a2):
            return (self.func(a1, a2) + self.generate_constant()) * self.factor

    res = graph.calculate(data={'a': 2, 'b': 3})
    assert graph.data['c'] == 6
    assert graph.data['d'] == 4
    assert res == 14
    assert graph.data['e'] == 14
    # make sure it is independent
    res = graph.calculate(data={'a': 2, 'b': 3})
    assert graph.data['c'] == 6
    assert graph.data['d'] == 4
    assert res == 14
    assert graph.data['e'] == 14


def test_simple_without_decorator():
    """graph.add_node() registers plain functions without the decorator form."""
    graph = Graph()

    def f_my_function(a, b):
        return a + b

    def f_my_function3(d, a):
        return d - a

    def f_my_function2(c):
        return c / 10.

    graph.add_node(f_my_function, inputs=['a', 'b'], outputs=['c'])
    graph.add_node(f_my_function3, inputs=['d', 'a'], outputs=['e'])
    graph.add_node(f_my_function2, inputs=['c'], outputs=['d'])
    res = graph.calculate(data={'a': 2, 'b': 3})
    assert res == -1.5
    assert graph.data['e'] == -1.5
def par_f_my_function(a, b):
    """Sum of a and b (module-level so the parallel pool can pickle it)."""
    total = a + b
    return total


def par_f_my_function3(d, a):
    """Difference d - a (module-level so the parallel pool can pickle it)."""
    delta = d - a
    return delta


def par_f_my_function2(c):
    """One tenth of c (module-level so the parallel pool can pickle it)."""
    tenth = c / 10.
    return tenth
def test_simple_parallel():
    """ TODO: We could mock and make sure things are called correctly """
    # pool_size=2 exercises the parallel execution path; nodes f/g fan out.
    graph = Graph(pool_size=2)
    graph.add_node(par_f_my_function, inputs=['a', 'b'], outputs=['c'])
    graph.add_node(par_f_my_function3, inputs=['d', 'a'], outputs=['e'])
    graph.add_node(par_f_my_function2, inputs=['c'], outputs=['d'])
    graph.add_node(par_f_my_function2, inputs=['c'], outputs=['f'])
    graph.add_node(par_f_my_function2, inputs=['c'], outputs=['g'])
    res = graph.calculate(data={'a': 2, 'b': 3})
    assert res == -1.5


def test_multiple_outputs():
    """A node returning a tuple feeds one value per declared output name."""
    graph = Graph()

    @graph.register(inputs=['a', 'b'], outputs=['c', 'd'])
    def f_my_function(a, b):
        return a + b, 2 * b

    @graph.register(inputs=['c', 'd'], outputs=['e'])
    def f_my_function2(c, d):
        return c + d

    # c = 5, d = 6, e = 11
    res = graph.calculate(data={'a': 2, 'b': 3})
    assert res == 11
    assert graph.data['e'] == 11


def test_same_output_names():
    """Registering a duplicate output name raises at registration time."""
    graph = Graph()

    @graph.register(inputs=['a', 'b'], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    with pytest.raises(PyungoError) as err:
        @graph.register(inputs=['c'], outputs=['c'])
        def f_my_function2(c):
            return c / 10
    assert "Node Node(<f_my_function2>, ['c'], ['c']) have repeated output names: ['c']" in str(err.value)


def test_missing_input():
    """A needed input absent from data raises with the missing name listed."""
    graph = Graph()

    @graph.register(inputs=['a', 'b'], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    with pytest.raises(PyungoError) as err:
        graph.calculate(data={'a': 6})
    assert "The following inputs are needed: ['b']" in str(err.value)


def test_missing_kwargs():
    """Missing kwargs inputs are reported the same way as positional ones."""
    graph = Graph()

    @graph.register(inputs=['a'], kwargs=['b'], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    with pytest.raises(PyungoError) as err:
        graph.calculate(data={'a': 6})
    assert "The following inputs are needed: ['b']" in str(err.value)


def test_missing_input_both_nec_opt():
    """'b' is optional for one node but required by another -> still reported missing."""
    graph = Graph()

    @graph.register(inputs=['a', 'b'], outputs=['c'])
    def f_my_function(a, b=2):
        return a + b

    @graph.register(kwargs=['a', 'b'], outputs=['e'])
    def f_my_function3(a, b):
        return a - b

    @graph.register(inputs=['c', 'e'], outputs=['f'])
    def f_my_function2(c, e):
        return c + e / 10.

    with pytest.raises(PyungoError) as err:
        graph.calculate(data={'a': 6})
    assert "The following inputs are needed: ['b']" in str(err.value)
def test_inputs_not_used():
    """Extraneous data keys are rejected, not silently ignored."""
    graph = Graph()

    @graph.register(inputs=['a', 'b'], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    with pytest.raises(PyungoError) as err:
        graph.calculate(data={'a': 6, 'b': 4, 'e': 7})
    assert "The following inputs are not used by the model: ['e']" in str(err.value)


def test_inputs_not_used_with_constant():
    """Feeding data for an input already pinned as a constant is an error."""
    graph = Graph()

    @graph.register(inputs=[{'a': 1}, 'b'], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    with pytest.raises(PyungoError) as err:
        graph.calculate(data={'a': 6, 'b': 4})
    assert "The following inputs are not used by the model: ['a']" in str(err.value)


def test_opt_inputs_wont_cause_redundant_input():
    """An unfed optional input falls back to its default without error."""
    graph = Graph()

    @graph.register(inputs=['a', 'b'], outputs=['c'])
    def f_my_function(a, b=2):
        return a + b

    res = graph.calculate(data={'a': 6})
    assert res == 8


def test_inputs_collision():
    """Data keys that collide with node output names are rejected."""
    graph = Graph()

    @graph.register(inputs=['a', 'b'], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    with pytest.raises(PyungoError) as err:
        graph.calculate(data={'a': 6, 'b': 4, 'c': 7})
    assert "The following inputs are already used in the model: ['c']" in str(err.value)


def test_self_dependence():
    """A node consuming its own outputs ('d', 'e') is rejected at registration."""
    graph = Graph()

    @graph.register(inputs=['a', 'b'], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    with pytest.raises(PyungoError) as err:
        @graph.register(inputs=['c', 'd', 'e', {'f': 1}], outputs=['d', 'e', 'f'])
        def f_my_function2(c, d, e):
            return c, d, e
    assert "Node Node(<f_my_function2>, ['c', 'd', 'e', 'f'], ['d', 'e', 'f']) have self dependence caused " \
           "by the following inputs: ['d', 'e']" in str(err.value)


def test_circular_dependency():
    """A cycle between nodes (c -> d -> c) is detected at calculation time."""
    graph = Graph()

    @graph.register(inputs=['a', 'b', 'd'], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    @graph.register(inputs=['c'], outputs=['d'])
    def f_my_function2(c):
        return c / 2.

    with pytest.raises(PyungoError) as err:
        graph.calculate(data={'a': 6, 'b': 4})
    assert "A cyclic dependency exists amongst" in str(err.value)


def test_iterable_on_single_output():
    """A single declared output may carry a whole iterable as its value."""
    graph = Graph()

    @graph.register(inputs=['a', 'b'], outputs=['c'])
    def f_my_function(a, b):
        return list(range(a)) + [b]

    # list(range(2)) + [3] == [0, 1, 3]
    res = graph.calculate(data={'a': 2, 'b': 3})
    assert res == [0, 1, 3]
    assert graph.data['c'] == [0, 1, 3]
def test_multiple_outputs_with_iterable():
    """With two outputs, a returned (list, scalar) tuple is split element-wise."""
    graph = Graph()

    @graph.register(inputs=['a', 'b'], outputs=['c', 'd'])
    def f_my_function(a, b):
        return list(range(a)) + [b], b * 10

    res = graph.calculate(data={'a': 2, 'b': 3})
    assert isinstance(res, tuple) is True
    assert graph.data['c'] == [0, 1, 3]
    assert graph.data['d'] == 30
    assert res[0] == [0, 1, 3]
    assert res[1] == 30


def test_args_kwargs():
    """Variadic inputs route through *args and **kwargs by declared order/name."""
    graph = Graph()

    @graph.register(
        inputs=['a', 'b'],
        args=['c'],
        kwargs=['d'],
        outputs=['e']
    )
    def f_my_function(a, b, *args, **kwargs):
        return a + b + args[0] + kwargs['d']

    # 2 + 3 + 4 + 5 == 14
    res = graph.calculate(data={'a': 2, 'b': 3, 'c': 4, 'd': 5})
    assert res == 14
    assert graph.data['e'] == 14


def test_diff_input_function_arg_name():
    """Graph input names need not match the function's parameter names."""
    graph = Graph()

    @graph.register(
        inputs=['a_diff', 'b_diff'],
        args=['c_diff'],
        kwargs=['d'],
        outputs=['e_diff']
    )
    def f_my_function(a, b, *args, **kwargs):
        return a + b + args[0] + kwargs['d']

    res = graph.calculate(data={'a_diff': 2, 'b_diff': 3, 'c_diff': 4, 'd': 5})
    assert res == 14
    assert graph.data['e_diff'] == 14


def test_dag_pretty_print():
    """graph.dag exposes topologically ordered groups of nodes by function name."""
    graph = Graph()

    @graph.register(inputs=['a', 'b'], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    @graph.register(inputs=['d', 'a'], outputs=['e'])
    def f_my_function3(d, a):
        return d - a

    @graph.register(inputs=['c'], outputs=['d'])
    def f_my_function2(c):
        return c / 10.

    # Execution order follows dependencies, not registration order.
    expected = ['f_my_function', 'f_my_function2', 'f_my_function3']
    dag = graph.dag
    for i, fct_name in enumerate(expected):
        assert dag[i][0].fct_name == fct_name


def test_passing_data_to_node_definition():
    """A dict input ({'b': 2}) acts as a constant alongside fed data."""
    graph = Graph()

    @graph.register(inputs=['a', {'b': 2}], outputs=['c'])
    def f_my_function(a, b):
        return a + b

    res = graph.calculate(data={'a': 5})
    assert res == 7


def test_Input_type_input():
    """Input objects are accepted directly; `map` renames the data key."""
    graph = Graph()

    @graph.register(
        inputs=[Input(name='a'), Input(name='inp_1_1', map='b')],
        outputs=['c']
    )
    def f_my_function(a, b):
        return a + b

    res = graph.calculate(data={'a': 2, 'b': 3})
    assert res == 5


def test_input_type_tuple():
    """(name, map) tuples are shorthand for Input(name=..., map=...)."""
    graph = Graph()

    @graph.register(
        inputs=[('inp_1', 'a'), ('b', 'b')],
        outputs=['c']
    )
    def f_my_function(a, b):
        return a + b

    res = graph.calculate(data={'a': 2, 'b': 3})
    assert res == 5
def test_wrong_input_type():
    """A set is not a valid input spec; the error names the accepted types."""
    graph = Graph()
    with pytest.raises(PyungoError) as err:
        @graph.register(inputs=['a', {'b'}], outputs=['c'])
        def f_my_function(a, b):
            return a + b
    assert "inputs need to be of type tuple, Input, str or dict" in str(err.value)


def test_input_tuple_too_long():
    """Tuple inputs must be exactly (name, map) pairs."""
    graph = Graph()
    with pytest.raises(PyungoError) as err:
        @graph.register(inputs=[('a', 'input', 'too_long'), 'b'], outputs=['c'])
        def f_my_function(a, b):
            return a + b
    assert "Tuple input should like (name, map). However, get ('a', 'input', 'too_long')" in str(err.value)


def test_empty_input_dict():
    """An empty dict is not a valid constant input."""
    graph = Graph()
    with pytest.raises(PyungoError) as err:
        @graph.register(inputs=['a', {}], outputs=['c'])
        def f_my_function(a, b):
            return a + b
    assert "dict inputs should have only one key and cannot be empty" in str(err.value)


def test_multiple_keys_input_dict():
    """A constant-input dict must contain exactly one key."""
    graph = Graph()
    with pytest.raises(PyungoError) as err:
        @graph.register(inputs=['a', {'b': 1, 'c': 2}], outputs=['c'])
        def f_my_function(a, b):
            return a + b
    assert "dict inputs should have only one key and cannot be empty" in str(err.value)


def test_not_str_name():
    """IO names must be strings; the error reports the bad value and type."""
    graph = Graph()
    with pytest.raises(PyungoError) as err:
        @graph.register(inputs=[(23, 'a')], outputs=['c'])
        def f_my_function(a, b):
            return a + b
    assert "IO name must be str, however get name = 23 with type <class 'int'>" in str(err.value)


def test_not_str_map():
    """IO maps must be strings; the error reports the bad value and type."""
    graph = Graph()
    with pytest.raises(PyungoError) as err:
        @graph.register(inputs=[Input('a', map=23)], outputs=['c'])
        def f_my_function(a, b):
            return a + b
    assert "IO map must be str, however get map = 23 with type <class 'int'>" in str(err.value)


@pytest.mark.skip("Don't Support Contract Now")
def test_contract_inputs():
    """(Skipped) pycontracts input validation: 'int,>0' rejects a = -2."""
    from contracts import ContractNotRespected
    graph = Graph()

    @graph.register(
        inputs=[Input(name='a', contract='int,>0'), 'b'],
        outputs=['c']
    )
    def f_my_function(a, b):
        return a + b

    res = graph.calculate(data={'a': 2, 'b': 3})
    assert res == 5
    res = graph.calculate(data={'a': 2, 'b': 3})
    assert res == 5
    with pytest.raises(ContractNotRespected) as err:
        res = graph.calculate(data={'a': -2, 'b': 3})
    assert "Condition -2 > 0 not respected" in str(err.value)


@pytest.mark.skip("Don't Support Contract Now")
def test_contract_outputs():
    """(Skipped) pycontracts output validation: 'int,>0' rejects c = -1."""
    from contracts import ContractNotRespected
    graph = Graph()

    @graph.register(
        inputs=['a', 'b'],
        outputs=[Output('c', contract='int,>0')]
    )
    def f_my_function(a, b):
        return a + b

    res = graph.calculate(data={'a': 2, 'b': 3})
    assert res == 5
    with pytest.raises(ContractNotRespected) as err:
        res = graph.calculate(data={'a': -4, 'b': 3})
    assert "Condition -1 > 0 not respected" in str(err.value)
def test_map():
    """`map` renames both inputs (fed as q/w) and outputs (stored as e)."""
    graph = Graph()

    @graph.register(
        inputs=[Input('a', map='q'), Input('b', map='w')],
        outputs=[Output('c', map='e')]
    )
    def f_my_function(a, b):
        return a + b

    res = graph.calculate(data={'q': 2, 'w': 3})
    assert res == 5
    assert graph.data['e'] == 5


def test_build_with_map_feed_with_name():
    """The same node-local name ('foo') can map to different data keys per node."""
    graph = Graph()

    @graph.register(inputs=[('foo', 'a')], kwargs=[('inp1_2', 'b')], outputs=['c'])
    def f_my_function1(inp1_1, inp1_2):
        return inp1_1 + inp1_2

    @graph.register(args=[Input(name='foo', map='d')], kwargs=[Input(name='inp3_2', map='a')], outputs=['e'])
    def f_my_function3(inp3_1, inp3_2):
        return inp3_1 - inp3_2

    @graph.register(inputs=[('foo', 'c')], outputs=['d'])
    def f_my_function2(inp2_1):
        return inp2_1 / 10.

    res = graph.calculate(data={'a': 2, 'b': 3})
    assert res == -1.5
    assert graph.data['e'] == -1.5
    # make sure it is independent
    res = graph.calculate(data={'a': 2, 'b': 3})
    assert res == -1.5
    assert graph.data['e'] == -1.5


def test_schema():
    """A jsonschema passed to Graph validates the fed data before running."""
    from jsonschema import ValidationError
    schema = {
        "type": "object",
        "properties": {
            "a": {"type": "number"},
            "b": {"type": "number"}
        }
    }
    graph = Graph(schema=schema)

    @graph.register(
        inputs=['a', 'b'],
        outputs=['c']
    )
    def f_my_function(a, b):
        return a + b

    # A string where a number is required fails validation.
    with pytest.raises(ValidationError) as err:
        graph.calculate(data={'a': 1, 'b': '2'})
    msg = "'2' is not of type 'number'"
    assert msg in str(err.value)
    res = graph.calculate(data={'a': 1, 'b': 2})
    assert res == 3


def test_find_default_by_name_not_map():
    """Defaults are looked up by the node-local name, not by the mapped key."""
    graph = Graph()

    @graph.register(inputs=['a', ('inp2', 'b')], kwargs=[('inp3', 'c')], outputs=['d'])
    def f(inp1, inp2=2, inp3=3):
        return inp1 + inp2 + inp3

    # b/c are not fed, so inp2=2 and inp3=3 defaults apply: 1 + 2 + 3 == 6.
    res = graph.calculate(data={'a': 1})
    assert res == 6
    assert graph.data['d'] == 6


def test_optional_inputs_without_feed():
    """Unfed optional inputs/kwargs fall back to the function defaults."""
    graph = Graph()

    @graph.register(inputs=['a', 'b'], kwargs=['c'], outputs=['d'])
    def f(a, b=2, c=3):
        return a + b + c

    res = graph.calculate(data={'a': 1})
    assert res == 6
    assert graph.data['d'] == 6


def test_optional_inputs_feed_by_input():
    """Fed values override function defaults: 1 + -1 + -2 == -2."""
    graph = Graph()

    @graph.register(inputs=['a', 'b'], kwargs=['c'], outputs=['d'])
    def f(a, b=2, c=3):
        return a + b + c

    res = graph.calculate(data={'a': 1, 'b': -1, 'c': -2})
    assert res == -2
    assert graph.data['d'] == -2


def test_optional_inputs_feed_by_output():
    """Optional inputs can be satisfied by upstream node outputs (c=4, d=-2)."""
    graph = Graph()

    @graph.register(inputs=['a'], kwargs=['b'], outputs=['c'])
    def f(a, b):
        return a + b

    @graph.register(inputs=['a'], kwargs=['b'], outputs=['d'])
    def f2(a, b):
        return a - b

    @graph.register(inputs=['c'], kwargs=[Input(map='d', name='inp2')], outputs=['e'])
    def f1(inp1=0, inp2=0):
        return inp1 + inp2

    res = graph.calculate(data={'a': 1, 'b': 3})
    assert res == 2
    assert graph.data['e'] == res
    assert graph.data['c'] == 4
    assert graph.data['d'] == -2
def test_no_explicit_inputs_outputs_simple():
    """Without explicit IO, inputs come from the signature and the output name
    is inferred from the returned variable name ('c')."""
    graph = Graph()

    @graph.register()
    def f(a, b):
        c = a + b
        return c

    res = graph.calculate(data={'a': 1, 'b': 2})
    assert res == 3
    assert graph.data['c'] == 3


def test_no_explicit_inputs_outputs_tuple():
    """Returning a tuple of named variables infers multiple outputs ('e', 'f')."""
    graph = Graph()

    @graph.register()
    def f(a, b, c, d):
        e = a + b
        f = c - d
        return e, f

    res = graph.calculate(data={'a': 1, 'b': 2, 'c': 3, 'd': 4})
    assert res == (3, -1)
    assert graph.data['e'] == 3
    assert graph.data['f'] == -1


def test_no_explicit_inputs_outputs_bad_return():
    """Returning an expression (a + b) cannot be named, so registration fails."""
    graph = Graph()
    with pytest.raises(PyungoError) as err:
        @graph.register()
        def f(a, b):
            return a + b
    expected = ('Variable name or Tuple of variable '
                'names are expected, got BinOp')
    assert str(err.value) == expected


def test_sub_graph():
    """Graphs can be nested as nodes; each sub-graph keeps its own data."""
    def f_my_function1(inp1_1, inp1_2):
        return inp1_1 + inp1_2

    def f_my_function2(inp2_1):
        return inp2_1 / 10.

    def f_my_function3(inp3_1, inp3_2):
        return inp3_1 - inp3_2

    graph0 = Graph()
    graph0_0 = Graph()
    graph0_1 = Graph()
    graph0_1_0 = Graph()
    # inp2_1 = 1.5, inp3_2 = 0.15, out=0.1785
    graph0.add_node(graph0_0, kwargs=['inp1_1', 'inp1_2'], outputs=['inp2_1'])
    graph0.add_node(f_my_function2, inputs=['inp2_1'], outputs=['inp3_2'])
    graph0.add_node(graph0_1, kwargs=[('inp1', 'inp1_1'), ('inp2', 'inp3_2')], outputs=['out'])
    graph0_0.add_node(f_my_function1, inputs=['inp1_1', 'inp1_2'], outputs=['inp2_1'])
    graph0_0.add_node(f_my_function2, inputs=['inp2_1'], outputs=['inp3_2'])
    graph0_0.add_node(f_my_function3, inputs=[('inp3_1', 'inp1_1'), 'inp3_2'], outputs=['out'])
    # inp2_1 = 0.215, inp3_2 = 0.0215, out = 0.1785
    graph0_1_0.add_node(f_my_function1, inputs=['inp1_1', 'inp1_2'], outputs=['inp2_1'])
    graph0_1_0.add_node(f_my_function2, inputs=['inp2_1'], outputs=['inp3_2'])
    graph0_1_0.add_node(f_my_function3, inputs=[('inp3_1', 'inp1_1'), 'inp3_2'], outputs=['out'])
    # out1 = 0.2, out2 = 0.015, out=0.1785
    graph0_1.add_node(f_my_function2, inputs=['inp1'], outputs=['out1'])
    graph0_1.add_node(f_my_function2, inputs=['inp2'], outputs=['out2'])
    graph0_1.add_node(graph0_1_0, kwargs=[('inp1_1', 'out1'), ('inp1_2', 'out2')], outputs=['out3'])
    for _ in range(2):
        res = graph0(inp1_1=2, inp1_2=3)
        assert 0.1785 == pytest.approx(res)
        assert graph0.data['out'] == res
        assert graph0_0.data['out'] == 1.5
        assert graph0_1.data['out3'] == pytest.approx(0.1785)
        assert graph0_1_0.data['out'] == pytest.approx(0.1785)


def test_sub_graph_with_arg_input():
    """Sub-graph nodes accept only kwargs inputs; args are rejected."""
    graph0 = Graph()
    graph0_0 = Graph()
    with pytest.raises(PyungoError) as err:
        graph0.add_node(graph0_0, args=['inp1_1', 'inp1_2'], outputs=['inp2_1'])
    assert "Node with Graph can only accept kwargs input. However, get args = ['inp1_1', 'inp1_2']" in str(err.value)
def test_deep_ordered_nodes():
    """deep_ordered_nodes flattens nested graphs depth-first in execution order."""
    def f_my_function1(inp1_1, inp1_2):
        return inp1_1 + inp1_2

    def f_my_function2(inp2_1):
        return inp2_1 / 10.

    def f_my_function3(inp3_1, inp3_2):
        return inp3_1 - inp3_2

    graph0 = Graph()
    graph0_0 = Graph()
    graph0_1 = Graph()
    graph0_1_0 = Graph()
    # inp2_1 = 1.5, inp3_2 = 0.15, out=0.1785
    graph0.add_node(graph0_0, kwargs=['inp1_1', 'inp1_2'], outputs=['inp2_1'])
    graph0.add_node(f_my_function2, inputs=['inp2_1'], outputs=['inp3_2'])
    graph0.add_node(graph0_1, kwargs=[('inp1', 'inp1_1'), ('inp2', 'inp3_2')], outputs=['out'])
    graph0_0.add_node(f_my_function1, inputs=['inp1_1', 'inp1_2'], outputs=['inp2_1'])
    graph0_0.add_node(f_my_function2, inputs=['inp2_1'], outputs=['inp3_2'])
    graph0_0.add_node(f_my_function3, inputs=[('inp3_1', 'inp1_1'), 'inp3_2'], outputs=['out'])
    # inp2_1 = 0.215, inp3_2 = 0.0215, out = 0.1785
    graph0_1_0.add_node(f_my_function1, inputs=['inp1_1', 'inp1_2'], outputs=['inp2_1'])
    graph0_1_0.add_node(f_my_function2, inputs=['inp2_1'], outputs=['inp3_2'])
    graph0_1_0.add_node(f_my_function3, inputs=[('inp3_1', 'inp1_1'), 'inp3_2'], outputs=['out'])
    # out1 = 0.2, out2 = 0.015, out=0.1785
    graph0_1.add_node(f_my_function2, inputs=['inp1'], outputs=['out1'])
    graph0_1.add_node(f_my_function2, inputs=['inp2'], outputs=['out2'])
    graph0_1.add_node(graph0_1_0, kwargs=[('inp1_1', 'out1'), ('inp1_2', 'out2')], outputs=['out3'])
    deep_ordered_nodes = graph0.deep_ordered_nodes
    graph0_nodes = graph0.ordered_nodes
    graph0_0_nodes = graph0_0.ordered_nodes
    graph0_1_nodes = graph0_1.ordered_nodes
    graph0_1_0_nodes = graph0_1_0.ordered_nodes
    # Expected flattening: graph0[0], graph0_0[0:3], graph0[1], graph0[2],
    # graph0_1[0:3], graph0_1_0[0:3] -- sub-graph nodes inline after their parent.
    assert deep_ordered_nodes[0] is graph0_nodes[0]
    for n1, n2 in zip(deep_ordered_nodes[1:4], graph0_0_nodes):
        assert n1 is n2
    assert deep_ordered_nodes[4] is graph0_nodes[1]
    assert deep_ordered_nodes[5] is graph0_nodes[2]
    for n1, n2 in zip(deep_ordered_nodes[6:9], graph0_1_nodes):
        assert n1 is n2
    for n1, n2 in zip(deep_ordered_nodes[9:], graph0_1_0_nodes):
        assert n1 is n2


def test_deep_prefix_id_ordered_nodes():
    """deep_prefix_id_ordered_nodes() yields (dotted-prefix-id, node) pairs where
    the prefix chains the enclosing sub-graph node ids."""
    def f_my_function1(inp1_1, inp1_2):
        return inp1_1 + inp1_2

    def f_my_function2(inp2_1):
        return inp2_1 / 10.

    def f_my_function3(inp3_1, inp3_2):
        return inp3_1 - inp3_2

    graph0 = Graph()
    graph0_0 = Graph()
    graph0_1 = Graph()
    graph0_1_0 = Graph()
    # inp2_1 = 1.5, inp3_2 = 0.15, out=0.1785
    graph0.add_node(graph0_0, kwargs=['inp1_1', 'inp1_2'], outputs=['inp2_1'])
    graph0.add_node(f_my_function2, inputs=['inp2_1'], outputs=['inp3_2'])
    graph0.add_node(graph0_1, kwargs=[('inp1', 'inp1_1'), ('inp2', 'inp3_2')], outputs=['out'])
    graph0_0.add_node(f_my_function1, inputs=['inp1_1', 'inp1_2'], outputs=['inp2_1'])
    graph0_0.add_node(f_my_function2, inputs=['inp2_1'], outputs=['inp3_2'])
    graph0_0.add_node(f_my_function3, inputs=[('inp3_1', 'inp1_1'), 'inp3_2'], outputs=['out'])
    # inp2_1 = 0.215, inp3_2 = 0.0215, out = 0.1785
    graph0_1_0.add_node(f_my_function1, inputs=['inp1_1', 'inp1_2'], outputs=['inp2_1'])
    graph0_1_0.add_node(f_my_function2, inputs=['inp2_1'], outputs=['inp3_2'])
    graph0_1_0.add_node(f_my_function3, inputs=[('inp3_1', 'inp1_1'), 'inp3_2'], outputs=['out'])
    # out1 = 0.2, out2 = 0.015, out=0.1785
    graph0_1.add_node(f_my_function2, inputs=['inp1'], outputs=['out1'])
    graph0_1.add_node(f_my_function2, inputs=['inp2'], outputs=['out2'])
    graph0_1.add_node(graph0_1_0, kwargs=[('inp1_1', 'out1'), ('inp1_2', 'out2')], outputs=['out3'])
    prefix_ids, deep_ordered_nodes = zip(*graph0.deep_prefix_id_ordered_nodes())
    graph0_nodes = graph0.ordered_nodes
    graph0_0_nodes = graph0_0.ordered_nodes
    graph0_1_nodes = graph0_1.ordered_nodes
    graph0_1_0_nodes = graph0_1_0.ordered_nodes
    # Build the expected prefix ids by joining enclosing node ids with '.'.
    graph0_prefix_ids = [n.id for n in graph0.ordered_nodes]
    graph0_0_prefix_ids = [graph0_nodes[0].id + '.' + n.id for n in graph0_0.ordered_nodes]
    graph0_1_prefix_ids = [graph0_nodes[2].id + '.' + n.id for n in graph0_1.ordered_nodes]
    graph0_1_0_prefix_ids = [graph0_nodes[2].id + '.' + graph0_1_nodes[2].id + '.' + n.id
                             for n in graph0_1_0.ordered_nodes]
    # Node ordering mirrors test_deep_ordered_nodes.
    assert deep_ordered_nodes[0] is graph0_nodes[0]
    for n1, n2 in zip(deep_ordered_nodes[1:4], graph0_0_nodes):
        assert n1 is n2
    assert deep_ordered_nodes[4] is graph0_nodes[1]
    assert deep_ordered_nodes[5] is graph0_nodes[2]
    for n1, n2 in zip(deep_ordered_nodes[6:9], graph0_1_nodes):
        assert n1 is n2
    for n1, n2 in zip(deep_ordered_nodes[9:], graph0_1_0_nodes):
        assert n1 is n2
    # Prefix ids line up with the same flattened positions.
    assert prefix_ids[0] == graph0_prefix_ids[0]
    for pi1, pi2 in zip(prefix_ids[1:4], graph0_0_prefix_ids):
        assert pi1 == pi2
    assert prefix_ids[4] == graph0_prefix_ids[1]
    assert prefix_ids[5] == graph0_prefix_ids[2]
    for pi1, pi2 in zip(prefix_ids[6:9], graph0_1_prefix_ids):
        assert pi1 == pi2
    for pi1, pi2 in zip(prefix_ids[9:], graph0_1_0_prefix_ids):
        assert pi1 == pi2
| 29.075972
| 117
| 0.584189
| 5,073
| 32,914
| 3.594717
| 0.050857
| 0.014367
| 0.026651
| 0.048969
| 0.820684
| 0.794527
| 0.743968
| 0.708544
| 0.696809
| 0.665716
| 0
| 0.053422
| 0.231664
| 32,914
| 1,131
| 118
| 29.10168
| 0.667682
| 0.022847
| 0
| 0.657658
| 0
| 0.002574
| 0.081205
| 0.001371
| 0
| 0
| 0
| 0.000884
| 0.184041
| 1
| 0.200772
| false
| 0.001287
| 0.011583
| 0.123552
| 0.343629
| 0.001287
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
ebc7c3403e28fc02dfde353880f2f7cd13d71155
| 1,832
|
py
|
Python
|
example/to_see.py
|
Bytom/python-bytomlib
|
f621b282a221ee5195332900144159aa9fee97e2
|
[
"MIT"
] | 1
|
2021-09-02T08:30:54.000Z
|
2021-09-02T08:30:54.000Z
|
example/to_see.py
|
Bytom/python-bytomlib
|
f621b282a221ee5195332900144159aa9fee97e2
|
[
"MIT"
] | null | null | null |
example/to_see.py
|
Bytom/python-bytomlib
|
f621b282a221ee5195332900144159aa9fee97e2
|
[
"MIT"
] | null | null | null |
# coding:utf-8
# Example: decode two variants of the same raw Bytom transaction via a remote
# node and print both decoded forms for comparison.
from pybtmsdk import BytomAPI
from pybtmsdk.transaction import decode_raw_tx, encode_raw_tx  # NOTE(review): unused in this example
from pybtmsdk.signature import generate_signatures_use_mnemonic  # NOTE(review): unused in this example

url = 'http://139.224.216.240:9887'
access_token = 'YOUR_ACCESS_TOKEN'  # NOTE(review): defined but never passed to BytomAPI
api = BytomAPI(url=url)
# The two hex blobs below differ only in their witness/signature section
# (presumably locally-signed vs API-signed -- confirm against the node docs).
print("my_sign:", api.decode_raw_transaction("0701dfd5c8d505010161015feba550aa3865b36103c49c8a261e5350edeea499b1fb7aed80a60ab498c74900ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8094ebdc030101160014be416094fef308e719c6e270b4fd1d0259975729008301024079363f5dd0f1f9f6ec003901283cf8907243fa5ca366f19d3af5faf8bdc107142facf53e3bc8c592c43fbbe11a25b4afec5453b3616eda285fe3e8adcfb0e30a40d0867e7bdc24c8e6e198264fca59b1dc5f3183f5e4105ba96449805c3e56a270258b269bb7b4dba9695496a54acd6b32cda224719b1bc2df73f62e2a19a71ca60201003effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff80dfd2dc0301160014e04d3ae5387b6b97a686abcbef81c2f69f27ddc9000001003cffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffb0ea01011600148b93f5a0fffb0ae88ea82cf90350cef6f008d7370000", return_dict=True))
print("api_sign:", api.decode_raw_transaction("0701dfd5c8d505010161015feba550aa3865b36103c49c8a261e5350edeea499b1fb7aed80a60ab498c74900ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff8094ebdc030101160014be416094fef308e719c6e270b4fd1d02599757290063024079363f5dd0f1f9f6ec003901283cf8907243fa5ca366f19d3af5faf8bdc107142facf53e3bc8c592c43fbbe11a25b4afec5453b3616eda285fe3e8adcfb0e30a20d0867e7bdc24c8e6e198264fca59b1dc5f3183f5e4105ba96449805c3e56a2700201003effffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff80dfd2dc0301160014e04d3ae5387b6b97a686abcbef81c2f69f27ddc9000001003cffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffb0ea01011600148b93f5a0fffb0ae88ea82cf90350cef6f008d7370000", return_dict=True))
| 87.238095
| 809
| 0.950873
| 63
| 1,832
| 27.365079
| 0.571429
| 0.020882
| 0.015081
| 0.018561
| 0.031323
| 0
| 0
| 0
| 0
| 0
| 0
| 0.379118
| 0.02238
| 1,832
| 20
| 810
| 91.6
| 0.583473
| 0.00655
| 0
| 0
| 1
| 0
| 0.814876
| 0.781267
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.375
| 0
| 0.375
| 0.25
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.